mypy-0.560/0000755€tŠÔÚ€2›s®0000000000013215007244016672 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/docs/0000755€tŠÔÚ€2›s®0000000000013215007242017620 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/docs/make.bat0000755€tŠÔÚ€2›s®0000001506213215007205021233 0ustar jukkaDROPBOX\Domain Users00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source set I18NSPHINXOPTS=%SPHINXOPTS% source if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. 
echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Mypy.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Mypy.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end mypy-0.560/docs/Makefile0000644€tŠÔÚ€2›s®0000001515313215007205021264 0ustar jukkaDROPBOX\Domain Users00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. 
Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Mypy.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Mypy.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Mypy" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Mypy" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. 
The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." mypy-0.560/docs/README.md0000644€tŠÔÚ€2›s®0000000207113215007205021076 0ustar jukkaDROPBOX\Domain Users00000000000000Mypy Documentation ================== What's this? ------------ This directory contains the source code for Mypy documentation (under `source/`) and build scripts. The documentation uses Sphinx and reStructuredText. We use `sphinx-rtd-theme` as the documentation theme. Building the documentation -------------------------- Install Sphinx and other dependencies (i.e. theme) needed for the documentation. From the `docs` directory, use `pip`: ``` $ pip install -r requirements-docs.txt ``` Build the documentation like this: ``` $ make html ``` The built documentation will be placed in the `docs/build` directory. Open `docs/build/index.html` to view the documentation. 
Helpful documentation build commands ------------------------------------ Clean the documentation build: ``` $ make clean ``` Test and check the links found in the documentation: ``` $ make linkcheck ``` Documentation on Read The Docs ------------------------------ The mypy documentation is hosted on Read The Docs, and the latest version can be found at https://mypy.readthedocs.io/en/latest. mypy-0.560/docs/requirements-docs.txt0000644€tŠÔÚ€2›s®0000000005213215007205024026 0ustar jukkaDROPBOX\Domain Users00000000000000Sphinx >= 1.4.4 sphinx-rtd-theme >= 0.1.9 mypy-0.560/docs/source/0000755€tŠÔÚ€2›s®0000000000013215007242021120 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/docs/source/additional_features.rst0000644€tŠÔÚ€2›s®0000000041713215007205025661 0ustar jukkaDROPBOX\Domain Users00000000000000Additional features ------------------- Several mypy features are not currently covered by this tutorial, including the following: - inheritance between generic classes - compatibility and subtyping of generic types, including covariance of generic types - ``super()`` mypy-0.560/docs/source/basics.rst0000644€tŠÔÚ€2›s®0000001502213215007205023115 0ustar jukkaDROPBOX\Domain Users00000000000000Basics ====== This chapter introduces some core concepts of mypy, including function annotations, the ``typing`` module and library stubs. Read it carefully, as the rest of documentation may not make much sense otherwise. Function signatures ******************* A function without a type annotation is considered dynamically typed: .. code-block:: python def greeting(name): return 'Hello, {}'.format(name) You can declare the signature of a function using the Python 3 annotation syntax (Python 2 is discussed later in :ref:`python2`). This makes the function statically typed, and that causes type checker report type errors within the function. Here's a version of the above function that is statically typed and will be type checked: .. code-block:: python def greeting(name: str) -> str: return 'Hello, {}'.format(name) If a function does not explicitly return a value we give the return type as ``None``. Using a ``None`` result in a statically typed context results in a type check error: .. code-block:: python def p() -> None: print('hello') a = p() # Type check error: p has None return value Arguments with default values can be annotated as follows: .. code-block:: python def greeting(name: str, prefix: str = 'Mr.') -> str: return 'Hello, {} {}'.format(name, prefix) Mixing dynamic and static typing ******************************** Mixing dynamic and static typing within a single file is often useful. For example, if you are migrating existing Python code to static typing, it may be easiest to do this incrementally, such as by migrating a few functions at a time. Also, when prototyping a new feature, you may decide to first implement the relevant code using dynamic typing and only add type signatures later, when the code is more stable. .. code-block:: python def f(): 1 + 'x' # No static type error (dynamically typed) def g() -> None: 1 + 'x' # Type check error (statically typed) .. note:: The earlier stages of mypy, known as the semantic analysis, may report errors even for dynamically typed functions. However, you should not rely on this, as this may change in the future. The typing module ***************** The ``typing`` module contains many definitions that are useful in statically typed code. You typically use ``from ... import`` to import them (we'll explain ``Iterable`` later in this document): .. 
code-block:: python from typing import Iterable def greet_all(names: Iterable[str]) -> None: for name in names: print('Hello, {}'.format(name)) For brevity, we often omit the ``typing`` import in code examples, but you should always include it in modules that contain statically typed code. The presence or absence of the ``typing`` module does not affect whether your code is type checked; it is only required when you use one or more special features it defines. Type checking programs ********************** You can type check a program by using the ``mypy`` tool, which is basically a linter -- it checks your program for errors without actually running it:: $ mypy program.py All errors reported by mypy are essentially warnings that you are free to ignore, if you so wish. The next chapter explains how to download and install mypy: :ref:`getting-started`. More command line options are documented in :ref:`command-line`. .. note:: Depending on how mypy is configured, you may have to explicitly use the Python 3 interpreter to run mypy. The mypy tool is an ordinary mypy (and so also Python) program. For example:: $ python3 -m mypy program.py .. _library-stubs: Library stubs and the Typeshed repo *********************************** In order to type check code that uses library modules such as those included in the Python standard library, you need to have library *stubs*. A library stub defines a skeleton of the public interface of the library, including classes, variables and functions and their types, but dummy function bodies. For example, consider this code: .. code-block:: python x = chr(4) Without a library stub, the type checker would have no way of inferring the type of ``x`` and checking that the argument to ``chr`` has a valid type. Mypy incorporates the `typeshed `_ project, which contains library stubs for the Python builtins and the standard library. The stub for the builtins contains a definition like this for ``chr``: .. code-block:: python def chr(code: int) -> str: ... In stub files we don't care about the function bodies, so we use an ellipsis instead. That ``...`` is three literal dots! Mypy complains if it can't find a stub (or a real module) for a library module that you import. You can create a stub easily; here is an overview: * Write a stub file for the library and store it as a ``.pyi`` file in the same directory as the library module. * Alternatively, put your stubs (``.pyi`` files) in a directory reserved for stubs (e.g., ``myproject/stubs``). In this case you have to set the environment variable ``MYPYPATH`` to refer to the directory. For example:: $ export MYPYPATH=~/work/myproject/stubs Use the normal Python file name conventions for modules, e.g. ``csv.pyi`` for module ``csv``. Use a subdirectory with ``__init__.pyi`` for packages. If a directory contains both a ``.py`` and a ``.pyi`` file for the same module, the ``.pyi`` file takes precedence. This way you can easily add annotations for a module even if you don't want to modify the source code. This can be useful, for example, if you use 3rd party open source libraries in your program (and there are no stubs in typeshed yet). That's it! Now you can access the module in mypy programs and type check code that uses the library. If you write a stub for a library module, consider making it available for other programmers that use mypy by contributing it back to the typeshed repo. There is more information about creating stubs in the `mypy wiki `_. 
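For concreteness, here is a rough sketch of what a hand-written stub for a hypothetical third-party module ``frobnicate`` might look like (the module and all of its names are made up for illustration; only the signatures matter, and the bodies are all ``...``):

.. code-block:: python

    # frobnicate.pyi -- stub for a hypothetical module; place it next to
    # frobnicate.py, or in a stubs directory listed in MYPYPATH
    from typing import List, Optional

    MAX_RETRIES: int

    def frob(items: List[int], repeat: int = ...) -> List[int]: ...

    class Frobber:
        def __init__(self, name: str, limit: Optional[int] = None) -> None: ...
        def run(self) -> bool: ...

With this file on the search path, mypy uses the declared signatures when checking code that imports ``frobnicate``, even though the stub contains no implementation.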
The following sections explain the kinds of type annotations you can use in your programs and stub files. .. note:: You may be tempted to point ``MYPYPATH`` to the standard library or to the ``site-packages`` directory where your 3rd party packages are installed. This is almost always a bad idea -- you will likely get tons of error messages about code you didn't write and that mypy can't analyze all that well yet, and in the worst case scenario mypy may crash due to some construct in a 3rd party package that it didn't expect. mypy-0.560/docs/source/builtin_types.rst0000644€tŠÔÚ€2›s®0000000356613215007205024555 0ustar jukkaDROPBOX\Domain Users00000000000000Built-in types ============== These are examples of some of the most common built-in types: =================== =============================== Type Description =================== =============================== ``int`` integer of arbitrary size ``float`` floating point number ``bool`` boolean value ``str`` unicode string ``bytes`` 8-bit string ``object`` an arbitrary object (``object`` is the common base class) ``List[str]`` list of ``str`` objects ``Tuple[int, int]`` tuple of two ``int``s (``Tuple[()]`` is the empty tuple) ``Tuple[int, ...]`` tuple of an arbitrary number of ``int`` objects ``Dict[str, int]`` dictionary from ``str`` keys to ``int`` values ``Iterable[int]`` iterable object containing ints ``Sequence[bool]`` sequence of booleans ``Any`` dynamically typed value with an arbitrary type =================== =============================== The type ``Any`` and type constructors ``List``, ``Dict``, ``Iterable`` and ``Sequence`` are defined in the ``typing`` module. The type ``Dict`` is a *generic* class, signified by type arguments within ``[...]``. For example, ``Dict[int, str]`` is a dictionary from integers to strings and and ``Dict[Any, Any]`` is a dictionary of dynamically typed (arbitrary) values and keys. ``List`` is another generic class. ``Dict`` and ``List`` are aliases for the built-ins ``dict`` and ``list``, respectively. ``Iterable`` and ``Sequence`` are generic abstract base classes that correspond to Python protocols. For example, a ``str`` object or a ``List[str]`` object is valid when ``Iterable[str]`` or ``Sequence[str]`` is expected. Note that even though they are similar to abstract base classes defined in ``abc.collections`` (formerly ``collections``), they are not identical, since the built-in collection type objects do not support indexing. mypy-0.560/docs/source/casts.rst0000644€tŠÔÚ€2›s®0000000240613215007205022770 0ustar jukkaDROPBOX\Domain Users00000000000000.. _casts: Casts ===== Mypy supports type casts that are usually used to coerce a statically typed value to a subtype. Unlike languages such as Java or C#, however, mypy casts are only used as hints for the type checker, and they don't perform a runtime type check. Use the function ``cast`` to perform a cast: .. code-block:: python from typing import cast, List o = [1] # type: object x = cast(List[int], o) # OK y = cast(List[str], o) # OK (cast performs no actual runtime check) To support runtime checking of casts such as the above, we'd have to check the types of all list items, which would be very inefficient for large lists. Use assertions if you want to perform an actual runtime check. Casts are used to silence spurious type checker warnings and give the type checker a little help when it can't quite understand what is going on. 
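As a minimal sketch of the assertion-based alternative mentioned above (the helper function is made up for illustration), an ``isinstance`` assertion both checks the value at runtime and narrows the type for mypy, so no cast is needed:

.. code-block:: python

    from typing import List

    def first_int(items: List[object]) -> int:  # illustrative helper
        x = items[0]
        assert isinstance(x, int)  # real runtime check; also narrows x to int
        return x                   # OK: mypy now knows x is an int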
You don't need a cast for expressions with type ``Any``, or when assigning to a variable with type ``Any``, as was explained earlier. You can also use ``Any`` as the cast target type -- this lets you perform any operations on the result. For example: .. code-block:: python from typing import cast, Any x = 1 x + 'x' # Type check error y = cast(Any, x) y + 'x' # Type check OK (runtime error) mypy-0.560/docs/source/cheat_sheet.rst0000644€tŠÔÚ€2›s®0000002036613215007205024134 0ustar jukkaDROPBOX\Domain Users00000000000000.. _cheat-sheet-py2: Mypy syntax cheat sheet (Python 2) ================================== This document is a quick cheat sheet showing how the `PEP 484 `_ type language represents various common types in Python 2. .. note:: Technically many of the type annotations shown below are redundant, because mypy can derive them from the type of the expression. So many of the examples have a dual purpose: show how to write the annotation, and show the inferred types. Built-in types ************** .. code-block:: python from typing import List, Set, Dict, Tuple, Text, Optional # For simple built-in types, just use the name of the type. x = 1 # type: int x = 1.0 # type: float x = True # type: bool x = "test" # type: str x = u"test" # type: unicode # For collections, the name of the type is capitalized, and the # name of the type inside the collection is in brackets. x = [1] # type: List[int] x = set([6, 7]) # type: Set[int] # Empty Tuple types are a bit special x = () # type: Tuple[()] # For mappings, we need the types of both keys and values. x = dict(field=2.0) # type: Dict[str, float] # For tuples, we specify the types of all the elements. x = (3, "yes", 7.5) # type: Tuple[int, str, float] # For textual data, use Text. # This is `unicode` in Python 2 and `str` in Python 3. x = ["string", u"unicode"] # type: List[Text] # Use Optional for values that could be None. input_str = f() # type: Optional[str] if input_str is not None: print input_str Functions ********* .. code-block:: python from typing import Callable, Iterable # This is how you annotate a function definition. def stringify(num): # type: (int) -> str """Your function docstring goes here after the type definition.""" return str(num) # This function has no parameters and also returns nothing. Annotations # can also be placed on the same line as their function headers. def greet_world(): # type: () -> None print "Hello, world!" # And here's how you specify multiple arguments. def plus(num1, num2): # type: (int, int) -> int return num1 + num2 # Add type annotations for kwargs as though they were positional args. def f(num1, my_float=3.5): # type: (int, float) -> float return num1 + my_float # An argument can be declared positional-only by giving it a name # starting with two underscores: def quux(__x): # type: (int) -> None pass quux(3) # Fine quux(__x=3) # Error # This is how you annotate a function value. x = f # type: Callable[[int, float], float] # A generator function that yields ints is secretly just a function that # returns an iterable (see below) of ints, so that's how we annotate it. def f(n): # type: (int) -> Iterable[int] i = 0 while i < n: yield i i += 1 # There's alternative syntax for functions with many arguments. def send_email(address, # type: Union[str, List[str]] sender, # type: str cc, # type: Optional[List[str]] bcc, # type: Optional[List[str]] subject='', body=None # type: List[str] ): # type: (...) 
-> bool When you're puzzled or when things are complicated ************************************************** .. code-block:: python from typing import Union, Any, cast # To find out what type mypy infers for an expression anywhere in # your program, wrap it in reveal_type. Mypy will print an error # message with the type; remove it again before running the code. reveal_type(1) # -> error: Revealed type is 'builtins.int' # Use Union when something could be one of a few types. x = [3, 5, "test", "fun"] # type: List[Union[int, str]] # Use Any if you don't know the type of something or it's too # dynamic to write a type for. x = mystery_function() # type: Any # This is how to deal with varargs. # This makes each positional arg and each keyword arg a 'str'. def call(self, *args, **kwargs): # type: (*str, **str) -> str request = make_request(*args, **kwargs) return self.do_api_query(request) # Use `ignore` to suppress type-checking on a given line, when your # code confuses mypy or runs into an outright bug in mypy. # Good practice is to comment every `ignore` with a bug link # (in mypy, typeshed, or your own code) or an explanation of the issue. x = confusing_function() # type: ignore # https://github.com/python/mypy/issues/1167 # cast is a helper function for mypy that allows for guidance of how to convert types. # it does not cast at runtime a = [4] b = cast(List[int], a) # passes fine c = cast(List[str], a) # passes fine (no runtime check) reveal_type(c) # -> error: Revealed type is 'builtins.list[builtins.str]' print(c) # -> [4] the object is not cast # if you want dynamic attributes on your class, have it override __setattr__ or __getattr__ # in a stub or in your source code. # __setattr__ allows for dynamic assignment to names # __getattr__ allows for dynamic access to names class A: # this will allow assignment to any A.x, if x is the same type as `value` def __setattr__(self, name, value): # type: (str, int) -> None ... a.foo = 42 # works a.bar = 'Ex-parrot' # fails type checking # TODO: explain "Need type annotation for variable" when # initializing with None or an empty container Standard duck types ******************* In typical Python code, many functions that can take a list or a dict as an argument only need their argument to be somehow "list-like" or "dict-like". A specific meaning of "list-like" or "dict-like" (or something-else-like) is called a "duck type", and several duck types that are common in idiomatic Python are standardized. .. code-block:: python from typing import Mapping, MutableMapping, Sequence, Iterable # Use Iterable for generic iterables (anything usable in `for`), # and Sequence where a sequence (supporting `len` and `__getitem__`) is required. def f(iterable_of_ints): # type: (Iterable[int]) -> List[str] return [str(x) for x in iterator_of_ints] f(range(1, 3)) # Mapping describes a dict-like object (with `__getitem__`) that we won't mutate, # and MutableMapping one (with `__setitem__`) that we might. def f(my_dict): # type: (Mapping[int, str]) -> List[int] return list(my_dict.keys()) f({3: 'yes', 4: 'no'}) def f(my_mapping): # type: (MutableMapping[int, str]) -> Set[str] my_dict[5] = 'maybe' return set(my_dict.values()) f({3: 'yes', 4: 'no'}) Classes ******* .. code-block:: python class MyClass(object): # For instance methods, omit `self`. def my_method(self, num, str1): # type: (int, str) -> str return num * str1 # The __init__ method doesn't return anything, so it gets return # type None just like any other method that doesn't return anything. 
def __init__(self): # type: () -> None pass # User-defined classes are written with just their own names. x = MyClass() # type: MyClass Other stuff *********** .. code-block:: python import sys # typing.Match describes regex matches from the re module. from typing import Match, AnyStr, IO x = re.match(r'[0-9]+', "15") # type: Match[str] # Use AnyStr for functions that should accept any kind of string # without allowing different kinds of strings to mix. def concat(a, b): # type: (AnyStr, AnyStr) -> AnyStr return a + b concat(u"foo", u"bar") # type: unicode concat(b"foo", b"bar") # type: bytes # Use IO[] for functions that should accept or return any # object that comes from an open() call. The IO[] does not # distinguish between reading, writing or other modes. def get_sys_IO(mode='w'): # type: (str) -> IO[str] if mode == 'w': return sys.stdout elif mode == 'r': return sys.stdin else: return sys.stdout # TODO: add TypeVar and a simple generic function mypy-0.560/docs/source/cheat_sheet_py3.rst0000644€tŠÔÚ€2›s®0000002420513215007205024723 0ustar jukkaDROPBOX\Domain Users00000000000000.. _cheat-sheet-py3: Mypy syntax cheat sheet (Python 3) ================================== This document is a quick cheat sheet showing how the `PEP 484 `_ type language represents various common types in Python 3. Unless otherwise noted, the syntax is valid on all versions of Python 3. .. note:: Technically many of the type annotations shown below are redundant, because mypy can derive them from the type of the expression. So many of the examples have a dual purpose: show how to write the annotation, and show the inferred types. Built-in types ************** .. code-block:: python from typing import List, Set, Dict, Tuple, Text, Optional, AnyStr # For simple built-in types, just use the name of the type. x = 1 # type: int x = 1.0 # type: float x = True # type: bool x = "test" # type: str x = u"test" # type: str x = b"test" # type: bytes # For collections, the name of the type is capitalized, and the # name of the type inside the collection is in brackets. x = [1] # type: List[int] x = {6, 7} # type: Set[int] # Empty Tuple types are a bit special x = () # type: Tuple[()] # For mappings, we need the types of both keys and values. x = {'field': 2.0} # type: Dict[str, float] # For tuples, we specify the types of all the elements. x = (3, "yes", 7.5) # type: Tuple[int, str, float] # For textual data, use Text. # This is `unicode` in Python 2 and `str` in Python 3. x = ["string", u"unicode"] # type: List[Text] # Use Optional for values that could be None. input_str = f() # type: Optional[str] if input_str is not None: print(input_str) Functions ********* Python 3 introduces an annotation syntax for function declarations in `PEP 3107 `_. .. code-block:: python from typing import Callable, Iterable, Union, Optional, List # This is how you annotate a function definition. def stringify(num: int) -> str: return str(num) # And here's how you specify multiple arguments. def plus(num1: int, num2: int) -> int: return num1 + num2 # Add type annotations for kwargs as though they were positional args. def f(num1: int, my_float: float = 3.5) -> float: return num1 + my_float # An argument can be declared positional-only by giving it a name # starting with two underscores: def quux(__x: int) -> None: pass quux(3) # Fine quux(__x=3) # Error # This is how you annotate a function value. 
x = f # type: Callable[[int, float], float] # A generator function that yields ints is secretly just a function that # returns an iterable (see below) of ints, so that's how we annotate it. def f(n: int) -> Iterable[int]: i = 0 while i < n: yield i i += 1 # For a function with many arguments, you can of course split it over multiple lines def send_email(address: Union[str, List[str]], sender: str, cc: Optional[List[str]], bcc: Optional[List[str]], subject='', body: List[str] = None ) -> bool: ... When you're puzzled or when things are complicated ************************************************** .. code-block:: python from typing import Union, Any, List, cast # To find out what type mypy infers for an expression anywhere in # your program, wrap it in reveal_type. Mypy will print an error # message with the type; remove it again before running the code. reveal_type(1) # -> error: Revealed type is 'builtins.int' # Use Union when something could be one of a few types. x = [3, 5, "test", "fun"] # type: List[Union[int, str]] # Use Any if you don't know the type of something or it's too # dynamic to write a type for. x = mystery_function() # type: Any # This is how to deal with varargs. # This makes each positional arg and each keyword arg a 'str'. def call(self, *args: str, **kwargs: str) -> str: request = make_request(*args, **kwargs) return self.do_api_query(request) # Use `ignore` to suppress type-checking on a given line, when your # code confuses mypy or runs into an outright bug in mypy. # Good practice is to comment every `ignore` with a bug link # (in mypy, typeshed, or your own code) or an explanation of the issue. x = confusing_function() # type: ignore # https://github.com/python/mypy/issues/1167 # cast is a helper function for mypy that allows for guidance of how to convert types. # it does not cast at runtime a = [4] b = cast(List[int], a) # passes fine c = cast(List[str], a) # passes fine (no runtime check) reveal_type(c) # -> error: Revealed type is 'builtins.list[builtins.str]' print(c) # -> [4] the object is not cast # if you want dynamic attributes on your class, have it override __setattr__ or __getattr__ # in a stub or in your source code. # __setattr__ allows for dynamic assignment to names # __getattr__ allows for dynamic access to names class A: # this will allow assignment to any A.x, if x is the same type as `value` def __setattr__(self, name: str, value: int) -> None: ... # this will allow access to any A.x, if x is compatible with the return type def __getattr__(self, name: str) -> int: ... a.foo = 42 # works a.bar = 'Ex-parrot' # fails type checking # TODO: explain "Need type annotation for variable" when # initializing with None or an empty container Standard duck types ******************* In typical Python code, many functions that can take a list or a dict as an argument only need their argument to be somehow "list-like" or "dict-like". A specific meaning of "list-like" or "dict-like" (or something-else-like) is called a "duck type", and several duck types that are common in idiomatic Python are standardized. .. code-block:: python from typing import Mapping, MutableMapping, Sequence, Iterable, List, Set # Use Iterable for generic iterables (anything usable in `for`), # and Sequence where a sequence (supporting `len` and `__getitem__`) is required. 
def f(iterable_of_ints: Iterable[int]) -> List[str]: return [str(x) for x in iterable_of_ints] f(range(1, 3)) # Mapping describes a dict-like object (with `__getitem__`) that we won't mutate, # and MutableMapping one (with `__setitem__`) that we might. def f(my_dict: Mapping[int, str])-> List[int]: return list(my_dict.keys()) f({3: 'yes', 4: 'no'}) def f(my_mapping: MutableMapping[int, str]) -> Set[str]: my_mapping[5] = 'maybe' return set(my_mapping.values()) f({3: 'yes', 4: 'no'}) Classes ******* .. code-block:: python class MyClass: # The __init__ method doesn't return anything, so it gets return # type None just like any other method that doesn't return anything. def __init__(self) -> None: ... # For instance methods, omit `self`. def my_method(self, num: int, str1: str) -> str: return num * str1 # User-defined classes are written with just their own names. x = MyClass() # type: MyClass Other stuff *********** .. code-block:: python import sys import re # typing.Match describes regex matches from the re module. from typing import Match, AnyStr, IO x = re.match(r'[0-9]+', "15") # type: Match[str] # You can use AnyStr to indicate that any string type will work # but not to mix types def full_name(first: AnyStr, last: AnyStr) -> AnyStr: return first+last full_name('Jon','Doe') # same str ok full_name(b'Bill', b'Bit') # same binary ok full_name(b'Terry', 'Trouble') # different str types, fails # Use IO[] for functions that should accept or return any # object that comes from an open() call. The IO[] does not # distinguish between reading, writing or other modes. def get_sys_IO(mode='w') -> IO[str]: if mode == 'w': return sys.stdout elif mode == 'r': return sys.stdin else: return sys.stdout # forward references are useful if you want to reference a class before it is designed def f(foo: A) -> int: # this will fail ... class A: ... # however, using the string 'A', it will pass as long as there is a class of that name later on def f(foo: 'A') -> int: ... # TODO: add TypeVar and a simple generic function Variable Annotation in Python 3.6 with PEP 526 ********************************************** Python 3.6 brings new syntax for annotating variables with `PEP 526 `_. Mypy brings limited support for PEP 526 annotations. .. code-block:: python # annotation is similar to arguments to functions name: str = "Eric Idle" # class instances can be annotated as follows mc : MyClass = MyClass() # tuple packing can be done as follows tu: Tuple[str, ...] = ('a', 'b', 'c') # annotations are not checked at runtime year: int = '1972' # error in type checking, but works at runtime # these are all equivalent hour = 24 # type: int hour: int; hour = 24 hour: int = 24 # you do not (!) need to initialize a variable to annotate it a: int # ok for type checking and runtime # which is useful in conditional branches child: bool if age < 18: child = True else: child = False # annotations for classes are for instance variables (those created in __init__ or __new__) class Battery: charge_percent: int = 100 # this is an instance variable with a default value capacity: int # an instance variable without a default # you can use the ClassVar annotation to make the variable a class variable instead of an instance variable. class Car: seats: ClassVar[int] = 4 passengers: ClassVar[List[str]] # You can also declare the type of an attribute in __init__ class Box: def __init__(self) -> None: self.items: List[str] = [] Please see :ref:`python-36` for more on mypy's compatibility with Python 3.6's new features. 
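The cheat sheet above leaves ``TypeVar`` and generic functions as a TODO; as a rough sketch (the names are illustrative), a simple generic function ties its return type to the element type of its argument:

.. code-block:: python

    from typing import Sequence, TypeVar

    T = TypeVar('T')

    def first(seq: Sequence[T]) -> T:  # illustrative generic function
        # the return type varies with the element type of the argument
        return seq[0]

    n = first([1, 2, 3])    # mypy infers that n is an int
    s = first(['a', 'b'])   # mypy infers that s is a str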
mypy-0.560/docs/source/class_basics.rst0000644€tŠÔÚ€2›s®0000003576613215007205024323 0ustar jukkaDROPBOX\Domain Users00000000000000Class basics ============ Instance and class attributes ***************************** Mypy type checker detects if you are trying to access a missing attribute, which is a very common programming error. For this to work correctly, instance and class attributes must be defined or initialized within the class. Mypy infers the types of attributes: .. code-block:: python class A: def __init__(self, x: int) -> None: self.x = x # Attribute x of type int a = A(1) a.x = 2 # OK a.y = 3 # Error: A has no attribute y This is a bit like each class having an implicitly defined ``__slots__`` attribute. This is only enforced during type checking and not when your program is running. You can declare types of variables in the class body explicitly using a type comment: .. code-block:: python class A: x = None # type: List[int] # Declare attribute x of type List[int] a = A() a.x = [1] # OK As in Python, a variable defined in the class body can used as a class or an instance variable. Similarly, you can give explicit types to instance variables defined in a method: .. code-block:: python class A: def __init__(self) -> None: self.x = [] # type: List[int] def f(self) -> None: self.y = 0 # type: Any You can only define an instance variable within a method if you assign to it explicitly using ``self``: .. code-block:: python class A: def __init__(self) -> None: self.y = 1 # Define y a = self a.x = 1 # Error: x not defined Overriding statically typed methods *********************************** When overriding a statically typed method, mypy checks that the override has a compatible signature: .. code-block:: python class A: def f(self, x: int) -> None: ... class B(A): def f(self, x: str) -> None: # Error: type of x incompatible ... class C(A): def f(self, x: int, y: int) -> None: # Error: too many arguments ... class D(A): def f(self, x: int) -> None: # OK ... .. note:: You can also vary return types **covariantly** in overriding. For example, you could override the return type ``object`` with a subtype such as ``int``. You can also override a statically typed method with a dynamically typed one. This allows dynamically typed code to override methods defined in library classes without worrying about their type signatures. There is no runtime enforcement that the method override returns a value that is compatible with the original return type, since annotations have no effect at runtime: .. code-block:: python class A: def inc(self, x: int) -> int: return x + 1 class B(A): def inc(self, x): # Override, dynamically typed return 'hello' b = B() print(b.inc(1)) # hello a = b # type: A print(a.inc(1)) # hello Abstract base classes and multiple inheritance ********************************************** Mypy supports Python abstract base classes (ABCs). Abstract classes have at least one abstract method or property that must be implemented by a subclass. You can define abstract base classes using the ``abc.ABCMeta`` metaclass, and the ``abc.abstractmethod`` and ``abc.abstractproperty`` function decorators. Example: .. code-block:: python from abc import ABCMeta, abstractmethod class A(metaclass=ABCMeta): @abstractmethod def foo(self, x: int) -> None: pass @abstractmethod def bar(self) -> str: pass class B(A): def foo(self, x: int) -> None: ... 
def bar(self) -> str: return 'x' a = A() # Error: A is abstract b = B() # OK Note that mypy performs checking for unimplemented abstract methods even if you omit the ``ABCMeta`` metaclass. This can be useful if the metaclass would cause runtime metaclass conflicts. A class can inherit any number of classes, both abstract and concrete. As with normal overrides, a dynamically typed method can implement a statically typed method defined in any base class, including an abstract method defined in an abstract base class. You can implement an abstract property using either a normal property or an instance variable. .. _protocol-types: Protocols and structural subtyping ********************************** Mypy supports two ways of deciding whether two classes are compatible as types: nominal subtyping and structural subtyping. *Nominal* subtyping is strictly based on the class hierarchy. If class ``D`` inherits class ``C``, it's also a subtype of ``C``, and instances of ``D`` can be used when ``C`` instances are expected. This form of subtyping is used by default in mypy, since it's easy to understand and produces clear and concise error messages, and since it matches how the native ``isinstance()`` check works -- based on class hierarchy. *Structural* subtyping can also be useful. Class ``D`` is a structural subtype of class ``C`` if the former has all attributes and methods of the latter, and with compatible types. Structural subtyping can be seen as a static equivalent of duck typing, which is well known to Python programmers. Mypy provides support for structural subtyping via protocol classes described below. See `PEP 544 `_ for the detailed specification of protocols and structural subtyping in Python. .. _predefined_protocols: Predefined protocols ******************** The ``typing`` module defines various protocol classes that correspond to common Python protocols, such as ``Iterable[T]``. If a class defines a suitable ``__iter__`` method, mypy understands that it implements the iterable protocol and is compatible with ``Iterable[T]``. For example, ``IntList`` below is iterable, over ``int`` values: .. code-block:: python from typing import Iterator, Iterable, Optional class IntList: def __init__(self, value: int, next: Optional[IntList]) -> None: self.value = value self.next = next def __iter__(self) -> Iterator[int]: current = self while current: yield current.value current = current.next def print_numbered(items: Iterable[int]) -> None: for n, x in enumerate(items): print(n + 1, x) x = IntList(3, IntList(5, None)) print_numbered(x) # OK print_numbered([4, 5]) # Also OK The subsections below introduce all built-in protocols defined in ``typing`` and the signatures of the corresponding methods you need to define to implement each protocol (the signatures can be left out, as always, but mypy won't type check unannotated methods). Iteration protocols ................... The iteration protocols are useful in many contexts. For example, they allow iteration of objects in for loops. ``Iterable[T]`` --------------- The :ref:`example above ` has a simple implementation of an ``__iter__`` method. .. code-block:: python def __iter__(self) -> Iterator[T] ``Iterator[T]`` --------------- .. code-block:: python def __next__(self) -> T def __iter__(self) -> Iterator[T] Collection protocols .................... Many of these are implemented by built-in container types such as ``list`` and ``dict``, and these are also useful for user-defined collection objects. 
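As a quick sketch (the class name is made up for illustration), a small user-defined collection only needs the right methods to be accepted where the ``Sized`` and ``Container[int]`` protocols described below are expected:

.. code-block:: python

    from typing import Container, List, Sized

    class IntBag:  # illustrative user-defined collection
        def __init__(self) -> None:
            self.items: List[int] = []

        def __len__(self) -> int:
            return len(self.items)

        def __contains__(self, x: object) -> bool:
            return x in self.items

    def describe(c: Sized) -> str:
        return '{} item(s)'.format(len(c))

    describe(IntBag())              # OK: IntBag implements __len__
    box: Container[int] = IntBag()  # OK: IntBag implements __contains__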
``Sized`` --------- This is a type for objects that support ``len(x)``. .. code-block:: python def __len__(self) -> int ``Container[T]`` ---------------- This is a type for objects that support the ``in`` operator. .. code-block:: python def __contains__(self, x: object) -> bool ``Collection[T]`` ----------------- .. code-block:: python def __len__(self) -> int def __iter__(self) -> Iterator[T] def __contains__(self, x: object) -> bool One-off protocols ................. These protocols are typically only useful with a single standard library function or class. ``Reversible[T]`` ----------------- This is a type for objects that support ``reversed(x)``. .. code-block:: python def __reversed__(self) -> Iterator[T] ``SupportsAbs[T]`` ------------------ This is a type for objects that support ``abs(x)``. ``T`` is the type of value returned by ``abs(x)``. .. code-block:: python def __abs__(self) -> T ``SupportsBytes`` ----------------- This is a type for objects that support ``bytes(x)``. .. code-block:: python def __bytes__(self) -> bytes ``SupportsComplex`` ------------------- This is a type for objects that support ``complex(x)``. .. code-block:: python def __complex__(self) -> complex ``SupportsFloat`` ----------------- This is a type for objects that support ``float(x)``. .. code-block:: python def __float__(self) -> float ``SupportsInt`` --------------- This is a type for objects that support ``int(x)``. .. code-block:: python def __int__(self) -> int ``SupportsRound[T]`` -------------------- This is a type for objects that support ``round(x)``. .. code-block:: python def __round__(self) -> T Async protocols ............... These protocols can be useful in async code. ``Awaitable[T]`` ---------------- .. code-block:: python def __await__(self) -> Generator[Any, None, T] ``AsyncIterable[T]`` -------------------- .. code-block:: python def __aiter__(self) -> AsyncIterator[T] ``AsyncIterator[T]`` -------------------- .. code-block:: python def __anext__(self) -> Awaitable[T] def __aiter__(self) -> AsyncIterator[T] Context manager protocols ......................... There are two protocols for context managers -- one for regular context managers and one for async ones. These allow defining objects that can be used in ``with`` and ``async with`` statements. ``ContextManager[T]`` --------------------- .. code-block:: python def __enter__(self) -> T def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType]) -> Optional[bool] ``AsyncContextManager[T]`` -------------------------- .. code-block:: python def __aenter__(self) -> Awaitable[T] def __aexit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType]) -> Awaitable[Optional[bool]] Simple user-defined protocols ***************************** You can define your own protocol class by inheriting the special ``typing_extensions.Protocol`` class: .. code-block:: python from typing import Iterable from typing_extensions import Protocol class SupportsClose(Protocol): def close(self) -> None: ... # Explicit '...' class Resource: # No SupportsClose base class! # ... some methods ... def close(self) -> None: self.resource.release() def close_all(items: Iterable[SupportsClose]) -> None: for item in items: item.close() close_all([Resource(), open('some/file')]) # Okay! ``Resource`` is a subtype of the ``SupportClose`` protocol since it defines a compatible ``close`` method. 
Regular file objects returned by ``open()`` are similarly compatible with the protocol, as they support ``close()``. .. note:: The ``Protocol`` base class is currently provided in the ``typing_extensions`` package. Once structural subtyping is mature and `PEP 544 `_ has been accepted, ``Protocol`` will be included in the ``typing`` module. Defining subprotocols and subclassing protocols *********************************************** You can also define subprotocols. Existing protocols can be extended and merged using multiple inheritance. Example: .. code-block:: python # ... continuing from the previous example class SupportsRead(Protocol): def read(self, amount: int) -> bytes: ... class TaggedReadableResource(SupportsClose, SupportsRead, Protocol): label: str class AdvancedResource(Resource): def __init__(self, label: str) -> None: self.label = label def read(self, amount: int) -> bytes: # some implementation ... resource: TaggedReadableResource resource = AdvancedResource('handle with care') # OK Note that inheriting from an existing protocol does not automatically turn the subclass into a protocol -- it just creates a regular (non-protocol) class or ABC that implements the given protocol (or protocols). The ``typing_extensions.Protocol`` base class must always be explicitly present if you are defining a protocol: .. code-block:: python class NewProtocol(SupportsClose): # This is NOT a protocol new_attr: int class Concrete: new_attr: int = 0 def close(self) -> None: ... # Error: nominal subtyping used by default x: NewProtocol = Concrete() # Error! You can also include default implementations of methods in protocols. If you explicitly subclass these protocols you can inherit these default implementations. Explicitly including a protocol as a base class is also a way of documenting that your class implements a particular protocol, and it forces mypy to verify that your class implementation is actually compatible with the protocol. .. note:: You can use Python 3.6 variable annotations (`PEP 526 `_) to declare protocol attributes. On Python 2.7 and earlier Python 3 versions you can use type comments and properties. Recursive protocols ******************* Protocols can be recursive (self-referential) and mutually recursive. This is useful for declaring abstract recursive collections such as trees and linked lists: .. code-block:: python from typing import TypeVar, Optional from typing_extensions import Protocol class TreeLike(Protocol): value: int @property def left(self) -> Optional['TreeLike']: ... @property def right(self) -> Optional['TreeLike']: ... class SimpleTree: def __init__(self, value: int) -> None: self.value = value self.left: Optional['SimpleTree'] = None self.right: Optional['SimpleTree'] = None root = SimpleTree(0) # type: TreeLike # OK Using ``isinstance()`` with protocols ************************************* You can use a protocol class with ``isinstance()`` if you decorate it with the ``typing_extensions.runtime`` class decorator. The decorator adds support for basic runtime structural checks: .. code-block:: python from typing_extensions import Protocol, runtime @runtime class Portable(Protocol): handles: int class Mug: def __init__(self) -> None: self.handles = 1 mug = Mug() if isinstance(mug, Portable): use(mug.handles) # Works statically and at runtime ``isinstance()`` also works with the :ref:`predefined protocols ` in ``typing`` such as ``Iterable``. .. note:: ``isinstance()`` with protocols is not completely safe at runtime. 
For example, signatures of methods are not checked. The runtime implementation only checks that all protocol members are defined. mypy-0.560/docs/source/command_line.rst0000644€tŠÔÚ€2›s®0000005171113215007205024303 0ustar jukkaDROPBOX\Domain Users00000000000000.. _command-line: The mypy command line ===================== This section documents many of mypy's command line flags. A quick summary of command line flags can always be printed using the ``-h`` flag (or its long form ``--help``):: $ mypy -h usage: mypy [-h] [-v] [-V] [--python-version x.y] [--platform PLATFORM] [-2] [--ignore-missing-imports] [--follow-imports {normal,silent,skip,error}] [--disallow-any-{unimported,expr,decorated,explicit,generics}] [--disallow-untyped-calls] [--disallow-untyped-defs] [--check-untyped-defs] [--disallow-subclassing-any] [--warn-incomplete-stub] [--warn-redundant-casts] [--no-warn-no-return] [--warn-return-any] [--warn-unused-ignores] [--show-error-context] [--no-implicit-optional] [-i] [--quick-and-dirty] [--cache-dir DIR] [--skip-version-check] [--strict-optional] [--strict-optional-whitelist [GLOB [GLOB ...]]] [--junit-xml JUNIT_XML] [--pdb] [--show-traceback] [--stats] [--inferstats] [--custom-typing MODULE] [--custom-typeshed-dir DIR] [--scripts-are-modules] [--config-file CONFIG_FILE] [--show-column-numbers] [--find-occurrences CLASS.MEMBER] [--strict] [--shadow-file SOURCE_FILE SHADOW_FILE] [--any-exprs-report DIR] [--cobertura-xml-report DIR] [--html-report DIR] [--linecount-report DIR] [--linecoverage-report DIR] [--memory-xml-report DIR] [--txt-report DIR] [--xml-report DIR] [--xslt-html-report DIR] [--xslt-txt-report DIR] [-m MODULE] [-c PROGRAM_TEXT] [-p PACKAGE] [files [files ...]] (etc., too long to show everything here) Specifying files and directories to be checked ********************************************** You've already seen ``mypy program.py`` as a way to type check the file ``program.py``. More generally you can pass any number of files and directories on the command line and they will all be type checked together. - Files ending in ``.py`` (and stub files ending in ``.pyi``) are checked as Python modules. - Files not ending in ``.py`` or ``.pyi`` are assumed to be Python scripts and checked as such. - Directories representing Python packages (i.e. containing a ``__init__.py[i]`` file) are checked as Python packages; all submodules and subpackages will be checked (subpackages must themselves have a ``__init__.py[i]`` file). - Directories that don't represent Python packages (i.e. not directly containing an ``__init__.py[i]`` file) are checked as follows: - All ``*.py[i]`` files contained directly therein are checked as toplevel Python modules; - All packages contained directly therein (i.e. immediate subdirectories with an ``__init__.py[i]`` file) are checked as toplevel Python packages. One more thing about checking modules and packages: if the directory *containing* a module or package specified on the command line has an ``__init__.py[i]`` file, mypy assigns these an absolute module name by crawling up the path until no ``__init__.py[i]`` file is found. For example, suppose we run the command ``mypy foo/bar/baz.py`` where ``foo/bar/__init__.py`` exists but ``foo/__init__.py`` does not. Then the module name assumed is ``bar.baz`` and the directory ``foo`` is added to mypy's module search path. On the other hand, if ``foo/bar/__init__.py`` did not exist, ``foo/bar`` would be added to the module search path instead, and the module name assumed is just ``baz``. 
If a script (a file not ending in ``.py[i]``) is processed, the module name assumed is always ``__main__`` (matching the behavior of the Python interpreter). Other ways of specifying code to be checked ******************************************* The flag ``-m`` (long form: ``--module``) lets you specify a module name to be found using the default module search path. The module name may contain dots. For example:: $ mypy -m html.parser will type check the module ``html.parser`` (this happens to be a library stub). The flag ``-p`` (long form: ``--package``) is similar to ``-m`` but you give it a package name and it will type check all submodules and subpackages (recursively) of that package. (If you pass a package name to ``-m`` it will just type check the package's ``__init__.py`` and anything imported from there.) For example:: $ mypy -p html will type check the entire ``html`` package (of library stubs). Finally the flag ``-c`` (long form: ``--command``) will take a string from the command line and type check it as a small program. For example:: $ mypy -c 'x = [1, 2]; print(x())' will type check that little program (and complain that ``List[int]`` is not callable). Reading a list of files from a file *********************************** Finally, any command-line argument starting with ``@`` reads additional command-line arguments from the file following the ``@`` character. This is primarily useful if you have a file containing a list of files that you want to be type-checked: instead of using shell syntax like:: mypy $(cat file_of_files) you can use this instead:: mypy @file_of_files Such a file can also contain other flags, but a preferred way of reading flags (not files) from a file is to use a :ref:`configuration file `. .. _finding-imports: How imports are found ********************* When mypy encounters an `import` statement it tries to find the module on the file system, similar to the way Python finds it. However, there are some differences. First, mypy has its own search path. This is computed from the following items: - The ``MYPYPATH`` environment variable (a colon-separated list of directories). - The directories containing the sources given on the command line (see below). - The relevant directories of the `typeshed `_ repo. For sources given on the command line, the path is adjusted by crawling up from the given file or package to the nearest directory that does not contain an ``__init__.py`` or ``__init__.pyi`` file. Second, mypy searches for stub files in addition to regular Python files and packages. The rules for searching a module ``foo`` are as follows: - The search looks in each of the directories in the search path (see above) until a match is found. - If a package named ``foo`` is found (i.e. a directory ``foo`` containing an ``__init__.py`` or ``__init__.pyi`` file) that's a match. - If a stub file named ``foo.pyi`` is found, that's a match. - If a Python module named ``foo.py`` is found, that's a match. These matches are tried in order, so that if multiple matches are found in the same directory on the search path (e.g. a package and a Python file, or a stub file and a Python file) the first one in the above list wins. In particular, if a Python file and a stub file are both present in the same directory on the search path, only the stub file is used. (However, if the files are in different directories, the one found in the earlier directory is used.) 
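As an illustration of these precedence rules (the file names here are
hypothetical), assume both of the following directories are on the search
path, with ``stubs`` listed before ``src``::

    stubs/
        legacy.pyi        # wins over src/legacy.py: found in an earlier directory
    src/
        frobnicate.py     # implementation
        frobnicate.pyi    # same directory as the .py file: only the stub is used
        legacy.py         # ignored, because stubs/legacy.pyi was found first
        widgets.py        # used: no stub exists anywhere on the search path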
NOTE: These rules are relevant to the following section too: the ``--follow-imports`` flag described below is applied *after* the above algorithm has determined which package, stub or module to use. .. _follow-imports: Following imports or not? ************************* When you're first attacking a large existing codebase with mypy, you may only want to check selected files. For example, you may only want to check those files to which you have already added annotations. This is easily accomplished using a shell pipeline like this:: mypy $(find . -name \*.py | xargs grep -l '# type:') (While there are many improvements possible to make this example more robust, this is not the place for a tutorial in shell programming.) However, by default mypy doggedly tries to :ref:`follow imports `. This may cause several types of problems that you may want to silence during your initial conquest: - Your code may import library modules for which no stub files exist yet. This can cause a lot of errors like the following:: main.py:1: error: No library stub file for standard library module 'antigravity' main.py:2: error: No library stub file for module 'flask' main.py:3: error: Cannot find module named 'sir_not_appearing_in_this_film' If you see only a few of these you may be able to silence them by putting ``# type: ignore`` on the respective ``import`` statements, but it's usually easier to silence all such errors by using :ref:`--ignore-missing-imports `. - Your project's directory structure may hinder mypy in finding certain modules that are part of your project, e.g. modules hidden away in a subdirectory that's not a package. You can usually deal with this by setting the ``MYPYPATH`` variable (see :ref:`finding-imports`). - When following imports mypy may find a module that's part of your project but which you haven't annotated yet, mypy may report errors for the top level code in that module (where the top level includes class bodies and function/method default values). Here the ``--follow-imports`` flag comes in handy. The ``--follow-imports`` flag takes a mandatory string value that can take one of four values. It only applies to modules for which a ``.py`` file is found (but no corresponding ``.pyi`` stub file) and that are not given on the command line. Passing a package or directory on the command line implies all modules in that package or directory. The four possible values are: - ``normal`` (the default) follow imports normally and type check all top level code (as well as the bodies of all functions and methods with at least one type annotation in the signature). - ``silent`` follow imports normally and even "type check" them normally, but *suppress any error messages*. This is typically the best option for a new codebase. - ``skip`` *don't* follow imports, silently replacing the module (and everything imported *from* it) with an object of type ``Any``. (This option used to be known as ``--silent-imports`` and while it is very powerful it can also cause hard-to-debug errors, hence the recommendation of using ``silent`` instead.) - ``error`` the same behavior as ``skip`` but not quite as silent -- it flags the import as an error, like this:: main.py:1: note: Import of 'submodule' ignored main.py:1: note: (Using --follow-imports=error, module not passed on command line) .. _disallow-any: Disallow Any Flags ****************** The ``--disallow-any`` family of flags disallows various types of ``Any`` in a module. 
The following options are available: - ``--disallow-any-unimported`` disallows usage of types that come from unfollowed imports (such types become aliases for ``Any``). Unfollowed imports occur either when the imported module does not exist or when ``--follow-imports=skip`` is set. - ``--disallow-any-expr`` disallows all expressions in the module that have type ``Any``. If an expression of type ``Any`` appears anywhere in the module mypy will output an error unless the expression is immediately used as an argument to ``cast`` or assigned to a variable with an explicit type annotation. In addition, declaring a variable of type ``Any`` or casting to type ``Any`` is not allowed. Note that calling functions that take parameters of type ``Any`` is still allowed. - ``--disallow-any-decorated`` disallows functions that have ``Any`` in their signature after decorator transformation. - ``--disallow-any-explicit`` disallows explicit ``Any`` in type positions such as type annotations and generic type parameters. - ``--disallow-any-generics`` disallows usage of generic types that do not specify explicit type parameters. Moreover, built-in collections (such as ``list`` and ``dict``) become disallowed as you should use their aliases from the typing module (such as ``List[int]`` and ``Dict[str, str]``). Additional command line flags ***************************** Here are some more useful flags: .. _ignore-missing-imports: - ``--ignore-missing-imports`` suppresses error messages about imports that cannot be resolved (see :ref:`follow-imports` for some examples). - ``--strict-optional`` enables experimental strict checking of ``Optional[...]`` types and ``None`` values. Without this option, mypy doesn't generally check the use of ``None`` values -- they are valid everywhere. See :ref:`strict_optional` for more about this feature. - ``--strict-optional-whitelist`` attempts to suppress strict Optional-related errors in non-whitelisted files. Takes an arbitrary number of globs as the whitelist. This option is intended to be used to incrementally roll out ``--strict-optional`` to a large codebase that already has mypy annotations. However, this flag comes with some significant caveats. It does not suppress all errors caused by turning on ``--strict-optional``, only most of them, so there may still be a bit of upfront work to be done before it can be used in CI. It will also suppress some errors that would be caught in a non-strict-Optional run. Therefore, when using this flag, you should also re-check your code without ``--strict-optional`` to ensure new type errors are not introduced. - ``--disallow-untyped-defs`` reports an error whenever it encounters a function definition without type annotations. - ``--check-untyped-defs`` is less severe than the previous option -- it type checks the body of every function, regardless of whether it has type annotations. (By default the bodies of functions without annotations are not type checked.) It will assume all arguments have type ``Any`` and always infer ``Any`` as the return type. - ``--disallow-incomplete-defs`` reports an error whenever it encounters a partly annotated function definition. - ``--disallow-untyped-calls`` reports an error whenever a function with type annotations calls a function defined without annotations. - ``--disallow-untyped-decorators`` reports an error whenever a function with type annotations is decorated with a decorator without annotations. .. 
_disallow-subclassing-any: - ``--disallow-subclassing-any`` reports an error whenever a class subclasses a value of type ``Any``. This may occur when the base class is imported from a module that doesn't exist (when using :ref:`--ignore-missing-imports `) or is ignored due to :ref:`--follow-imports=skip ` or a ``# type: ignore`` comment on the ``import`` statement. Since the module is silenced, the imported class is given a type of ``Any``. By default mypy will assume that the subclass correctly inherited the base class even though that may not actually be the case. This flag makes mypy raise an error instead. .. _incremental: - ``--incremental`` is an experimental option that enables a module cache. When enabled, mypy caches results from previous runs to speed up type checking. Incremental mode can help when most parts of your program haven't changed since the previous mypy run. A companion flag is ``--cache-dir DIR``, which specifies where the cache files are written. By default this is ``.mypy_cache`` in the current directory. While the cache is only read in incremental mode, it is written even in non-incremental mode, in order to "warm" the cache. To disable writing the cache, use ``--cache-dir=/dev/null`` (UNIX) or ``--cache-dir=nul`` (Windows). Cache files belonging to a different mypy version are ignored. .. _quick-mode: - ``--quick-and-dirty`` is an experimental, unsafe variant of :ref:`incremental mode `. Quick mode is faster than regular incremental mode, because it only re-checks modules that were modified since their cache file was last written (regular incremental mode also re-checks all modules that depend on one or more modules that were re-checked). Quick mode is unsafe because it may miss problems caused by a change in a dependency. Quick mode updates the cache, but regular incremental mode ignores cache files written by quick mode. - ``--python-version X.Y`` will make mypy typecheck your code as if it were run under Python version X.Y. Without this option, mypy will default to using whatever version of Python is running mypy. Note that the ``-2`` and ``--py2`` flags are aliases for ``--python-version 2.7``. See :ref:`version_and_platform_checks` for more about this feature. - ``--platform PLATFORM`` will make mypy typecheck your code as if it were run under the the given operating system. Without this option, mypy will default to using whatever operating system you are currently using. See :ref:`version_and_platform_checks` for more about this feature. - ``--show-column-numbers`` will add column offsets to error messages, for example, the following indicates an error in line 12, column 9 (note that column offsets are 0-based): .. code-block:: python main.py:12:9: error: Unsupported operand types for / ("int" and "str") - ``--scripts-are-modules`` will give command line arguments that appear to be scripts (i.e. files whose name does not end in ``.py``) a module name derived from the script name rather than the fixed name ``__main__``. This allows checking more than one script in a single mypy invocation. (The default ``__main__`` is technically more correct, but if you have many scripts that import a large package, the behavior enabled by this flag is often more convenient.) - ``--custom-typeshed-dir DIR`` specifies the directory where mypy looks for typeshed stubs, instead of the typeshed that ships with mypy. This is primarily intended to make it easier to test typeshed changes before submitting them upstream, but also allows you to use a forked version of typeshed. .. 
_config-file-flag: - ``--config-file CONFIG_FILE`` causes configuration settings to be read from the given file. By default settings are read from ``mypy.ini`` or ``setup.cfg`` in the current directory. Settings override mypy's built-in defaults and command line flags can override settings. See :ref:`config-file` for the syntax of configuration files. - ``--junit-xml JUNIT_XML`` will make mypy generate a JUnit XML test result document with type checking results. This can make it easier to integrate mypy with continuous integration (CI) tools. - ``--find-occurrences CLASS.MEMBER`` will make mypy print out all usages of a class member based on static type information. This feature is experimental. - ``--cobertura-xml-report DIR`` causes mypy to generate a Cobertura XML type checking coverage report. - ``--warn-no-return`` causes mypy to generate errors for missing return statements on some execution paths. Mypy doesn't generate these errors for functions with ``None`` or ``Any`` return types. Mypy also currently ignores functions with an empty body or a body that is just ellipsis (``...``), since these can be valid as abstract methods. This option is on by default. - ``--warn-return-any`` causes mypy to generate a warning when returning a value with type ``Any`` from a function declared with a non- ``Any`` return type. - ``--strict`` mode enables all optional error checking flags. You can see the list of flags enabled by strict mode in the full ``mypy -h`` output. .. _shadow-file: - ``--shadow-file SOURCE_FILE SHADOW_FILE`` makes mypy typecheck SHADOW_FILE in place of SOURCE_FILE. Primarily intended for tooling. Allows tooling to make transformations to a file before type checking without having to change the file in-place. (For example, tooling could use this to display the type of an expression by wrapping it with a call to reveal_type in the shadow file and then parsing the output.) .. _no-implicit-optional: - ``--no-implicit-optional`` causes mypy to stop treating arguments with a ``None`` default value as having an implicit ``Optional[...]`` type. For the remaining flags you can read the full ``mypy -h`` output. .. note:: Command line flags are liable to change between releases. .. _integrating-mypy: Integrating mypy into another Python application ************************************************ It is possible to integrate mypy into another Python 3 application by importing ``mypy.api`` and calling the ``run`` function with a parameter of type ``List[str]``, containing what normally would have been the command line arguments to mypy. Function ``run`` returns a ``Tuple[str, str, int]``, namely ``(, , )``, in which ```` is what mypy normally writes to ``sys.stdout``, ```` is what mypy normally writes to ``sys.stderr`` and ``exit_status`` is the exit status mypy normally returns to the operating system. A trivial example of using the api is the following:: import sys from mypy import api result = api.run(sys.argv[1:]) if result[0]: print('\nType checking report:\n') print(result[0]) # stdout if result[1]: print('\nError report:\n') print(result[1]) # stderr print ('\nExit status:', result[2]) mypy-0.560/docs/source/common_issues.rst0000644€tŠÔÚ€2›s®0000003630313215007206024542 0ustar jukkaDROPBOX\Domain Users00000000000000.. _common_issues: Common issues ============= This section has examples of cases when you need to update your code to use static typing, and ideas for working around issues if mypy doesn't work as expected. 
Statically typed code is often identical to normal Python code, but sometimes you need to do things slightly differently. Can't install mypy using pip ---------------------------- If installation fails, you've probably hit one of these issues: * Mypy needs Python 3.4 or later to run. * You may have to run pip like this: ``python3 -m pip install mypy``. .. _annotations_needed: No errors reported for obviously wrong code ------------------------------------------- There are several common reasons why obviously wrong code is not flagged as an error. - **The function containing the error is not annotated.** Functions that do not have any annotations (neither for any argument nor for the return type) are not type-checked, and even the most blatant type errors (e.g. ``2 + 'a'``) pass silently. The solution is to add annotations. Example: .. code-block:: python def foo(a): return '(' + a.split() + ')' # No error! This gives no error even though ``a.split()`` is "obviously" a list (the author probably meant ``a.strip()``). The error is reported once you add annotations: .. code-block:: python def foo(a: str) -> str: return '(' + a.split() + ')' # error: Unsupported operand types for + ("str" and List[str]) If you don't know what types to add, you can use ``Any``, but beware: - **One of the values involved has type ``Any``.** Extending the above example, if we were to leave out the annotation for ``a``, we'd get no error: .. code-block:: python def foo(a) -> str: return '(' + a.split() + ')' # No error! The reason is that if the type of ``a`` is unknown, the type of ``a.split()`` is also unknown, so it is inferred as having type ``Any``, and it is no error to add a string to an ``Any``. If you're having trouble debugging such situations, :ref:`reveal_type() ` might come in handy. Note that sometimes library stubs have imprecise type information, e.g. the ``pow()`` builtin returns ``Any`` (see `typeshed issue 285 `_ for the reason). - **Some imports may be silently ignored**. Another source of unexpected ``Any`` values are the :ref:`"--ignore-missing-imports" ` and :ref:`"--follow-imports=skip" ` flags. When you use ``--ignore-missing-imports``, any imported module that cannot be found is silently replaced with ``Any``. When using ``--follow-imports=skip`` the same is true for modules for which a ``.py`` file is found but that are not specified on the command line. (If a ``.pyi`` stub is found it is always processed normally, regardless of the value of ``--follow-imports``.) To help debug the former situation (no module found at all) leave out ``--ignore-missing-imports``; to get clarity about the latter use ``--follow-imports=error``. You can read up about these and other useful flags in :ref:`command-line`. .. _silencing_checker: Spurious errors and locally silencing the checker ------------------------------------------------- You can use a ``# type: ignore`` comment to silence the type checker on a particular line. For example, let's say our code is using the C extension module ``frobnicate``, and there's no stub available. Mypy will complain about this, as it has no information about the module: .. code-block:: python import frobnicate # Error: No module "frobnicate" frobnicate.start() You can add a ``# type: ignore`` comment to tell mypy to ignore this error: .. code-block:: python import frobnicate # type: ignore frobnicate.start() # Okay! The second line is now fine, since the ignore comment causes the name ``frobnicate`` to get an implicit ``Any`` type. .. 
note:: The ``# type: ignore`` comment will only assign the implicit ``Any`` type if mypy cannot find information about that particular module. So, if we did have a stub available for ``frobnicate`` then mypy would ignore the ``# type: ignore`` comment and typecheck the stub as usual. Types of empty collections -------------------------- You often need to specify the type when you assign an empty list or dict to a new variable, as mentioned earlier: .. code-block:: python a = [] # type: List[int] Without the annotation mypy can't always figure out the precise type of ``a``. You can use a simple empty list literal in a dynamically typed function (as the type of ``a`` would be implicitly ``Any`` and need not be inferred), if type of the variable has been declared or inferred before, or if you perform a simple modification operation in the same scope (such as ``append`` for a list): .. code-block:: python a = [] # Okay because followed by append, inferred type List[int] for i in range(n): a.append(i * i) However, in more complex cases an explicit type annotation can be required (mypy will tell you this). Often the annotation can make your code easier to understand, so it doesn't only help mypy but everybody who is reading the code! Redefinitions with incompatible types ------------------------------------- Each name within a function only has a single 'declared' type. You can reuse for loop indices etc., but if you want to use a variable with multiple types within a single function, you may need to declare it with the ``Any`` type. .. code-block:: python def f() -> None: n = 1 ... n = 'x' # Type error: n has type int .. note:: This limitation could be lifted in a future mypy release. Note that you can redefine a variable with a more *precise* or a more concrete type. For example, you can redefine a sequence (which does not support ``sort()``) as a list and sort it in-place: .. code-block:: python def f(x: Sequence[int]) -> None: # Type of x is Sequence[int] here; we don't know the concrete type. x = list(x) # Type of x is List[int] here. x.sort() # Okay! .. _variance: Invariance vs covariance ------------------------ Most mutable generic collections are invariant, and mypy considers all user-defined generic classes invariant by default (see :ref:`variance-of-generics` for motivation). This could lead to some unexpected errors when combined with type inference. For example: .. code-block:: python class A: ... class B(A): ... lst = [A(), A()] # Inferred type is List[A] new_lst = [B(), B()] # inferred type is List[B] lst = new_lst # mypy will complain about this, because List is invariant Possible strategies in such situations are: * Use an explicit type annotation: .. code-block:: python new_lst: List[A] = [B(), B()] lst = new_lst # OK * Make a copy of the right hand side: .. code-block:: python lst = list(new_lst) # Also OK * Use immutable collections as annotations whenever possible: .. code-block:: python def f_bad(x: List[A]) -> A: return x[0] f_bad(new_lst) # Fails def f_good(x: Sequence[A]) -> A: return x[0] f_good(new_lst) # OK Covariant subtyping of mutable protocol members is rejected ----------------------------------------------------------- Mypy rejects this because this is potentially unsafe. Consider this example: .. code-block:: python from typing_extensions import Protocol class P(Protocol): x: float def fun(arg: P) -> None: arg.x = 3.14 class C: x = 42 c = C() fun(c) # This is not safe c.x << 5 # Since this will fail! 
To work around this problem consider whether "mutating" is actually part of a protocol. If not, then one can use a ``@property`` in the protocol definition: .. code-block:: python from typing_extensions import Protocol class P(Protocol): @property def x(self) -> float: pass def fun(arg: P) -> None: ... class C: x = 42 fun(C()) # OK Declaring a supertype as variable type -------------------------------------- Sometimes the inferred type is a subtype (subclass) of the desired type. The type inference uses the first assignment to infer the type of a name (assume here that ``Shape`` is the base class of both ``Circle`` and ``Triangle``): .. code-block:: python shape = Circle() # Infer shape to be Circle ... shape = Triangle() # Type error: Triangle is not a Circle You can just give an explicit type for the variable in cases such the above example: .. code-block:: python shape = Circle() # type: Shape # The variable s can be any Shape, # not just Circle ... shape = Triangle() # OK Complex type tests ------------------ Mypy can usually infer the types correctly when using ``isinstance()`` type tests, but for other kinds of checks you may need to add an explicit type cast: .. code-block:: python def f(o: object) -> None: if type(o) is int: o = cast(int, o) g(o + 1) # This would be an error without the cast ... else: ... .. note:: Note that the ``object`` type used in the above example is similar to ``Object`` in Java: it only supports operations defined for *all* objects, such as equality and ``isinstance()``. The type ``Any``, in contrast, supports all operations, even if they may fail at runtime. The cast above would have been unnecessary if the type of ``o`` was ``Any``. Mypy can't infer the type of ``o`` after the ``type()`` check because it only knows about ``isinstance()`` (and the latter is better style anyway). We can write the above code without a cast by using ``isinstance()``: .. code-block:: python def f(o: object) -> None: if isinstance(o, int): # Mypy understands isinstance checks g(o + 1) # Okay; type of o is inferred as int here ... Type inference in mypy is designed to work well in common cases, to be predictable and to let the type checker give useful error messages. More powerful type inference strategies often have complex and difficult-to-predict failure modes and could result in very confusing error messages. The tradeoff is that you as a programmer sometimes have to give the type checker a little help. .. _version_and_platform_checks: Python version and system platform checks ----------------------------------------- Mypy supports the ability to perform Python version checks and platform checks (e.g. Windows vs Posix), ignoring code paths that won't be run on the targeted Python version or platform. This allows you to more effectively typecheck code that supports multiple versions of Python or multiple operating systems. More specifically, mypy will understand the use of ``sys.version_info`` and ``sys.platform`` checks within ``if/elif/else`` statements. For example: .. 
code-block:: python import sys # Distinguishing between different versions of Python: if sys.version_info >= (3, 5): # Python 3.5+ specific definitions and imports elif sys.version_info[0] >= 3: # Python 3 specific definitions and imports else: # Python 2 specific definitions and imports # Distinguishing between different operating systems: if sys.platform.startswith("linux"): # Linux-specific code elif sys.platform == "darwin": # Mac-specific code elif sys.platform == "win32": # Windows-specific code else: # Other systems .. note:: Mypy currently does not support more complex checks, and does not assign any special meaning when assigning a ``sys.version_info`` or ``sys.platform`` check to a variable. This may change in future versions of mypy. By default, mypy will use your current version of Python and your current operating system as default values for ``sys.version_info`` and ``sys.platform``. To target a different Python version, use the ``--python-version X.Y`` flag. For example, to verify your code typechecks if were run using Python 2, pass in ``--python-version 2.7`` from the command line. Note that you do not need to have Python 2.7 installed to perform this check. To target a different operating system, use the ``--platform PLATFORM`` flag. For example, to verify your code typechecks if it were run in Windows, pass in ``--platform win32``. See the documentation for `sys.platform `_ for examples of valid platform parameters. .. _reveal-type: Displaying the type of an expression ------------------------------------ You can use ``reveal_type(expr)`` to ask mypy to display the inferred static type of an expression. This can be useful when you don't quite understand how mypy handles a particular piece of code. Example: .. code-block:: python reveal_type((1, 'hello')) # Revealed type is 'Tuple[builtins.int, builtins.str]' .. note:: ``reveal_type`` is only understood by mypy and doesn't exist in Python, if you try to run your program. You'll have to remove any ``reveal_type`` calls before you can run your code. ``reveal_type`` is always available and you don't need to import it. .. _import-cycles: Import cycles ------------- An import cycle occurs where module A imports module B and module B imports module A (perhaps indirectly, e.g. ``A -> B -> C -> A``). Sometimes in order to add type annotations you have to add extra imports to a module and those imports cause cycles that didn't exist before. If those cycles become a problem when running your program, there's a trick: if the import is only needed for type annotations in forward references (string literals) or comments, you can write the imports inside ``if TYPE_CHECKING:`` so that they are not executed at runtime. Example: File ``foo.py``: .. code-block:: python from typing import List, TYPE_CHECKING if TYPE_CHECKING: import bar def listify(arg: 'bar.BarClass') -> 'List[bar.BarClass]': return [arg] File ``bar.py``: .. code-block:: python from typing import List from foo import listify class BarClass: def listifyme(self) -> 'List[BarClass]': return listify(self) .. note:: The ``TYPE_CHECKING`` constant defined by the ``typing`` module is ``False`` at runtime but ``True`` while type checking. Python 3.5.1 doesn't have ``typing.TYPE_CHECKING``. An alternative is to define a constant named ``MYPY`` that has the value ``False`` at runtime. Mypy considers it to be ``True`` when type checking. Here's the above example modified to use ``MYPY``: .. 
code-block:: python from typing import List MYPY = False if MYPY: import bar def listify(arg: 'bar.BarClass') -> 'List[bar.BarClass]': return [arg] .. _silencing-linters: Silencing linters ----------------- In some cases, linters will complain about unused imports or code. In these cases, you can silence them with a comment after type comments, or on the same line as the import: .. code-block:: python # to silence complaints about unused imports from typing import List # noqa a = None # type: List[int] To silence the linter on the same line as a type comment put the linter comment *after* the type comment: .. code-block:: python a = some_complex_thing() # type: ignore # noqa mypy-0.560/docs/source/conf.py0000644€tŠÔÚ€2›s®0000002030313215007205022414 0ustar jukkaDROPBOX\Domain Users00000000000000# -*- coding: utf-8 -*- # # Mypy documentation build configuration file, created by # sphinx-quickstart on Sun Sep 14 19:50:35 2014. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('../..')) from mypy.version import __version__ as mypy_version # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Mypy' copyright = u'2016, Jukka Lehtosalo' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = mypy_version.split('-')[0] # The full version, including alpha/beta/rc tags. release = mypy_version # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. 
#show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. try: import sphinx_rtd_theme except: html_theme = 'default' else: html_theme = 'sphinx_rtd_theme' html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". #html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'Mypydoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). 
#'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'Mypy.tex', u'Mypy Documentation', u'Jukka', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'mypy', u'Mypy Documentation', [u'Jukka Lehtosalo'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'Mypy', u'Mypy Documentation', u'Jukka', 'Mypy', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False rst_prolog = '.. |...| unicode:: U+2026 .. ellipsis\n' mypy-0.560/docs/source/config_file.rst0000644€tŠÔÚ€2›s®0000002220313215007205024114 0ustar jukkaDROPBOX\Domain Users00000000000000.. _config-file: The mypy configuration file =========================== Mypy supports reading configuration settings from a file. By default it uses the file ``mypy.ini`` (with fallback to ``setup.cfg``) in the current directory; the ``--config-file`` command-line flag can be used to read a different file instead (see :ref:`--config-file `). It is important to understand that there is no merging of configuration files, as it would lead to ambiguity. The ``--config-file`` flag has the highest precedence and must be correct; otherwise mypy will report an error and exit. Without command line option, mypy will look for defaults, but will use only one of them. The first one to read is ``mypy.ini``, and then ``setup.cfg``. Most flags correspond closely to :ref:`command-line flags ` but there are some differences in flag names and some flags may take a different value based on the module being processed. The configuration file format is the usual `ini file `_ format. It should contain section names in square brackets and flag settings of the form `NAME = VALUE`. Comments start with ``#`` characters. - A section named ``[mypy]`` must be present. This specifies the global flags. The ``setup.cfg`` file is an exception to this. - Additional sections named ``[mypy-PATTERN1,PATTERN2,...]`` may be present, where ``PATTERN1``, ``PATTERN2`` etc. are `fnmatch patterns `_ separated by commas. 
These sections specify additional flags that only apply to *modules* whose name matches at least one of the patterns. .. note:: The ``warn_unused_configs`` flag may be useful to debug misspelled section names. Global flags ************ The following global flags may only be set in the global section (``[mypy]``). - ``python_version`` (string) specifies the Python version used to parse and check the target program. The format is ``DIGIT.DIGIT`` for example ``2.7``. The default is the version of the Python interpreter used to run mypy. - ``platform`` (string) specifies the OS platform for the target program, for example ``darwin`` or ``win32`` (meaning OS X or Windows, respectively). The default is the current platform as revealed by Python's ``sys.platform`` variable. - ``custom_typing_module`` (string) specifies the name of an alternative module which is to be considered equivalent to the ``typing`` module. - ``custom_typeshed_dir`` (string) specifies the name of an alternative directory which is used to look for stubs instead of the default ``typeshed`` directory. - ``mypy_path`` (string) specifies the paths to use, after trying the paths from ``MYPYPATH`` environment variable. Useful if you'd like to keep stubs in your repo, along with the config file. - ``warn_incomplete_stub`` (Boolean, default False) warns for missing type annotation in typeshed. This is only relevant in combination with ``check_untyped_defs``. - ``warn_redundant_casts`` (Boolean, default False) warns about casting an expression to its inferred type. - ``warn_unused_ignores`` (Boolean, default False) warns about unneeded ``# type: ignore`` comments. - ``warn_unused_configs`` (Boolean, default False) warns about per-module sections in the config file that didn't match any files processed in the current run. - ``strict_optional`` (Boolean, default False) enables experimental strict Optional checks. - ``scripts_are_modules`` (Boolean, default False) makes script ``x`` become module ``x`` instead of ``__main__``. This is useful when checking multiple scripts in a single run. - ``verbosity`` (integer, default 0) controls how much debug output will be generated. Higher numbers are more verbose. - ``pdb`` (Boolean, default False) invokes pdb on fatal error. - ``show_traceback`` (Boolean, default False) shows traceback on fatal error. - ``dump_type_stats`` (Boolean, default False) dumps stats about type definitions. - ``dump_inference_stats`` (Boolean, default False) dumps stats about type inference. - ``incremental`` (Boolean, default False) enables :ref:`incremental mode `. - ``cache_dir`` (string, default ``.mypy_cache``) stores module cache info in the given folder in :ref:`incremental mode `. The cache is only read in incremental mode, but it is always written unless the value is set to ``/dev/null`` (UNIX) or ``nul`` (Windows). - ``quick_and_dirty`` (Boolean, default False) enables :ref:`quick mode `. - ``show_error_context`` (Boolean, default False) shows context notes before errors. - ``show_column_numbers`` (Boolean, default False) shows column numbers in error messages. .. _per-module-flags: Per-module flags **************** The following flags may vary per module. They may also be specified in the global section; the global section provides defaults which are overridden by the pattern sections matching the module name. .. note:: If multiple pattern sections match a module they are processed in order of their occurrence in the config file. 
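As a sketch of how pattern sections combine (the package names below are made
up for illustration), a config file might contain:

.. code-block:: text

    [mypy]
    # Global defaults.
    warn_unused_ignores = True

    [mypy-proj.*]
    # Applies to proj.utils, proj.db.models, and so on.
    disallow_untyped_defs = True

    [mypy-proj.vendored.*]
    # Also matches proj.vendored.legacy; since it appears later in the file,
    # it is processed after the [mypy-proj.*] section above.
    ignore_errors = True

The per-module flags listed below can be set in any of these pattern sections.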
- ``follow_imports`` (string, default ``normal``) directs what to do with imports when the imported module is found as a ``.py`` file and not part of the files, modules and packages on the command line. The four possible values are ``normal``, ``silent``, ``skip`` and ``error``. For explanations see the discussion for the :ref:`--follow-imports ` command line flag. Note that if pattern matching is used, the pattern should match the name of the *imported* module, not the module containing the import statement. - ``ignore_missing_imports`` (Boolean, default False) suppress error messages about imports that cannot be resolved. Note that if pattern matching is used, the pattern should match the name of the *imported* module, not the module containing the import statement. - ``silent_imports`` (Boolean, deprecated) equivalent to ``follow_imports=skip`` plus ``ignore_missing_imports=True``. - ``almost_silent`` (Boolean, deprecated) equivalent to ``follow_imports=skip``. - ``disallow_any_unimported`` (Boolean, default false) disallows usage of types that come from unfollowed imports (such types become aliases for ``Any``). - ``disallow_any_expr`` (Boolean, default false) disallows all expressions in the module that have type ``Any``. - ``disallow_any_decorated`` (Boolean, default false) disallows functions that have ``Any`` in their signature after decorator transformation. - ``disallow_any_explicit`` (Boolean, default false) disallows explicit ``Any`` in type positions such as type annotations and generic type parameters. - ``disallow_any_generics`` (Boolean, default false) disallows usage of generic types that do not specify explicit type parameters. - ``disallow_subclassing_any`` (Boolean, default False) disallows subclassing a value of type ``Any``. See :ref:`--disallow-subclassing-any ` option. - ``disallow_untyped_calls`` (Boolean, default False) disallows calling functions without type annotations from functions with type annotations. - ``disallow_untyped_defs`` (Boolean, default False) disallows defining functions without type annotations or with incomplete type annotations. - ``check_untyped_defs`` (Boolean, default False) type-checks the interior of functions without type annotations. - ``debug_cache`` (Boolean, default False) writes the incremental cache JSON files using a more readable, but slower format. - ``show_none_errors`` (Boolean, default True) shows errors related to strict ``None`` checking, if the global ``strict_optional`` flag is enabled. - ``ignore_errors`` (Boolean, default False) ignores all non-fatal errors. - ``warn_no_return`` (Boolean, default True) shows errors for missing return statements on some execution paths. - ``warn_return_any`` (Boolean, default False) shows a warning when returning a value with type ``Any`` from a function declared with a non- ``Any`` return type. - ``strict_boolean`` (Boolean, default False) makes using non-boolean expressions in conditions an error. - ``no_implicit_optional`` (Boolean, default false) changes the treatment of arguments with a default value of None by not implicitly making their type Optional Examples ******** You might put this in your ``mypy.ini`` file at the root of your repo: .. code-block:: text [mypy] python_version = 2.7 [mypy-foo.*] disallow_untyped_defs = True This automatically sets ``--python-version 2.7`` (a.k.a. ``--py2``) for all mypy runs in this tree, and also selectively turns on the ``--disallow-untyped-defs`` flag for all modules in the ``foo`` package. 
This issues an error for function definitions without type annotations in that subdirectory only. If you would like to ignore specific imports, instead of ignoring all missing imports with ``--ignore-missing-imports``, use a section of the configuration file per module such as the following to ignore missing imports from ``lib_module``: .. code-block:: text [mypy-lib_module] ignore_missing_imports = True .. note:: Configuration flags are liable to change between releases. mypy-0.560/docs/source/duck_type_compatibility.rst0000644€tŠÔÚ€2›s®0000000303313215007205026570 0ustar jukkaDROPBOX\Domain Users00000000000000Duck type compatibility ----------------------- In Python, certain types are compatible even though they aren't subclasses of each other. For example, ``int`` objects are valid whenever ``float`` objects are expected. Mypy supports this idiom via *duck type compatibility*. As of now, this is only supported for a small set of built-in types: * ``int`` is duck type compatible with ``float`` and ``complex``. * ``float`` is duck type compatible with ``complex``. * In Python 2, ``str`` is duck type compatible with ``unicode``. .. note:: Mypy support for Python 2 is still work in progress. For example, mypy considers an ``int`` object to be valid whenever a ``float`` object is expected. Thus code like this is nice and clean and also behaves as expected: .. code-block:: python def degrees_to_radians(x: float) -> float: return math.pi * degrees / 180 n = 90 # Inferred type 'int' print(degrees_to_radians(n)) # Okay! .. note:: Note that in Python 2 a ``str`` object with non-ASCII characters is often *not valid* when a unicode string is expected. The mypy type system does not consider a string with non-ASCII values as a separate type so some programs with this kind of error will silently pass type checking. In Python 3 ``str`` and ``bytes`` are separate, unrelated types and this kind of error is easy to detect. This a good reason for preferring Python 3 over Python 2! See :ref:`text-and-anystr` for details on how to enforce that a value must be a unicode string in a cross-compatible way. mypy-0.560/docs/source/dynamic_typing.rst0000644€tŠÔÚ€2›s®0000000476513215007205024703 0ustar jukkaDROPBOX\Domain Users00000000000000.. _dynamic_typing: Dynamically typed code ====================== As mentioned earlier, bodies of functions that don't have have any explicit types in their function annotation are dynamically typed (operations are checked at runtime). Code outside functions is statically typed by default, and types of variables are inferred. This does usually the right thing, but you can also make any variable dynamically typed by defining it explicitly with the type ``Any``: .. code-block:: python from typing import Any s = 1 # Statically typed (type int) d = 1 # type: Any # Dynamically typed (type Any) s = 'x' # Type check error d = 'x' # OK Operations on Any values ------------------------ You can do anything using a value with type ``Any``, and type checker does not complain: .. code-block:: python def f(x: Any) -> int: # All of these are valid! x.foobar(1, y=2) print(x[3] + 'f') if x: x.z = x(2) open(x).read() return x Values derived from an ``Any`` value also often have the type ``Any`` implicitly, as mypy can't infer a more precise result type. For example, if you get the attribute of an ``Any`` value or call a ``Any`` value the result is ``Any``: .. code-block:: python def f(x: Any) -> None: y = x.foo() # y has type Any y.bar() # Okay as well! 
``Any`` types may propagate through your program, making type checking less effective, unless you are careful. Any vs. object -------------- The type ``object`` is another type that can have an instance of arbitrary type as a value. Unlike ``Any``, ``object`` is an ordinary static type (it is similar to ``Object`` in Java), and only operations valid for *all* types are accepted for ``object`` values. These are all valid: .. code-block:: python def f(o: object) -> None: if o: print(o) print(isinstance(o, int)) o = 2 o = 'foo' These are, however, flagged as errors, since not all objects support these operations: .. code-block:: python def f(o: object) -> None: o.foo() # Error! o + 2 # Error! open(o) # Error! n = 1 # type: int n = o # Error! You can use ``cast()`` (see chapter :ref:`casts`) or ``isinstance`` to go from a general type such as ``object`` to a more specific type (subtype) such as ``int``. ``cast()`` is not needed with dynamically typed values (values with type ``Any``). mypy-0.560/docs/source/faq.rst0000644€tŠÔÚ€2›s®0000002756513215007205022437 0ustar jukkaDROPBOX\Domain Users00000000000000Frequently Asked Questions ========================== Why have both dynamic and static typing? **************************************** Dynamic typing can be flexible, powerful, convenient and easy. But it's not always the best approach; there are good reasons why many developers choose to use statically typed languages. Here are some potential benefits of mypy-style static typing: - Static typing can make programs easier to understand and maintain. Type declarations can serve as machine-checked documentation. This is important as code is typically read much more often than modified, and this is especially important for large and complex programs. - Static typing can help you find bugs earlier and with less testing and debugging. Especially in large and complex projects this can be a major time-saver. - Static typing can help you find difficult-to-find bugs before your code goes into production. This can improve reliability and reduce the number of security issues. - Static typing makes it practical to build very useful development tools that can improve programming productivity or software quality, including IDEs with precise and reliable code completion, static analysis tools, etc. - You can get the benefits of both dynamic and static typing in a single language. Dynamic typing can be perfect for a small project or for writing the UI of your program, for example. As your program grows, you can adapt tricky application logic to static typing to help maintenance. See also the `front page `_ of the mypy web site. Would my project benefit from static typing? ******************************************** For many projects dynamic typing is perfectly fine (we think that Python is a great language). But sometimes your projects demand bigger guns, and that's when mypy may come in handy. If some of these ring true for your projects, mypy (and static typing) may be useful: - Your project is large or complex. - Your codebase must be maintained for a long time. - Multiple developers are working on the same code. - Running tests takes a lot of time or work (type checking may help you find errors early in development, reducing the number of testing iterations). - Some project members (devs or management) don't like dynamic typing, but others prefer dynamic typing and Python syntax. Mypy could be a solution that everybody finds easy to accept. 
- You want to future-proof your project even if currently none of the above really apply. Can I use mypy to type check my existing Python code? ***************************************************** It depends. Compatibility is pretty good, but some Python features are not yet implemented or fully supported. The ultimate goal is to make using mypy practical for most Python code. Code that uses complex introspection or metaprogramming may be impractical to type check, but it should still be possible to use static typing in other parts of a program. Will static typing make my programs run faster? *********************************************** Mypy only does static type checking and it does not improve performance. It has a minimal performance impact. In the future, there could be other tools that can compile statically typed mypy code to C modules or to efficient JVM bytecode, for example, but this is outside the scope of the mypy project. It may also be possible to modify existing Python VMs to take advantage of static type information, but whether this is feasible is still unknown. This is nontrivial since the runtime types do not necessarily correspond to the static types. How do I type check my Python 2 code? ************************************* You can use a `comment-based function annotation syntax `_ and use the ``--py2`` command-line option to type check your Python 2 code. You'll also need to install ``typing`` for Python 2 via ``pip install typing``. Is mypy free? ************* Yes. Mypy is free software, and it can also be used for commercial and proprietary projects. Mypy is available under the MIT license. Can I use structural subtyping? ******************************* Mypy provides support for both `nominal subtyping `_ and `structural subtyping `_. Some argue that structural subtyping is better suited for languages with duck typing such as Python. Mypy however primarily uses nominal subtyping, leaving structural subtyping mostly opt-in (except for built-in protocols such as ``Iterable`` that always support structural subtyping). Here are some reasons why: 1. It is easy to generate short and informative error messages when using a nominal type system. This is especially important when using type inference. 2. Python provides built-in support for nominal ``isinstance()`` tests and they are widely used in programs. Only limited support for structural ``isinstance()`` is available, and it's less type safe than nominal type tests. 3. Many programmers are already familiar with static, nominal subtyping and it has been successfully used in languages such as Java, C++ and C#. Fewer languages use structural subtyping. However, structural subtyping can also be useful. For example, a "public API" may be more flexible if it is typed with protocols. Also, using protocol types removes the necessity to explicitly declare implementations of ABCs. As a rule of thumb, we recommend using nominal classes where possible, and protocols where necessary. For more details about protocol types and structural subtyping see :ref:`protocol-types` and `PEP 544 `_. I like Python and I have no need for static typing ************************************************** That wasn't really a question, was it? Mypy is not aimed at replacing Python. The goal is to give more options for Python programmers, to make Python a more competitive alternative to other statically typed languages in large projects, to improve programmer productivity and to improve software quality. 
How are mypy programs different from normal Python?
***************************************************

Since you use a vanilla Python implementation to run mypy programs, mypy programs are also Python programs. The type checker may give warnings for some valid Python code, but the code is still always runnable. Also, some Python features and syntax are still not supported by mypy, but this is gradually improving.

The obvious difference is the availability of static type checking. The section :ref:`common_issues` mentions some modifications to Python code that may be required to make code type check without errors. Also, your code must make attributes explicit and use an explicit protocol representation. For example, you may want to subclass an Abstract Base Class such as ``typing.Iterable``.

Mypy will support modular, efficient type checking, and this seems to rule out type checking some language features, such as arbitrary runtime addition of methods. However, it is likely that many of these features will be supported in a restricted form (for example, runtime modification is only supported for classes or methods registered as dynamic or 'patchable').

How is mypy different from PyPy?
********************************

*This answer relates to PyPy as a Python implementation. See also the answer related to RPython below.*

Mypy and PyPy are orthogonal. Mypy does static type checking, i.e. it is basically a linter, but static typing has no runtime effect, whereas PyPy is a Python implementation. You can use PyPy to run mypy programs.

How is mypy different from Cython?
**********************************

`Cython `_ is a variant of Python that supports compilation to CPython C modules. It can give major speedups to certain classes of programs compared to CPython, and it provides static typing (though this is different from mypy). Mypy differs in the following aspects, among others:

- Cython is much more focused on performance than mypy. Mypy is only about static type checking, and increasing performance is not a direct goal.

- The mypy syntax is arguably simpler and more "Pythonic" (no cdef/cpdef, etc.) for statically typed code.

- The mypy syntax is compatible with Python. Mypy programs are normal Python programs that can be run using any Python implementation. Cython has many incompatible extensions to Python syntax, and Cython programs generally cannot be run without first compiling them to CPython extension modules via C. Cython also has a pure Python mode, but it seems to support only a subset of Cython functionality, and the syntax is quite verbose.

- Mypy has a different set of type system features. For example, mypy has genericity (parametric polymorphism), function types and bidirectional type inference, which are not supported by Cython. (Cython has fused types that are different but related to mypy generics. Mypy also has a similar feature as an extension of generics.)

- The mypy type checker knows about the static types of many Python stdlib modules and can effectively type check code that uses them.

- Cython supports accessing C functions directly and many features are defined in terms of translating them to C or C++. Mypy just uses Python semantics, and mypy does not deal with accessing C library functionality.

How is mypy different from Nuitka?
**********************************

`Nuitka `_ is a static compiler that can translate Python programs to C++. Nuitka integrates with the CPython runtime.
Nuitka has additional future goals, such as using type inference and whole-program analysis to further speed up code. Here are some differences: - Nuitka is primarily focused on speeding up Python code. Mypy focuses on static type checking and facilitating better tools. - Whole-program analysis tends to be slow and scale poorly to large or complex programs. It is still unclear if Nuitka can solve these issues. Mypy does not use whole-program analysis and will support modular type checking (though this has not been implemented yet). How is mypy different from RPython or Shed Skin? ************************************************ `RPython `_ and `Shed Skin `_ are basically statically typed subsets of Python. Mypy does the following important things differently: - RPython is primarily designed for implementing virtual machines; mypy is a general-purpose tool. - Mypy supports both static and dynamic typing. Dynamically typed and statically typed code can be freely mixed and can interact seamlessly. - Mypy aims to support (in the future) fast and modular type checking. Both RPython and Shed Skin use whole-program type inference which is very slow, does not scale well to large programs and often produces confusing error messages. Mypy can support modularity since it only uses local type inference; static type checking depends on having type annotations for functions signatures. - Mypy will support introspection, dynamic loading of code and many other dynamic language features (though using these may make static typing less effective). RPython and Shed Skin only support a restricted Python subset without several of these features. - Mypy supports user-defined generic types. Mypy is a cool project. Can I help? *********************************** Any help is much appreciated! `Contact `_ the developers if you would like to contribute. Any help related to development, design, publicity, documentation, testing, web site maintenance, financing, etc. can be helpful. You can learn a lot by contributing, and anybody can help, even beginners! However, some knowledge of compilers and/or type systems is essential if you want to work on mypy internals. mypy-0.560/docs/source/function_overloading.rst0000644€tŠÔÚ€2›s®0000001130113215007205026063 0ustar jukkaDROPBOX\Domain Users00000000000000.. _function-overloading: Function Overloading ==================== Sometimes the types in a function depend on each other in ways that can't be captured with a ``Union``. For example, the ``__getitem__`` (``[]`` bracket indexing) method can take an integer and return a single item, or take a ``slice`` and return a ``Sequence`` of items. You might be tempted to annotate it like so: .. code-block:: python from typing import Sequence, TypeVar, Union T = TypeVar('T') class MyList(Sequence[T]): def __getitem__(self, index: Union[int, slice]) -> Union[T, Sequence[T]]: if isinstance(index, int): ... # Return a T here elif isinstance(index, slice): ... # Return a sequence of Ts here else: raise TypeError(...) But this is too loose, as it implies that when you pass in an ``int`` you might sometimes get out a single item and sometimes a sequence. The return type depends on the parameter type in a way that can't be expressed using a type variable. Instead, we can use `overloading `_ to give the same function multiple type annotations (signatures) and accurately describe the function's behavior. .. 
code-block:: python

    from typing import overload, Sequence, TypeVar, Union

    T = TypeVar('T')

    class MyList(Sequence[T]):
        # The @overload definitions are just for the type checker,
        # and overwritten by the real implementation below.
        @overload
        def __getitem__(self, index: int) -> T:
            pass  # Don't put code here

        # All overloads and the implementation must be adjacent
        # in the source file, and overload order may matter:
        # when two overloads may overlap, the more specific one
        # should come first.
        @overload
        def __getitem__(self, index: slice) -> Sequence[T]:
            pass  # Don't put code here

        # The implementation goes last, without @overload.
        # It may or may not have type hints; if it does,
        # these are checked against the overload definitions
        # as well as against the implementation body.
        def __getitem__(self, index: Union[int, slice]) -> Union[T, Sequence[T]]:
            # This is exactly the same as before.
            if isinstance(index, int):
                ...  # Return a T here
            elif isinstance(index, slice):
                ...  # Return a sequence of Ts here
            else:
                raise TypeError(...)

Calls to overloaded functions are type checked against the variants, not against the implementation. A call like ``my_list[5]`` would have type ``T``, not ``Union[T, Sequence[T]]``, because it matches the first overloaded definition, and ignores the type annotations on the implementation of ``__getitem__``. The code in the body of the definition of ``__getitem__`` is checked against the annotations on the corresponding declaration. In this case the body is checked with ``index: Union[int, slice]`` and a return type of ``Union[T, Sequence[T]]``. If there are no annotations on the corresponding definition, then the code in the function body is not type checked.

The annotations on the implementation must be compatible with the types given for the overloaded variants listed above it. The type checker will verify that all the types listed in the overloaded variants are compatible with the types given for the implementation. In this case it checks that the parameter type ``int`` and the return type ``T`` are compatible with ``Union[int, slice]`` and ``Union[T, Sequence[T]]`` for the first variant. For the second variant it verifies that the parameter type ``slice`` and the return type ``Sequence[T]`` are compatible with ``Union[int, slice]`` and ``Union[T, Sequence[T]]``.

Overloaded function variants are still ordinary Python functions and they still define a single runtime object. There is no automatic dispatch happening, and you must manually handle the different types in the implementation (usually with :func:`isinstance` checks, as shown in the example).

The overload variants must be adjacent in the code. This makes code clearer, as you don't have to hunt for overload variants across the file.

Overloads in stub files are exactly the same, except there is no implementation.

.. note::

   As generic type variables are erased at runtime when constructing instances of generic types, an overloaded function cannot have variants that only differ in a generic type argument, e.g. ``List[int]`` and ``List[str]``.

.. note::

   If you just need to constrain a type variable to certain types or subtypes, you can use a :ref:`value restriction `.

mypy-0.560/docs/source/generics.rst0000644€tŠÔÚ€2›s®0000004631613215007205023462 0ustar jukkaDROPBOX\Domain Users00000000000000
Generics
========

.. _generic-classes:

Defining generic classes
************************

The built-in collection classes are generic classes. Generic types have one or more type parameters, which can be arbitrary types.
For example, ``Dict[int, str]`` has the type parameters ``int`` and ``str``, and ``List[int]`` has a type parameter ``int``.

Programs can also define new generic classes. Here is a very simple generic class that represents a stack:

.. code-block:: python

    from typing import TypeVar, Generic, List

    T = TypeVar('T')

    class Stack(Generic[T]):
        def __init__(self) -> None:
            # Create an empty list with items of type T
            self.items = []  # type: List[T]

        def push(self, item: T) -> None:
            self.items.append(item)

        def pop(self) -> T:
            return self.items.pop()

        def empty(self) -> bool:
            return not self.items

The ``Stack`` class can be used to represent a stack of any type: ``Stack[int]``, ``Stack[Tuple[int, str]]``, etc. Using ``Stack`` is similar to built-in container types:

.. code-block:: python

    # Construct an empty Stack[int] instance
    stack = Stack[int]()
    stack.push(2)
    stack.pop()
    stack.push('x')  # Type error

Type inference works for user-defined generic types as well:

.. code-block:: python

    def process(stack: Stack[int]) -> None: ...

    process(Stack())  # Argument has inferred type Stack[int]

Construction of instances of generic types is also type checked:

.. code-block:: python

    class Box(Generic[T]):
        def __init__(self, content: T) -> None:
            self.content = content

    Box(1)       # OK, inferred type is Box[int]
    Box[int](1)  # Also OK
    s = 'some string'
    Box[int](s)  # Type error

Generic class internals
***********************

You may wonder what happens at runtime when you index ``Stack``. Actually, indexing ``Stack`` returns essentially a copy of ``Stack`` that returns instances of the original class on instantiation:

>>> print(Stack)
__main__.Stack
>>> print(Stack[int])
__main__.Stack[int]
>>> print(Stack[int]().__class__)
__main__.Stack

Note that built-in types ``list``, ``dict`` and so on do not support indexing in Python. This is why we have the aliases ``List``, ``Dict`` and so on in the ``typing`` module. Indexing these aliases gives you a class that directly inherits from the target class in Python:

>>> from typing import List
>>> List[int]
typing.List[int]
>>> List[int].__bases__
(<class 'list'>, typing.MutableSequence)

Generic types can be instantiated or subclassed like ordinary classes, but the above examples illustrate that type variables are erased at runtime. Generic ``Stack`` instances are just ordinary Python objects, and they have no extra runtime overhead or magic due to being generic, other than a metaclass that overloads the indexing operator.

.. _generic-subclasses:

Defining sub-classes of generic classes
***************************************

User-defined generic classes and generic classes defined in ``typing`` can be used as base classes for other classes, both generic and non-generic. For example:

.. code-block:: python

    from typing import Generic, TypeVar, Mapping, Iterator, Dict

    KT = TypeVar('KT')
    VT = TypeVar('VT')

    class MyMap(Mapping[KT, VT]):  # This is a generic subclass of Mapping
        def __getitem__(self, k: KT) -> VT:
            ...  # Implementations omitted
        def __iter__(self) -> Iterator[KT]:
            ...
        def __len__(self) -> int:
            ...

    items: MyMap[str, int]  # Okay

    class StrDict(Dict[str, str]):  # This is a non-generic subclass of Dict
        def __str__(self) -> str:
            return 'StrDict({})'.format(super().__str__())

    data: StrDict[int, int]  # Error! StrDict is not generic
    data2: StrDict  # OK

    class Receiver(Generic[T]):
        def accept(self, value: T) -> None: ...

    class AdvancedReceiver(Receiver[T]): ...

..
note:: You have to add an explicit ``Mapping`` base class if you want mypy to consider a user-defined class as a mapping (and ``Sequence`` for sequences, etc.). This is because mypy doesn't use *structural subtyping* for these ABCs, unlike simpler protocols like ``Iterable``, which use :ref:`structural subtyping `. ``Generic[...]`` can be omitted from bases if there are other base classes that include type variables, such as ``Mapping[KT, VT]`` in the above example. If you include ``Generic[...]`` in bases, then it should list all type variables present in other bases (or more, if needed). The order of type variables is defined by the following rules: * If ``Generic[...]`` is present, then the order of variables is always determined by their order in ``Generic[...]``. * If there are no ``Generic[...]`` in bases, then all type variables are collected in the lexicographic order (i.e. by first appearance). For example: .. code-block:: python from typing import Generic, TypeVar, Any T = TypeVar('T') S = TypeVar('S') U = TypeVar('U') class One(Generic[T]): ... class Another(Generic[T]): ... class First(One[T], Another[S]): ... class Second(One[T], Another[S], Generic[S, U, T]): ... x: First[int, str] # Here T is bound to int, S is bound to str y: Second[int, str, Any] # Here T is Any, S is int, and U is str .. _generic-functions: Generic functions ***************** Generic type variables can also be used to define generic functions: .. code-block:: python from typing import TypeVar, Sequence T = TypeVar('T') # Declare type variable def first(seq: Sequence[T]) -> T: # Generic function return seq[0] As with generic classes, the type variable can be replaced with any type. That means ``first`` can be used with any sequence type, and the return type is derived from the sequence item type. For example: .. code-block:: python # Assume first defined as above. s = first('foo') # s has type str. n = first([1, 2, 3]) # n has type int. Note also that a single definition of a type variable (such as ``T`` above) can be used in multiple generic functions or classes. In this example we use the same type variable in two generic functions: .. code-block:: python from typing import TypeVar, Sequence T = TypeVar('T') # Declare type variable def first(seq: Sequence[T]) -> T: return seq[0] def last(seq: Sequence[T]) -> T: return seq[-1] .. _generic-methods-and-generic-self: Generic methods and generic self ******************************** You can also define generic methods — just use a type variable in the method signature that is different from class type variables. In particular, ``self`` may also be generic, allowing a method to return the most precise type known at the point of access. .. note:: This feature is experimental. Checking code with type annotations for self arguments is still not fully implemented. Mypy may disallow valid code or allow unsafe code. In this way, for example, you can typecheck chaining of setter methods: .. code-block:: python from typing import TypeVar T = TypeVar('T', bound='Shape') class Shape: def set_scale(self: T, scale: float) -> T: self.scale = scale return self class Circle(Shape): def set_radius(self, r: float) -> 'Circle': self.radius = r return self class Square(Shape): def set_width(self, w: float) -> 'Square': self.width = w return self circle = Circle().set_scale(0.5).set_radius(2.7) # type: Circle square = Square().set_scale(0.5).set_width(3.2) # type: Square Without using generic ``self``, the last two lines could not be type-checked properly. 
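To make the benefit concrete, here is a sketch (ours, not part of the original example) of the same classes written *without* a generic ``self``; the chained calls above would then be rejected:

.. code-block:: python

    class Shape:
        def set_scale(self, scale: float) -> 'Shape':  # returns plain Shape
            self.scale = scale
            return self

    class Circle(Shape):
        def set_radius(self, r: float) -> 'Circle':
            self.radius = r
            return self

    # set_scale() is declared to return Shape, so mypy no longer knows that
    # the result is a Circle and rejects the chained set_radius() call:
    Circle().set_scale(0.5).set_radius(2.7)  # Error: "Shape" has no attribute "set_radius"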
Other uses are factory methods, such as copy and deserialization. For class methods, you can also define generic ``cls``, using ``Type[T]``: .. code-block:: python from typing import TypeVar, Tuple, Type T = TypeVar('T', bound='Friend') class Friend: other = None # type: Friend @classmethod def make_pair(cls: Type[T]) -> Tuple[T, T]: a, b = cls(), cls() a.other = b b.other = a return a, b class SuperFriend(Friend): pass a, b = SuperFriend.make_pair() Note that when overriding a method with generic ``self``, you must either return a generic ``self`` too, or return an instance of the current class. In the latter case, you must implement this method in all future subclasses. Note also that mypy cannot always verify that the implementation of a copy or a deserialization method returns the actual type of self. Therefore you may need to silence mypy inside these methods (but not at the call site), possibly by making use of the ``Any`` type. .. _variance-of-generics: Variance of generic types ************************* There are three main kinds of generic types with respect to subtype relations between them: invariant, covariant, and contravariant. Assuming that we have a pair of types types ``A`` and ``B`` and ``B`` is a subtype of ``A``, these are defined as follows: * A generic class ``MyCovGen[T, ...]`` is called covariant in type variable ``T`` if ``MyCovGen[B, ...]`` is always a subtype of ``MyCovGen[A, ...]``. * A generic class ``MyContraGen[T, ...]`` is called contravariant in type variable ``T`` if ``MyContraGen[A, ...]`` is always a subtype of ``MyContraGen[B, ...]``. * A generic class ``MyInvGen[T, ...]`` is called invariant in ``T`` if neither of the above is true. Let us illustrate this by few simple examples: * ``Union`` is covariant in all variables: ``Union[Cat, int]`` is a subtype of ``Union[Animal, int]``, ``Union[Dog, int]`` is also a subtype of ``Union[Animal, int]``, etc. Most immutable containers such as ``Sequence`` and ``FrozenSet`` are also covariant. * ``Callable`` is an example of type that behaves contravariant in types of arguments, namely ``Callable[[Employee], int]`` is a subtype of ``Callable[[Manager], int]``. To understand this, consider a function: .. code-block:: python def salaries(staff: List[Manager], accountant: Callable[[Manager], int]) -> List[int]: ... This function needs a callable that can calculate a salary for managers, and if we give it a callable that can calculate a salary for an arbitrary employee, it's still safe. * ``List`` is an invariant generic type. Naively, one would think that it is covariant, but let us consider this code: .. code-block:: python class Shape: pass class Circle(Shape): def rotate(self): ... def add_one(things: List[Shape]) -> None: things.append(Shape()) my_things: List[Circle] = [] add_one(my_things) # This may appear safe, but... my_things[0].rotate() # ...this will fail Another example of invariant type is ``Dict``. Most mutable containers are invariant. By default, mypy assumes that all user-defined generics are invariant. To declare a given generic class as covariant or contravariant use type variables defined with special keyword arguments ``covariant`` or ``contravariant``. For example: .. code-block:: python from typing import Generic, TypeVar T_co = TypeVar('T_co', covariant=True) class Box(Generic[T_co]): # this type is declared covariant def __init__(self, content: T_co) -> None: self._content = content def get_content(self) -> T_co: return self._content def look_into(box: Box[Animal]): ... 
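    # (Explanatory comment added here, not in the original example:)
    # Box is declared covariant in T_co, so Box[Cat] is a subtype of
    # Box[Animal] and can be passed where Box[Animal] is expected.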
    my_box = Box(Cat())
    look_into(my_box)  # OK, but mypy would complain here for an invariant type

.. _type-variable-value-restriction:

Type variables with value restriction
*************************************

By default, a type variable can be replaced with any type. However, sometimes it's useful to have a type variable that can only have some specific types as its value. A typical example is a type variable that can only have values ``str`` and ``bytes``:

.. code-block:: python

    from typing import TypeVar

    AnyStr = TypeVar('AnyStr', str, bytes)

This is actually such a common type variable that ``AnyStr`` is defined in ``typing`` and we don't need to define it ourselves.

We can use ``AnyStr`` to define a function that can concatenate two strings or bytes objects, but it can't be called with other argument types:

.. code-block:: python

    from typing import AnyStr

    def concat(x: AnyStr, y: AnyStr) -> AnyStr:
        return x + y

    concat('a', 'b')    # Okay
    concat(b'a', b'b')  # Okay
    concat(1, 2)        # Error!

Note that this is different from a union type, since combinations of ``str`` and ``bytes`` are not accepted:

.. code-block:: python

    concat('string', b'bytes')  # Error!

In this case, this is exactly what we want, since it's not possible to concatenate a string and a bytes object! The type checker will reject this function:

.. code-block:: python

    def union_concat(x: Union[str, bytes], y: Union[str, bytes]) -> Union[str, bytes]:
        return x + y  # Error: can't concatenate str and bytes

Another interesting special case is calling ``concat()`` with a subtype of ``str``:

.. code-block:: python

    class S(str): pass

    ss = concat(S('foo'), S('bar'))

You may expect that the type of ``ss`` is ``S``, but the type is actually ``str``: a subtype gets promoted to one of the valid values for the type variable, which in this case is ``str``. This is thus subtly different from *bounded quantification* in languages such as Java, where the return type would be ``S``. The way mypy implements this is correct for ``concat``, since ``concat`` actually returns a ``str`` instance in the above example:

.. code-block:: python

    >>> print(type(ss))
    <class 'str'>

You can also use a ``TypeVar`` with a restricted set of possible values when defining a generic class. For example, mypy uses the type ``typing.Pattern[AnyStr]`` for the return value of ``re.compile``, since regular expressions can be based on a string or a bytes pattern.

.. _type-variable-upper-bound:

Type variables with upper bounds
********************************

A type variable can also be restricted to having values that are subtypes of a specific type. This type is called the upper bound of the type variable, and is specified with the ``bound=...`` keyword argument to ``TypeVar``.

.. code-block:: python

    from typing import TypeVar, SupportsAbs

    T = TypeVar('T', bound=SupportsAbs[float])

In the definition of a generic function that uses such a type variable ``T``, the type represented by ``T`` is assumed to be a subtype of its upper bound, so the function can use methods of the upper bound on values of type ``T``.

.. code-block:: python

    def largest_in_absolute_value(*xs: T) -> T:
        return max(xs, key=abs)  # Okay, because T is a subtype of SupportsAbs[float].

In a call to such a function, the type ``T`` must be replaced by a type that is a subtype of its upper bound. Continuing the example above,

.. code-block:: python

    largest_in_absolute_value(-3.5, 2)  # Okay, has type float.
    largest_in_absolute_value(5+6j, 7)  # Okay, has type complex.
largest_in_absolute_value('a', 'b') # Error: 'str' is not a subtype of SupportsAbs[float]. Type parameters of generic classes may also have upper bounds, which restrict the valid values for the type parameter in the same way. A type variable may not have both a value restriction (see :ref:`type-variable-value-restriction`) and an upper bound. .. _declaring-decorators: Declaring decorators ******************** One common application of type variable upper bounds is in declaring a decorator that preserves the signature of the function it decorates, regardless of that signature. Here's a complete example: .. code-block:: python from typing import Any, Callable, TypeVar, Tuple, cast FuncType = Callable[..., Any] F = TypeVar('F', bound=FuncType) # A decorator that preserves the signature. def my_decorator(func: F) -> F: def wrapper(*args, **kwds): print("Calling", func) return func(*args, **kwds) return cast(F, wrapper) # A decorated function. @my_decorator def foo(a: int) -> str: return str(a) # Another. @my_decorator def bar(x: float, y: float) -> Tuple[float, float, bool]: return (x, y, x > y) a = foo(12) reveal_type(a) # str b = bar(3.14, 0) reveal_type(b) # Tuple[float, float, bool] foo('x') # Type check error: incompatible type "str"; expected "int" From the final block we see that the signatures of the decorated functions ``foo()`` and ``bar()`` are the same as those of the original functions (before the decorator is applied). The bound on ``F`` is used so that calling the decorator on a non-function (e.g. ``my_decorator(1)``) will be rejected. Also note that the ``wrapper()`` function is not type-checked. Wrapper functions are typically small enough that this is not a big problem. This is also the reason for the ``cast()`` call in the ``return`` statement in ``my_decorator()``. See :ref:`casts`. Generic protocols ***************** Mypy supports generic protocols (see also :ref:`protocol-types`). Several :ref:`predefined protocols ` are generic, such as ``Iterable[T]``, and you can define additional generic protocols. Generic protocols mostly follow the normal rules for generic classes. Example: .. code-block:: python from typing import TypeVar from typing_extensions import Protocol T = TypeVar('T') class Box(Protocol[T]): content: T def do_stuff(one: Box[str], other: Box[bytes]) -> None: ... class StringWrapper: def __init__(self, content: str) -> None: self.content = content class BytesWrapper: def __init__(self, content: bytes) -> None: self.content = content do_stuff(StringWrapper('one'), BytesWrapper(b'other')) # OK x: Box[float] = ... y: Box[int] = ... x = y # Error -- Box is invariant The main difference between generic protocols and ordinary generic classes is that mypy checks that the declared variances of generic type variables in a protocol match how they are used in the protocol definition. The protocol in this example is rejected, since the type variable ``T`` is used covariantly as a return type, but the type variable is invariant: .. code-block:: python from typing import TypeVar from typing_extensions import Protocol T = TypeVar('T') class ReadOnlyBox(Protocol[T]): # Error: covariant type variable expected def content(self) -> T: ... This example correctly uses a covariant type variable: .. code-block:: python from typing import TypeVar from typing_extensions import Protocol T_co = TypeVar('T_co', covariant=True) class ReadOnlyBox(Protocol[T_co]): # OK def content(self) -> T_co: ... ax: ReadOnlyBox[float] = ... ay: ReadOnlyBox[int] = ... 
ax = ay # OK -- ReadOnlyBox is covariant See :ref:`variance-of-generics` for more about variance. Generic protocols can also be recursive. Example: .. code-block:: python T = TypeVar('T') class Linked(Protocol[T]): val: T def next(self) -> 'Linked[T]': ... class L: val: int ... # details omitted def next(self) -> 'L': ... # details omitted def last(seq: Linked[T]) -> T: ... # implementation omitted result = last(L()) # Inferred type of 'result' is 'int' mypy-0.560/docs/source/getting_started.rst0000644€tŠÔÚ€2›s®0000000107313215007205025041 0ustar jukkaDROPBOX\Domain Users00000000000000.. _getting-started: Getting started =============== Installation ************ Mypy requires Python 3.4 or later. Once you've `installed Python 3 `_, you can install mypy with: .. code-block:: text $ python3 -m pip install mypy Installing from source ********************** To install mypy from source, clone the github repository and then run ``pip install`` locally: .. code-block:: text $ git clone --recurse-submodules https://github.com/python/mypy.git $ cd mypy $ sudo python3 -m pip install --upgrade . mypy-0.560/docs/source/index.rst0000644€tŠÔÚ€2›s®0000000147513215007205022767 0ustar jukkaDROPBOX\Domain Users00000000000000.. Mypy documentation master file, created by sphinx-quickstart on Sun Sep 14 19:50:35 2014. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. Welcome to Mypy documentation! ============================== Mypy is a static type checker for Python. .. toctree:: :maxdepth: 2 introduction basics getting_started builtin_types python2 type_inference_and_annotations kinds_of_types class_basics dynamic_typing function_overloading casts duck_type_compatibility common_issues generics supported_python_features additional_features command_line config_file python36 faq cheat_sheet cheat_sheet_py3 revision_history Indices and tables ================== * :ref:`genindex` * :ref:`search` mypy-0.560/docs/source/introduction.rst0000644€tŠÔÚ€2›s®0000000255413215007205024400 0ustar jukkaDROPBOX\Domain Users00000000000000Introduction ============ Mypy is a static type checker for Python. If you sprinkle your code with type annotations, mypy can type check your code and find common bugs. As mypy is a static analyzer, or a lint-like tool, your code's type annotations are just hints and don't interfere when running your program. You run your program with a standard Python interpreter, and the annotations are treated primarily as comments. Using the Python 3 function annotation syntax (using the PEP 484 notation) or a comment-based annotation syntax for Python 2 code, you will be able to efficiently annotate your code and use mypy to check the code for common errors. Mypy has a powerful, easy-to-use, type system with modern features such as type inference, generics, function types, tuple types and union types. As a developer, you decide how to use mypy in your workflow. You can always escape to dynamic typing as mypy's approach to static typing doesn't restrict what you can do in your programs. Using mypy will make your programs easier to debug, maintain, and understand. This documentation provides a short introduction to mypy. It will help you get started writing statically typed code. Knowledge of Python and a statically typed object-oriented language, such as Java, are assumed. .. note:: Mypy is still experimental. There will be changes that break backward compatibility. 
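For a quick taste of what statically typed code looks like in practice, here is a tiny example of our own (the function ``double`` is invented for illustration, not taken from the original text); mypy accepts the first call and flags the second:

.. code-block:: python

    def double(n: int) -> int:
        return n * 2

    double(5)       # OK
    double('oops')  # mypy reports an incompatible argument type here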
mypy-0.560/docs/source/kinds_of_types.rst0000644€tŠÔÚ€2›s®0000012115213215007205024673 0ustar jukkaDROPBOX\Domain Users00000000000000Kinds of types ============== User-defined types ****************** Each class is also a type. Any instance of a subclass is also compatible with all superclasses. All values are compatible with the ``object`` type (and also the ``Any`` type). .. code-block:: python class A: def f(self) -> int: # Type of self inferred (A) return 2 class B(A): def f(self) -> int: return 3 def g(self) -> int: return 4 a = B() # type: A # OK (explicit type for a; override type inference) print(a.f()) # 3 a.g() # Type check error: A has no method g The Any type ************ A value with the ``Any`` type is dynamically typed. Mypy doesn't know anything about the possible runtime types of such value. Any operations are permitted on the value, and the operations are checked at runtime, similar to normal Python code without type annotations. ``Any`` is compatible with every other type, and vice versa. No implicit type check is inserted when assigning a value of type ``Any`` to a variable with a more precise type: .. code-block:: python a = None # type: Any s = '' # type: str a = 2 # OK s = a # OK Declared (and inferred) types are *erased* at runtime. They are basically treated as comments, and thus the above code does not generate a runtime error, even though ``s`` gets an ``int`` value when the program is run. Note that the declared type of ``s`` is actually ``str``! If you do not define a function return value or argument types, these default to ``Any``: .. code-block:: python def show_heading(s) -> None: print('=== ' + s + ' ===') # No static type checking, as s has type Any show_heading(1) # OK (runtime error only; mypy won't generate an error) You should give a statically typed function an explicit ``None`` return type even if it doesn't return a value, as this lets mypy catch additional type errors: .. code-block:: python def wait(t: float): # Implicit Any return value print('Waiting...') time.sleep(t) if wait(2) > 1: # Mypy doesn't catch this error! ... If we had used an explicit ``None`` return type, mypy would have caught the error: .. code-block:: python def wait(t: float) -> None: print('Waiting...') time.sleep(t) if wait(2) > 1: # Error: can't compare None and int ... The ``Any`` type is discussed in more detail in section :ref:`dynamic_typing`. .. note:: A function without any types in the signature is dynamically typed. The body of a dynamically typed function is not checked statically, and local variables have implicit ``Any`` types. This makes it easier to migrate legacy Python code to mypy, as mypy won't complain about dynamically typed functions. .. _tuple-types: Tuple types *********** The type ``Tuple[T1, ..., Tn]`` represents a tuple with the item types ``T1``, ..., ``Tn``: .. code-block:: python def f(t: Tuple[int, str]) -> None: t = 1, 'foo' # OK t = 'foo', 1 # Type check error A tuple type of this kind has exactly a specific number of items (2 in the above example). Tuples can also be used as immutable, varying-length sequences. You can use the type ``Tuple[T, ...]`` (with a literal ``...`` -- it's part of the syntax) for this purpose. Example: .. code-block:: python def print_squared(t: Tuple[int, ...]) -> None: for n in t: print(n, n ** 2) print_squared(()) # OK print_squared((1, 3, 5)) # OK print_squared([1, 2]) # Error: only a tuple is valid .. 
note:: Usually it's a better idea to use ``Sequence[T]`` instead of ``Tuple[T, ...]``, as ``Sequence`` is also compatible with lists and other non-tuple sequences. .. note:: ``Tuple[...]`` is not valid as a base class outside stub files. This is a limitation of the ``typing`` module. One way to work around this is to use a named tuple as a base class (see section :ref:`named-tuples`). .. _callable-types: Callable types (and lambdas) **************************** You can pass around function objects and bound methods in statically typed code. The type of a function that accepts arguments ``A1``, ..., ``An`` and returns ``Rt`` is ``Callable[[A1, ..., An], Rt]``. Example: .. code-block:: python from typing import Callable def twice(i: int, next: Callable[[int], int]) -> int: return next(next(i)) def add(i: int) -> int: return i + 1 print(twice(3, add)) # 5 You can only have positional arguments, and only ones without default values, in callable types. These cover the vast majority of uses of callable types, but sometimes this isn't quite enough. Mypy recognizes a special form ``Callable[..., T]`` (with a literal ``...``) which can be used in less typical cases. It is compatible with arbitrary callable objects that return a type compatible with ``T``, independent of the number, types or kinds of arguments. Mypy lets you call such callable values with arbitrary arguments, without any checking -- in this respect they are treated similar to a ``(*args: Any, **kwargs: Any)`` function signature. Example: .. code-block:: python from typing import Callable def arbitrary_call(f: Callable[..., int]) -> int: return f('x') + f(y=2) # OK arbitrary_call(ord) # No static error, but fails at runtime arbitrary_call(open) # Error: does not return an int arbitrary_call(1) # Error: 'int' is not callable Lambdas are also supported. The lambda argument and return value types cannot be given explicitly; they are always inferred based on context using bidirectional type inference: .. code-block:: python l = map(lambda x: x + 1, [1, 2, 3]) # Infer x as int and l as List[int] If you want to give the argument or return value types explicitly, use an ordinary, perhaps nested function definition. .. _extended_callable: Extended Callable types *********************** As an experimental mypy extension, you can specify ``Callable`` types that support keyword arguments, optional arguments, and more. Where you specify the arguments of a Callable, you can choose to supply just the type of a nameless positional argument, or an "argument specifier" representing a more complicated form of argument. This allows one to more closely emulate the full range of possibilities given by the ``def`` statement in Python. As an example, here's a complicated function definition and the corresponding ``Callable``: .. code-block:: python from typing import Callable from mypy_extensions import (Arg, DefaultArg, NamedArg, DefaultNamedArg, VarArg, KwArg) def func(__a: int, # This convention is for nameless arguments b: int, c: int = 0, *args: int, d: int, e: int = 0, **kwargs: int) -> int: ... 
F = Callable[[int, # Or Arg(int) Arg(int, 'b'), DefaultArg(int, 'c'), VarArg(int), NamedArg(int, 'd'), DefaultNamedArg(int, 'e'), KwArg(int)], int] f: F = func Argument specifiers are special function calls that can specify the following aspects of an argument: - its type (the only thing that the basic format supports) - its name (if it has one) - whether it may be omitted - whether it may or must be passed using a keyword - whether it is a ``*args`` argument (representing the remaining positional arguments) - whether it is a ``**kwargs`` argument (representing the remaining keyword arguments) The following functions are available in ``mypy_extensions`` for this purpose: .. code-block:: python def Arg(type=Any, name=None): # A normal, mandatory, positional argument. # If the name is specified it may be passed as a keyword. def DefaultArg(type=Any, name=None): # An optional positional argument (i.e. with a default value). # If the name is specified it may be passed as a keyword. def NamedArg(type=Any, name=None): # A mandatory keyword-only argument. def DefaultNamedArg(type=Any, name=None): # An optional keyword-only argument (i.e. with a default value). def VarArg(type=Any): # A *args-style variadic positional argument. # A single VarArg() specifier represents all remaining # positional arguments. def KwArg(type=Any): # A **kwargs-style variadic keyword argument. # A single KwArg() specifier represents all remaining # keyword arguments. In all cases, the ``type`` argument defaults to ``Any``, and if the ``name`` argument is omitted the argument has no name (the name is required for ``NamedArg`` and ``DefaultNamedArg``). A basic ``Callable`` such as .. code-block:: python MyFunc = Callable[[int, str, int], float] is equivalent to the following: .. code-block:: python MyFunc = Callable[[Arg(int), Arg(str), Arg(int)], float] A ``Callable`` with unspecified argument types, such as .. code-block:: python MyOtherFunc = Callable[..., int] is (roughly) equivalent to .. code-block:: python MyOtherFunc = Callable[[VarArg(), KwArg()], int] .. note:: This feature is experimental. Details of the implementation may change and there may be unknown limitations. **IMPORTANT:** Each of the functions above currently just returns its ``type`` argument, so the information contained in the argument specifiers is not available at runtime. This limitation is necessary for backwards compatibility with the existing ``typing.py`` module as present in the Python 3.5+ standard library and distributed via PyPI. .. _union-types: Union types *********** Python functions often accept values of two or more different types. You can use overloading to model this in statically typed code, but union types can make code like this easier to write. Use the ``Union[T1, ..., Tn]`` type constructor to construct a union type. For example, the type ``Union[int, str]`` is compatible with both integers and strings. You can use an ``isinstance()`` check to narrow down the type to a specific type: .. code-block:: python from typing import Union def f(x: Union[int, str]) -> None: x + 1 # Error: str + int is not valid if isinstance(x, int): # Here type of x is int. x + 1 # OK else: # Here type of x is str. x + 'a' # OK f(1) # OK f('x') # OK f(1.1) # Error .. _optional: The type of None and optional types *********************************** Mypy treats the type of ``None`` as special. ``None`` is a valid value for every type, which resembles ``null`` in Java. 
Unlike Java, mypy doesn't treat primitives types specially: ``None`` is also valid for primitive types such as ``int`` and ``float``. .. note:: See :ref:`strict_optional` for an experimental mode which allows mypy to check ``None`` values precisely. When initializing a variable as ``None``, ``None`` is usually an empty place-holder value, and the actual value has a different type. This is why you need to annotate an attribute in a case like this: .. code-block:: python class A: def __init__(self) -> None: self.count = None # type: int Mypy will complain if you omit the type annotation, as it wouldn't be able to infer a non-trivial type for the ``count`` attribute otherwise. Mypy generally uses the first assignment to a variable to infer the type of the variable. However, if you assign both a ``None`` value and a non-``None`` value in the same scope, mypy can often do the right thing: .. code-block:: python def f(i: int) -> None: n = None # Inferred type int because of the assignment below if i > 0: n = i ... Often it's useful to know whether a variable can be ``None``. For example, this function accepts a ``None`` argument, but it's not obvious from its signature: .. code-block:: python def greeting(name: str) -> str: if name: return 'Hello, {}'.format(name) else: return 'Hello, stranger' print(greeting('Python')) # Okay! print(greeting(None)) # Also okay! Mypy lets you use ``Optional[t]`` to document that ``None`` is a valid argument type: .. code-block:: python from typing import Optional def greeting(name: Optional[str]) -> str: if name: return 'Hello, {}'.format(name) else: return 'Hello, stranger' Mypy treats this as semantically equivalent to the previous example, since ``None`` is implicitly valid for any type, but it's much more useful for a programmer who is reading the code. You can equivalently use ``Union[str, None]``, but ``Optional`` is shorter and more idiomatic. .. note:: ``None`` is also used as the return type for functions that don't return a value, i.e. that implicitly return ``None``. Mypy doesn't use ``NoneType`` for this, since it would look awkward, even though that is the real name of the type of ``None`` (try ``type(None)`` in the interactive interpreter to see for yourself). .. _strict_optional: Experimental strict optional type and None checking *************************************************** Currently, ``None`` is a valid value for each type, similar to ``null`` or ``NULL`` in many languages. However, you can use the experimental ``--strict-optional`` command line option to tell mypy that types should not include ``None`` by default. The ``Optional`` type modifier is then used to define a type variant that includes ``None``, such as ``Optional[int]``: .. code-block:: python from typing import Optional def f() -> Optional[int]: return None # OK def g() -> int: ... return None # Error: None not compatible with int Also, most operations will not be allowed on unguarded ``None`` or ``Optional`` values: .. code-block:: python def f(x: Optional[int]) -> int: return x + 1 # Error: Cannot add None and int Instead, an explicit ``None`` check is required. Mypy has powerful type inference that lets you use regular Python idioms to guard against ``None`` values. For example, mypy recognizes ``is None`` checks: .. code-block:: python def f(x: Optional[int]) -> int: if x is None: return 0 else: # The inferred type of x is just int here. return x + 1 Mypy will infer the type of ``x`` to be ``int`` in the else block due to the check against ``None`` in the if condition. .. 
note:: ``--strict-optional`` is experimental and still has known issues. .. _noreturn: The NoReturn type ***************** Mypy provides support for functions that never return. For example, a function that unconditionally raises an exception: .. code-block:: python from mypy_extensions import NoReturn def stop() -> NoReturn: raise Exception('no way') Mypy will ensure that functions annotated as returning ``NoReturn`` truly never return, either implicitly or explicitly. Mypy will also recognize that the code after calls to such functions is unreachable and will behave accordingly: .. code-block:: python def f(x: int) -> int: if x == 0: return x stop() return 'whatever works' # No error in an unreachable block Install ``mypy_extensions`` using pip to use ``NoReturn`` in your code. Python 3 command line: .. code-block:: text python3 -m pip install --upgrade mypy-extensions This works for Python 2: .. code-block:: text pip install --upgrade mypy-extensions Class name forward references ***************************** Python does not allow references to a class object before the class is defined. Thus this code does not work as expected: .. code-block:: python def f(x: A) -> None: # Error: Name A not defined .... class A: ... In cases like these you can enter the type as a string literal — this is a *forward reference*: .. code-block:: python def f(x: 'A') -> None: # OK ... class A: ... Of course, instead of using a string literal type, you could move the function definition after the class definition. This is not always desirable or even possible, though. Any type can be entered as a string literal, and you can combine string-literal types with non-string-literal types freely: .. code-block:: python def f(a: List['A']) -> None: ... # OK def g(n: 'int') -> None: ... # OK, though not useful class A: pass String literal types are never needed in ``# type:`` comments. String literal types must be defined (or imported) later *in the same module*. They cannot be used to leave cross-module references unresolved. (For dealing with import cycles, see :ref:`import-cycles`.) .. _type-aliases: Type aliases ************ In certain situations, type names may end up being long and painful to type: .. code-block:: python def f() -> Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]]: ... When cases like this arise, you can define a type alias by simply assigning the type to a variable: .. code-block:: python AliasType = Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]] # Now we can use AliasType in place of the full name: def f() -> AliasType: ... Type aliases can be generic, in this case they could be used in two variants: Subscripted aliases are equivalent to original types with substituted type variables, number of type arguments must match the number of free type variables in generic type alias. Unsubscripted aliases are treated as original types with free variables replaced with ``Any``. Examples (following `PEP 484 `_): .. code-block:: python from typing import TypeVar, Iterable, Tuple, Union, Callable S = TypeVar('S') TInt = Tuple[int, S] UInt = Union[S, int] CBack = Callable[..., S] def response(query: str) -> UInt[str]: # Same as Union[str, int] ... def activate(cb: CBack[S]) -> S: # Same as Callable[..., S] ... 
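    # (Explanatory comment added here, not in the original example:)
    # Using a generic alias without a subscript replaces its free type
    # variable with Any, as the next line shows.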
table_entry: TInt # Same as Tuple[int, Any] T = TypeVar('T', int, float, complex) Vec = Iterable[Tuple[T, T]] def inproduct(v: Vec[T]) -> T: return sum(x*y for x, y in v) def dilate(v: Vec[T], scale: T) -> Vec[T]: return ((x * scale, y * scale) for x, y in v) v1: Vec[int] = [] # Same as Iterable[Tuple[int, int]] v2: Vec = [] # Same as Iterable[Tuple[Any, Any]] v3: Vec[int, int] = [] # Error: Invalid alias, too many type arguments! Type aliases can be imported from modules like any names. Aliases can target another aliases (although building complex chains of aliases is not recommended, this impedes code readability, thus defeating the purpose of using aliases). Following previous examples: .. code-block:: python from typing import TypeVar, Generic, Optional from first_example import AliasType from second_example import Vec def fun() -> AliasType: ... T = TypeVar('T') class NewVec(Generic[T], Vec[T]): ... for i, j in NewVec[int](): ... OIntVec = Optional[Vec[int]] .. note:: A type alias does not create a new type. It's just a shorthand notation for another type -- it's equivalent to the target type. For generic type aliases this means that variance of type variables used for alias definition does not apply to aliases. A parameterized generic alias is treated simply as an original type with the corresponding type variables substituted. .. _newtypes: NewTypes ******** (Freely after `PEP 484 `_.) There are also situations where a programmer might want to avoid logical errors by creating simple classes. For example: .. code-block:: python class UserId(int): pass get_by_user_id(user_id: UserId): ... However, this approach introduces some runtime overhead. To avoid this, the typing module provides a helper function ``NewType`` that creates simple unique types with almost zero runtime overhead. Mypy will treat the statement ``Derived = NewType('Derived', Base)`` as being roughly equivalent to the following definition: .. code-block:: python class Derived(Base): def __init__(self, _x: Base) -> None: ... However, at runtime, ``NewType('Derived', Base)`` will return a dummy function that simply returns its argument: .. code-block:: python def Derived(_x): return _x Mypy will require explicit casts from ``int`` where ``UserId`` is expected, while implicitly casting from ``UserId`` where ``int`` is expected. Examples: .. code-block:: python from typing import NewType UserId = NewType('UserId', int) def name_by_id(user_id: UserId) -> str: ... UserId('user') # Fails type check name_by_id(42) # Fails type check name_by_id(UserId(42)) # OK num = UserId(5) + 1 # type: int ``NewType`` accepts exactly two arguments. The first argument must be a string literal containing the name of the new type and must equal the name of the variable to which the new type is assigned. The second argument must be a properly subclassable class, i.e., not a type construct like ``Union``, etc. The function returned by ``NewType`` accepts only one argument; this is equivalent to supporting only one constructor accepting an instance of the base class (see above). Example: .. 
code-block:: python from typing import NewType class PacketId: def __init__(self, major: int, minor: int) -> None: self._major = major self._minor = minor TcpPacketId = NewType('TcpPacketId', PacketId) packet = PacketId(100, 100) tcp_packet = TcpPacketId(packet) # OK tcp_packet = TcpPacketId(127, 0) # Fails in type checker and at runtime Both ``isinstance`` and ``issubclass``, as well as subclassing will fail for ``NewType('Derived', Base)`` since function objects don't support these operations. .. note:: Note that unlike type aliases, ``NewType`` will create an entirely new and unique type when used. The intended purpose of ``NewType`` is to help you detect cases where you accidentally mixed together the old base type and the new derived type. For example, the following will successfully typecheck when using type aliases: .. code-block:: python UserId = int def name_by_id(user_id: UserId) -> str: ... name_by_id(3) # ints and UserId are synonymous But a similar example using ``NewType`` will not typecheck: .. code-block:: python from typing import NewType UserId = NewType('UserId', int) def name_by_id(user_id: UserId) -> str: ... name_by_id(3) # int is not the same as UserId .. _named-tuples: Named tuples ************ Mypy recognizes named tuples and can type check code that defines or uses them. In this example, we can detect code trying to access a missing attribute: .. code-block:: python Point = namedtuple('Point', ['x', 'y']) p = Point(x=1, y=2) print(p.z) # Error: Point has no attribute 'z' If you use ``namedtuple`` to define your named tuple, all the items are assumed to have ``Any`` types. That is, mypy doesn't know anything about item types. You can use ``typing.NamedTuple`` to also define item types: .. code-block:: python from typing import NamedTuple Point = NamedTuple('Point', [('x', int), ('y', int)]) p = Point(x=1, y='x') # Argument has incompatible type "str"; expected "int" Python 3.6 will have an alternative, class-based syntax for named tuples with types. Mypy supports it already: .. code-block:: python from typing import NamedTuple class Point(NamedTuple): x: int y: int p = Point(x=1, y='x') # Argument has incompatible type "str"; expected "int" .. _type-of-class: The type of class objects ************************* (Freely after `PEP 484 `_.) Sometimes you want to talk about class objects that inherit from a given class. This can be spelled as ``Type[C]`` where ``C`` is a class. In other words, when ``C`` is the name of a class, using ``C`` to annotate an argument declares that the argument is an instance of ``C`` (or of a subclass of ``C``), but using ``Type[C]`` as an argument annotation declares that the argument is a class object deriving from ``C`` (or ``C`` itself). For example, assume the following classes: .. code-block:: python class User: # Defines fields like name, email class BasicUser(User): def upgrade(self): """Upgrade to Pro""" class ProUser(User): def pay(self): """Pay bill""" Note that ``ProUser`` doesn't inherit from ``BasicUser``. Here's a function that creates an instance of one of these classes if you pass it the right class object: .. code-block:: python def new_user(user_class): user = user_class() # (Here we could write the user object to a database) return user How would we annotate this function? Without ``Type[]`` the best we could do would be: .. 
code-block:: python def new_user(user_class: type) -> User: # Same implementation as before This seems reasonable, except that in the following example, mypy doesn't see that the ``buyer`` variable has type ``ProUser``: .. code-block:: python buyer = new_user(ProUser) buyer.pay() # Rejected, not a method on User However, using ``Type[]`` and a type variable with an upper bound (see :ref:`type-variable-upper-bound`) we can do better: .. code-block:: python U = TypeVar('U', bound=User) def new_user(user_class: Type[U]) -> U: # Same implementation as before Now mypy will infer the correct type of the result when we call ``new_user()`` with a specific subclass of ``User``: .. code-block:: python beginner = new_user(BasicUser) # Inferred type is BasicUser beginner.upgrade() # OK .. note:: The value corresponding to ``Type[C]`` must be an actual class object that's a subtype of ``C``. Its constructor must be compatible with the constructor of ``C``. If ``C`` is a type variable, its upper bound must be a class object. For more details about ``Type[]`` see `PEP 484 `_. .. _text-and-anystr: Text and AnyStr *************** Sometimes you may want to write a function which will accept only unicode strings. This can be challenging to do in a codebase intended to run in both Python 2 and Python 3 since ``str`` means something different in both versions and ``unicode`` is not a keyword in Python 3. To help solve this issue, use ``typing.Text`` which is aliased to ``unicode`` in Python 2 and to ``str`` in Python 3. This allows you to indicate that a function should accept only unicode strings in a cross-compatible way: .. code-block:: python from typing import Text def unicode_only(s: Text) -> Text: return s + u'\u2713' In other cases, you may want to write a function that will work with any kind of string but will not let you mix two different string types. To do so use ``typing.AnyStr``: .. code-block:: python from typing import AnyStr def concat(x: AnyStr, y: AnyStr) -> AnyStr: return x + y concat('a', 'b') # Okay concat(b'a', b'b') # Okay concat('a', b'b') # Error: cannot mix bytes and unicode For more details, see :ref:`type-variable-value-restriction`. .. note:: How ``bytes``, ``str``, and ``unicode`` are handled between Python 2 and Python 3 may change in future versions of mypy. .. _generators: Generators ********** A basic generator that only yields values can be annotated as having a return type of either ``Iterator[YieldType]`` or ``Iterable[YieldType]``. For example: .. code-block:: python def squares(n: int) -> Iterator[int]: for i in range(n): yield i * i If you want your generator to accept values via the ``send`` method or return a value, you should use the ``Generator[YieldType, SendType, ReturnType]`` generic type instead. For example: .. code-block:: python def echo_round() -> Generator[int, float, str]: sent = yield 0 while sent >= 0: sent = yield round(sent) return 'Done' Note that unlike many other generics in the typing module, the ``SendType`` of ``Generator`` behaves contravariantly, not covariantly or invariantly. If you do not plan on receiving or returning values, then set the ``SendType`` or ``ReturnType`` to ``None``, as appropriate. For example, we could have annotated the first example as the following: .. code-block:: python def squares(n: int) -> Generator[int, None, None]: for i in range(n): yield i * i .. _async-and-await: Typing async/await ****************** Mypy supports the ability to type coroutines that use the ``async/await`` syntax introduced in Python 3.5. 
For more information regarding coroutines and this new syntax, see `PEP 492 `_. Functions defined using ``async def`` are typed just like normal functions. The return type annotation should be the same as the type of the value you expect to get back when ``await``-ing the coroutine. .. code-block:: python import asyncio async def format_string(tag: str, count: int) -> str: return 'T-minus {} ({})'.format(count, tag) async def countdown_1(tag: str, count: int) -> str: while count > 0: my_str = await format_string(tag, count) # has type 'str' print(my_str) await asyncio.sleep(0.1) count -= 1 return "Blastoff!" loop = asyncio.get_event_loop() loop.run_until_complete(countdown_1("Millennium Falcon", 5)) loop.close() The result of calling an ``async def`` function *without awaiting* will be a value of type ``Awaitable[T]``: .. code-block:: python my_coroutine = countdown_1("Millennium Falcon", 5) reveal_type(my_coroutine) # has type 'Awaitable[str]' .. note:: :ref:`reveal_type() ` displays the inferred static type of an expression. If you want to use coroutines in older versions of Python that do not support the ``async def`` syntax, you can instead use the ``@asyncio.coroutine`` decorator to convert a generator into a coroutine. Note that we set the ``YieldType`` of the generator to be ``Any`` in the following example. This is because the exact yield type is an implementation detail of the coroutine runner (e.g. the ``asyncio`` event loop) and your coroutine shouldn't have to know or care about what precisely that type is. .. code-block:: python from typing import Any, Generator import asyncio @asyncio.coroutine def countdown_2(tag: str, count: int) -> Generator[Any, None, str]: while count > 0: print('T-minus {} ({})'.format(count, tag)) yield from asyncio.sleep(0.1) count -= 1 return "Blastoff!" loop = asyncio.get_event_loop() loop.run_until_complete(countdown_2("USS Enterprise", 5)) loop.close() As before, the result of calling a generator decorated with ``@asyncio.coroutine`` will be a value of type ``Awaitable[T]``. .. note:: At runtime, you are allowed to add the ``@asyncio.coroutine`` decorator to both functions and generators. This is useful when you want to mark a work-in-progress function as a coroutine, but have not yet added ``yield`` or ``yield from`` statements: .. code-block:: python import asyncio @asyncio.coroutine def serialize(obj: object) -> str: # todo: add yield/yield from to turn this into a generator return "placeholder" However, mypy currently does not support converting functions into coroutines. Support for this feature will be added in a future version, but for now, you can manually force the function to be a generator by doing something like this: .. code-block:: python from typing import Generator import asyncio @asyncio.coroutine def serialize(obj: object) -> Generator[None, None, str]: # todo: add yield/yield from to turn this into a generator if False: yield return "placeholder" You may also choose to create a subclass of ``Awaitable`` instead: .. code-block:: python from typing import Any, Awaitable, Generator import asyncio class MyAwaitable(Awaitable[str]): def __init__(self, tag: str, count: int) -> None: self.tag = tag self.count = count def __await__(self) -> Generator[Any, None, str]: for i in range(self.count, 0, -1): print('T-minus {} ({})'.format(i, self.tag)) yield from asyncio.sleep(0.1) return "Blastoff!"
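    # countdown_3 below simply wraps MyAwaitable, so awaiting its result drives __await__ above.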
def countdown_3(tag: str, count: int) -> Awaitable[str]: return MyAwaitable(tag, count) loop = asyncio.get_event_loop() loop.run_until_complete(countdown_3("Heart of Gold", 5)) loop.close() To create an iterable coroutine, subclass ``AsyncIterator``: .. code-block:: python from typing import Optional, AsyncIterator import asyncio class arange(AsyncIterator[int]): def __init__(self, start: int, stop: int, step: int) -> None: self.start = start self.stop = stop self.step = step self.count = start - step def __aiter__(self) -> AsyncIterator[int]: return self async def __anext__(self) -> int: self.count += self.step if self.count == self.stop: raise StopAsyncIteration else: return self.count async def countdown_4(tag: str, n: int) -> str: async for i in arange(n, 0, -1): print('T-minus {} ({})'.format(i, tag)) await asyncio.sleep(0.1) return "Blastoff!" loop = asyncio.get_event_loop() loop.run_until_complete(countdown_4("Serenity", 5)) loop.close() For a more concrete example, the mypy repo has a toy webcrawler that demonstrates how to work with coroutines. One version `uses async/await `_ and one `uses yield from `_. .. _typeddict: TypedDict ********* .. note:: TypedDict is an officially supported feature, but it is still experimental. Python programs often use dictionaries with string keys to represent objects. Here is a typical example: .. code-block:: python movie = {'name': 'Blade Runner', 'year': 1982} Only a fixed set of string keys is expected (``'name'`` and ``'year'`` above), and each key has an independent value type (``str`` for ``'name'`` and ``int`` for ``'year'`` above). We've previously seen the ``Dict[K, V]`` type, which lets you declare uniform dictionary types, where every value has the same type, and arbitrary keys are supported. This is clearly not a good fit for ``movie`` above. Instead, you can use a ``TypedDict`` to give a precise type for objects like ``movie``, where the type of each dictionary value depends on the key: .. code-block:: python from mypy_extensions import TypedDict Movie = TypedDict('Movie', {'name': str, 'year': int}) movie = {'name': 'Blade Runner', 'year': 1982} # type: Movie ``Movie`` is a TypedDict type with two items: ``'name'`` (with type ``str``) and ``'year'`` (with type ``int``). Note that we used an explicit type annotation for the ``movie`` variable. This type annotation is important -- without it, mypy will try to infer a regular, uniform ``Dict`` type for ``movie``, which is not what we want here. .. note:: If you pass a TypedDict object as an argument to a function, no type annotation is usually necessary since mypy can infer the desired type based on the declared argument type. Also, if an assignment target has been previously defined, and it has a TypedDict type, mypy will treat the assigned value as a TypedDict, not ``Dict``. Now mypy will recognize these as valid: .. code-block:: python name = movie['name'] # Okay; type of name is str year = movie['year'] # Okay; type of year is int Mypy will detect an invalid key as an error: .. code-block:: python director = movie['director'] # Error: 'director' is not a valid key Mypy will also reject a runtime-computed expression as a key, as it can't verify that it's a valid key. You can only use string literals as TypedDict keys. The ``TypedDict`` type object can also act as a constructor. It returns a normal ``dict`` object at runtime -- a ``TypedDict`` does not define a new runtime type: .. 
code-block:: python toy_story = Movie(name='Toy Story', year=1995) This is equivalent to just constructing a dictionary directly using ``{ ... }`` or ``dict(key=value, ...)``. The constructor form is sometimes convenient, since it can be used without a type annotation, and it also makes the type of the object explicit. Like all types, TypedDicts can be used as components to build arbitrarily complex types. For example, you can define nested TypedDicts and containers with TypedDict items. Unlike most other types, mypy uses structural compatibility checking (or structural subtyping) with TypedDicts. A TypedDict object with extra items is compatible with a narrower TypedDict, assuming item types are compatible (*totality* also affects subtyping, as discussed below). .. note:: You need to install ``mypy_extensions`` using pip to use ``TypedDict``: .. code-block:: text python3 -m pip install --upgrade mypy-extensions Or, if you are using Python 2: .. code-block:: text pip install --upgrade mypy-extensions Totality -------- By default mypy ensures that a TypedDict object has all the specified keys. This will be flagged as an error: .. code-block:: python # Error: 'year' missing toy_story = {'name': 'Toy Story'} # type: Movie Sometimes you want to allow keys to be left out when creating a TypedDict object. You can provide the ``total=False`` argument to ``TypedDict(...)`` to achieve this: .. code-block:: python GuiOptions = TypedDict( 'GuiOptions', {'language': str, 'color': str}, total=False) options = {} # type: GuiOptions # Okay options['language'] = 'en' You may need to use ``get()`` to access items of a partial (non-total) TypedDict, since indexing using ``[]`` could fail at runtime. However, mypy still lets you use ``[]`` with a partial TypedDict -- you just need to be careful with it, as it could result in a ``KeyError``. Requiring ``get()`` everywhere would be too cumbersome. (Note that you are free to use ``get()`` with total TypedDicts as well.) Keys that aren't required are shown with a ``?`` in error messages: .. code-block:: python # Revealed type is 'TypedDict('GuiOptions', {'language'?: builtins.str, # 'color'?: builtins.str})' reveal_type(options) Totality also affects structural compatibility. You can't use a partial TypedDict when a total one is expected. Also, a total TypedDict is not valid when a partial one is expected. Class-based syntax ------------------ Python 3.6 supports an alternative, class-based syntax to define a TypedDict. This means that your code must be checked as if it were Python 3.6 (using the ``--python-version`` flag on the command line, for example). Simply running mypy on Python 3.6 is insufficient. .. code-block:: python from mypy_extensions import TypedDict class Movie(TypedDict): name: str year: int The above definition is equivalent to the original ``Movie`` definition. It doesn't actually define a real class. This syntax also supports a form of inheritance -- subclasses can define additional items. However, this is primarily a notational shortcut. Since mypy uses structural compatibility with TypedDicts, inheritance is not required for compatibility. Here is an example of inheritance: .. code-block:: python class Movie(TypedDict): name: str year: int class BookBasedMovie(Movie): based_on: str Now ``BookBasedMovie`` has keys ``name``, ``year`` and ``based_on``.
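Because compatibility is structural, a ``BookBasedMovie`` object can also be used wherever a plain ``Movie`` is expected. Here is a small sketch that makes this concrete; the ``get_name`` helper and the example values are illustrative only and are not part of the definitions above:

.. code-block:: python

    def get_name(m: Movie) -> str:
        return m['name']

    # The constructor form infers the TypedDict type without an annotation.
    book_movie = BookBasedMovie(name='Solaris', year=1972,
                                based_on='Solaris (novel)')

    get_name(book_movie)  # Okay: BookBasedMovie has all the keys Movie requires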
Mixing required and non-required items -------------------------------------- In addition to allowing reuse across TypedDict types, inheritance also allows you to mix required and non-required (using ``total=False``) items in a single TypedDict. Example: .. code-block:: python class MovieBase(TypedDict): name: str year: int class Movie(MovieBase, total=False): based_on: str Now ``Movie`` has required keys ``name`` and ``year``, while ``based_on`` can be left out when constructing an object. A TypedDict with a mix of required and non-required keys, such as ``Movie`` above, will only be compatible with another TypedDict if all required keys in the other TypedDict are required keys in the first TypedDict, and all non-required keys of the other TypedDict are also non-required keys in the first TypedDict. mypy-0.560/docs/source/python2.rst0000644€tŠÔÚ€2›s®0000001111213215007205023250 0ustar jukkaDROPBOX\Domain Users00000000000000.. _python2: Type checking Python 2 code =========================== For code that needs to be Python 2.7 compatible, function type annotations are given in comments, since the function annotation syntax was introduced in Python 3. The comment-based syntax is specified in `PEP 484 `_. Run mypy in Python 2 mode by using the ``--py2`` option:: $ mypy --py2 program.py To run your program, you must have the ``typing`` module in your Python 2 module search path. Use ``pip install typing`` to install the module. This also works for Python 3 versions prior to 3.5 that don't include ``typing`` in the standard library. The example below illustrates the Python 2 function type annotation syntax. This syntax is also valid in Python 3 mode: .. code-block:: python from typing import List def hello(): # type: () -> None print 'hello' class Example: def method(self, lst, opt=0, *args, **kwargs): # type: (List[str], int, *str, **bool) -> int """Docstring comes after type comment.""" ... It's worth going through these details carefully to avoid surprises: - You don't provide an annotation for the ``self`` / ``cls`` variable of methods. - Docstring always comes *after* the type comment. - For ``*args`` and ``**kwargs`` the type should be prefixed with ``*`` or ``**``, respectively (except when using the multi-line annotation syntax described below). Again, the above example illustrates this. - Things like ``Any`` must be imported from ``typing``, even if they are only used in comments. - In Python 2 mode ``str`` is implicitly promoted to ``unicode``, similar to how ``int`` is compatible with ``float``. This is unlike ``bytes`` and ``str`` in Python 3, which are incompatible. ``bytes`` in Python 2 is equivalent to ``str``. (This might change in the future.) .. _multi_line_annotation: Multi-line Python 2 function annotations ---------------------------------------- Mypy also supports a multi-line comment annotation syntax. You can provide a separate annotation for each argument using the variable annotation syntax. When using the single-line annotation syntax described above, functions with long argument lists tend to result in overly long type comments and it's often tricky to see which argument type corresponds to which argument. The alternative, multi-line annotation syntax makes long annotations easier to read and write. Here is an example (from PEP 484): .. code-block:: python def send_email(address, # type: Union[str, List[str]] sender, # type: str cc, # type: Optional[List[str]] bcc, # type: Optional[List[str]] subject='', body=None # type: List[str] ): # type: (...) 
-> bool """Send an email message. Return True if successful.""" You write a separate annotation for each function argument on the same line as the argument. Each annotation must be on a separate line. If you leave out an annotation for an argument, it defaults to ``Any``. You provide a return type annotation in the body of the function using the form ``# type: (...) -> rt``, where ``rt`` is the return type. Note that the return type annotation contains literal three dots. Note that when using multi-line comments, you do not need to prefix the types of your ``*arg`` and ``**kwarg`` parameters with ``*`` or ``**``. For example, here is how you would annotate the first example using multi-line comments. .. code-block:: python from typing import List class Example: def method(self, lst, # type: List[str] opt=0, # type: int *args, # type: str **kwargs # type: bool ): # type: (...) -> int """Docstring comes after type comment.""" ... Additional notes ---------------- - You should include types for arguments with default values in the annotation. The ``opt`` argument of ``method`` in the example at the beginning of this section is an example of this. - The annotation can be on the same line as the function header or on the following line. - The type syntax for variables is the same as for Python 3. - You don't need to use string literal escapes for forward references within comments. - Mypy uses a separate set of library stub files in `typeshed `_ for Python 2. Library support may vary between Python 2 and Python 3. mypy-0.560/docs/source/python36.rst0000644€tŠÔÚ€2›s®0000000613713215007205023352 0ustar jukkaDROPBOX\Domain Users00000000000000.. _python-36: New features in Python 3.6 ========================== Python 3.6 was `released `_ in December 2016. As of mypy 0.510 all language features new in Python 3.6 are supported. Syntax for variable annotations (`PEP 526 `_) --------------------------------------------------------------------------------------- Python 3.6 feature: variables (in global, class or local scope) can now have type annotations using either of the two forms: .. code-block:: python from typing import Optional foo: Optional[int] bar: List[str] = [] Mypy fully supports this syntax, interpreting them as equivalent to .. code-block:: python foo = None # type: Optional[int] bar = [] # type: List[str] .. _class-var: An additional feature defined in PEP 526 is also supported: you can mark names intended to be used as class variables with ``ClassVar``. In a pinch you can also use ClassVar in ``# type`` comments. Example: .. code-block:: python from typing import ClassVar class C: x: int # instance variable y: ClassVar[int] # class variable z = None # type: ClassVar[int] def foo(self) -> None: self.x = 0 # OK self.y = 0 # Error: Cannot assign to class variable "y" via instance C.y = 0 # This is OK Literal string formatting (`PEP 498 `_) --------------------------------------------------------------------------------- Python 3.6 feature: string literals of the form ``f"text {expression} text"`` evaluate ``expression`` using the current evaluation context (locals and globals). Mypy fully supports this syntax and type-checks the ``expression``. Underscores in numeric literals (`PEP 515 `_) --------------------------------------------------------------------------------------- Python 3.6 feature: numeric literals can contain underscores, e.g. ``1_000_000``. Mypy fully supports this syntax: .. 
code-block:: python precise_val = 1_000_000.000_000_1 hexes: List[int] = [] hexes.append(0x_FF_FF_FF_FF) .. _async_generators_and_comprehensions: Asynchronous generators (`PEP 525 `_) and comprehensions (`PEP 530 `_) ---------------------------------------------------------------------------------------------------------------------------------------------------------- Python 3.6 allows coroutines defined with ``async def`` (PEP 492) to be generators, i.e. contain ``yield`` expressions, and introduces a syntax for asynchronous comprehensions. Mypy fully supports these features, for example: .. code-block:: python from typing import AsyncIterator async def gen() -> AsyncIterator[bytes]: lst = [b async for b in gen()] # Inferred type is "List[bytes]" yield 'no way' # Error: Incompatible types (got "str", expected "bytes") New named tuple syntax ---------------------- Python 3.6 supports an alternative syntax for named tuples. See :ref:`named-tuples`. mypy-0.560/docs/source/revision_history.rst0000644€tŠÔÚ€2›s®0000001546513215007206025304 0ustar jukkaDROPBOX\Domain Users00000000000000Revision history ================ List of major changes: - December 2017 * Publish ``mypy`` version 0.560 on PyPI. * Various types in ``typing`` that used to be ABCs :ref:`are now protocols ` and support :ref:`structural subtyping `. * Explain how to :ref:`silence invalid complaints ` by linters about unused imports due to type comments. - November 2017 * Publish ``mypy`` version 0.550 on PyPI. * Running mypy now requires Python 3.4 or higher. However Python 3.3 is still valid for the target of the analysis (i.e. the ``--python-version`` flag). * Split ``--disallow-any`` flag into :ref:`separate boolean flags `. * The ``--old-html-report`` flag was removed. - October 2017 * Publish ``mypy`` version 0.540 on PyPI. * Publish ``mypy`` version 0.530 on PyPI. - August-September 2017 * Add :ref:`protocol-types`. * Other updates to :ref:`command-line`: * Add ``--warn-unused-configs``. * Add ``--disallow-untyped-decorators``. * Add ``--disallow-incomplete-defs``. - July 2017 * Publish ``mypy`` version 0.521 on PyPI. * Publish ``mypy`` version 0.520 on PyPI. * Add :ref:`fine-grained control of Any types `. * Add :ref:`typeddict`. * Other updates to :ref:`command-line`: * Add ``--no-implicit-optional``. * Add ``--shadow-file``. * Add ``--no-incremental``. - May 2017 * Publish ``mypy`` version 0.510 on PyPI. * Remove option ``--no-fast-parser``. * Deprecate option ``--strict-boolean``. * Drop support for Python 3.2 as type checking target. * Add support for :ref:`overloaded functions with implementations `. * Add :ref:`extended_callable`. * Add :ref:`async_generators_and_comprehensions`. * Add :ref:`ClassVar `. * Add :ref:`quick mode `. - March 2017 * Publish ``mypy`` version 0.500 on PyPI. * Add :ref:`noreturn`. * Add :ref:`generic-subclasses`. * Add :ref:`variance-of-generics`. * Add :ref:`variance`. * Updates to :ref:`python-36`. * Updates to :ref:`integrating-mypy`. * Updates to :ref:`command-line`: * Add option ``--warn-return-any``. * Add option ``--strict-boolean``. * Add option ``--strict``. * Updates to :ref:`config-file`: * ``warn_no_return`` is on by default. * Read settings from ``setup.cfg`` if ``mypy.ini`` does not exist. * Add option ``warn_return_any``. * Add option ``strict_boolean``. - January 2017 * Publish ``mypy`` version 0.470 on PyPI. * Change package name from ``mypy-lang`` to ``mypy``. * Add :ref:`integrating-mypy`. * Add :ref:`cheat-sheet-py3`. * Major update to :ref:`finding-imports`. 
* Add :ref:`--ignore-missing-imports `. * Updates to :ref:`config-file`. * Document underscore support in numeric literals. * Document that arguments prefixed with ``__`` are positional-only. * Document that ``--hide-error-context`` is now on by default, and there is a new flag ``--show-error-context``. * Add ``ignore_errors`` to :ref:`per-module-flags`. - November 2016 * Publish ``mypy-lang`` version 0.4.6 on PyPI. * Add :ref:`getting-started`. * Add :ref:`generic-methods-and-generic-self` (experimental). * Add :ref:`declaring-decorators`. * Discuss generic type aliases in :ref:`type-aliases`. * Discuss Python 3.6 named tuple syntax in :ref:`named-tuples`. * Updates to :ref:`common_issues`. * Updates to :ref:`python-36`. * Updates to :ref:`command-line`: * ``--custom-typeshed-dir`` * ``--junit-xml`` * ``--find-occurrences`` * ``--cobertura-xml-report`` * ``--warn-no-return`` * Updates to :ref:`config-file`: * Sections with fnmatch patterns now use module name patterns (previously they were path patterns). * Added ``custom_typeshed_dir``, ``mypy_path`` and ``show_column_numbers``. * Mention the magic ``MYPY`` constant in :ref:`import-cycles`. - October 2016 * Publish ``mypy-lang`` version 0.4.5 on PyPI. * Add :ref:`python-36`. * Add :ref:`config-file`. * Updates to :ref:`command-line`: ``--strict-optional-white-list``, ``--disallow-subclassing-any``, ``--config-file``, ``@flagfile``, ``--hide-error-context`` (replaces ``--suppress-error-context``), ``--show-column-numbers`` and ``--scripts-are-modules``. * Mention ``typing.TYPE_CHECKING`` in :ref:`import-cycles`. - August 2016 * Publish ``mypy-lang`` version 0.4.4 on PyPI. * Add :ref:`newtypes`. * Add :ref:`async-and-await`. * Add :ref:`text-and-anystr`. * Add :ref:`version_and_platform_checks`. - July 2016 * Publish ``mypy-lang`` version 0.4.3 on PyPI. * Add :ref:`strict_optional`. * Add :ref:`multi_line_annotation`. - June 2016 * Publish ``mypy-lang`` version 0.4.2 on PyPI. * Add :ref:`type-of-class`. * Add :ref:`cheat-sheet-py2`. * Add :ref:`reveal-type`. - May 2016 * Publish ``mypy-lang`` version 0.4 on PyPI. * Add :ref:`type-variable-upper-bound`. * Document :ref:`command-line`. - Feb 2016 * Publish ``mypy-lang`` version 0.3.1 on PyPI. * Document Python 2 support. - Nov 2015 Add :ref:`library-stubs`. - Jun 2015 Remove ``Undefined`` and ``Dynamic``, as they are not in PEP 484. - Apr 2015 Publish ``mypy-lang`` version 0.2.0 on PyPI. - Mar 2015 Update documentation to reflect PEP 484: * Add :ref:`named-tuples` and :ref:`optional`. * Do not mention type application syntax (for example, ``List[int]()``), as it's no longer supported, due to PEP 484 compatibility. * Rename ``typevar`` to ``TypeVar``. * Document ``# type: ignore`` which allows locally ignoring spurious errors (:ref:`silencing_checker`). * No longer mention ``Any(x)`` as a valid cast, as it will be phased out soon. * Mention the new ``.pyi`` stub file extension. Stubs can live in the same directory as the rest of the program. - Jan 2015 Mypy moves closer to PEP 484: * Add :ref:`type-aliases`. * Update discussion of overloading -- it's now only supported in stubs. * Rename ``Function[...]`` to ``Callable[...]``. - Dec 2014 Publish mypy version 0.1.0 on PyPI. - Oct 2014 Major restructuring. Split the HTML documentation into multiple pages. - Sep 2014 Migrated docs to Sphinx. - Aug 2014 Don't discuss native semantics. There is only Python semantics. - Jul 2013 Rewrite to use new syntax. Shift focus to discussing Python semantics. 
Add more content, including short discussions of :ref:`generic-functions` and :ref:`union-types`. mypy-0.560/docs/source/supported_python_features.rst0000644€tŠÔÚ€2›s®0000000167213215007205027203 0ustar jukkaDROPBOX\Domain Users00000000000000Supported Python features and modules ===================================== A list of unsupported Python features is maintained in the mypy wiki: - `Unsupported Python features `_ Runtime definition of methods and functions ******************************************* By default, mypy will complain if you add a function to a class or module outside its definition -- but only if this is visible to the type checker. This only affects static checking, as mypy performs no additional type checking at runtime. You can easily work around this. For example, you can use dynamically typed code or values with ``Any`` types, or you can use ``setattr`` or other introspection features. However, you need to be careful if you decide to do this. If used indiscriminately, you may have difficulty using static typing effectively, since the type checker cannot see functions defined at runtime. mypy-0.560/docs/source/type_inference_and_annotations.rst0000644€tŠÔÚ€2›s®0000001370313215007205030113 0ustar jukkaDROPBOX\Domain Users00000000000000Type inference and type annotations =================================== Type inference ************** The initial assignment defines a variable. If you do not explicitly specify the type of the variable, mypy infers the type based on the static type of the value expression: .. code-block:: python i = 1 # Infer type int for i l = [1, 2] # Infer type List[int] for l Type inference is bidirectional and takes context into account. For example, the following is valid: .. code-block:: python def f(l: List[object]) -> None: l = [1, 2] # Infer type List[object] for [1, 2] In an assignment, the type context is determined by the assignment target. In this case this is ``l``, which has the type ``List[object]``. The value expression ``[1, 2]`` is type checked in this context and given the type ``List[object]``. In the previous example we introduced a new variable ``l``, and here the type context was empty. Note that the following is not valid, since ``List[int]`` is not compatible with ``List[object]``: .. code-block:: python def f(l: List[object], k: List[int]) -> None: l = k # Type check error: incompatible types in assignment The reason why the above assignment is disallowed is that allowing the assignment could result in non-int values stored in a list of ``int``: .. code-block:: python def f(l: List[object], k: List[int]) -> None: l = k l.append('x') print(k[-1]) # Ouch; a string in List[int] You can still run the above program; it prints ``x``. This illustrates the fact that static types are used during type checking, but they do not affect the runtime behavior of programs. You can run programs with type check failures, which is often very handy when performing a large refactoring. Thus you can always 'work around' the type system, and it doesn't really limit what you can do in your program. Type inference is not used in dynamically typed functions (those without an explicit return type) — every local variable type defaults to ``Any``, which is discussed later. Explicit types for variables **************************** You can override the inferred type of a variable by using a special type comment after an assignment statement: .. 
code-block:: python x = 1 # type: Union[int, str] Without the type comment, the type of ``x`` would be just ``int``. We use an annotation to give it a more general type ``Union[int, str]``. Mypy checks that the type of the initializer is compatible with the declared type. The following example is not valid, since the initializer is a floating point number, and this is incompatible with the declared type: .. code-block:: python x = 1.1 # type: Union[int, str] # Error! Python 3.6 introduced a new syntax for variable annotations, which resembles function annotations: .. code-block:: python x: Union[int, str] = 1 We'll use both syntax variants in examples. The syntax variants are mostly interchangeable, but the Python 3.6 syntax allows defining the type of a variable without initialization, which is not possible with the comment-based syntax: .. code-block:: python x: str # Declare type of 'x' without initialization .. note:: The best way to think about this is that the type comment sets the type of the variable, not the type of the expression. To force the type of an expression you can use ``cast(, )``. Explicit types for collections ****************************** The type checker cannot always infer the type of a list or a dictionary. This often arises when creating an empty list or dictionary and assigning it to a new variable that doesn't have an explicit variable type. In these cases you can give the type explicitly using a type annotation comment: .. code-block:: python l = [] # type: List[int] # Create empty list with type List[int] d = {} # type: Dict[str, int] # Create empty dictionary (str -> int) Similarly, you can also give an explicit type when creating an empty set: .. code-block:: python s = set() # type: Set[int] Declaring multiple variable types at a time ******************************************* You can declare more than a single variable at a time. In order to nicely work with multiple assignment, you must give each variable a type separately: .. code-block:: python i, found = 0, False # type: int, bool You can optionally use parentheses around the types, assignment targets and assigned expression: .. code-block:: python i, found = 0, False # type: (int, bool) # OK (i, found) = 0, False # type: int, bool # OK i, found = (0, False) # type: int, bool # OK (i, found) = (0, False) # type: (int, bool) # OK Starred expressions ******************* In most cases, mypy can infer the type of starred expressions from the right-hand side of an assignment, but not always: .. code-block:: python a, *bs = 1, 2, 3 # OK p, q, *rs = 1, 2 # Error: Type of rs cannot be inferred On first line, the type of ``bs`` is inferred to be ``List[int]``. However, on the second line, mypy cannot infer the type of ``rs``, because there is no right-hand side value for ``rs`` to infer the type from. In cases like these, the starred expression needs to be annotated with a starred type: .. code-block:: python p, q, *rs = 1, 2 # type: int, int, *List[int] Here, the type of ``rs`` is set to ``List[int]``. Types in stub files ******************* :ref:`Stub files ` are written in normal Python 3 syntax, but generally leaving out runtime logic like variable initializers, function bodies, and default arguments, replacing them with ellipses. In this example, each ellipsis ``...`` is literally written in the stub file as three dots: .. code-block:: python x = ... # type: int def afunc(code: str) -> int: ... def afunc(a: int, b: int=...) -> int: ... .. 
note:: The ellipsis ``...`` is also used with a different meaning in :ref:`callable types ` and :ref:`tuple types `. mypy-0.560/extensions/0000755€tŠÔÚ€2›s®0000000000013215007242021067 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/extensions/LICENSE0000644€tŠÔÚ€2›s®0000000226413215007205022077 0ustar jukkaDROPBOX\Domain Users00000000000000Mypy extensions are licensed under the terms of the MIT license, reproduced below. = = = = = The MIT License Copyright (c) 2016-2017 Jukka Lehtosalo and contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. = = = = = mypy-0.560/extensions/MANIFEST.in0000644€tŠÔÚ€2›s®0000000002013215007205022614 0ustar jukkaDROPBOX\Domain Users00000000000000include LICENSE mypy-0.560/extensions/mypy_extensions.py0000644€tŠÔÚ€2›s®0000001105613215007205024720 0ustar jukkaDROPBOX\Domain Users00000000000000"""Defines experimental extensions to the standard "typing" module that are supported by the mypy typechecker. Example usage: from mypy_extensions import TypedDict """ from typing import Any # NOTE: This module must support Python 2.7 in addition to Python 3.x import sys # _type_check is NOT a part of public typing API, it is used here only to mimic # the (convenient) behavior of types provided by typing module. from typing import _type_check # type: ignore def _check_fails(cls, other): try: if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools', 'typing']: # Typed dicts are only for static structural subtyping. raise TypeError('TypedDict does not support instance and class checks') except (AttributeError, ValueError): pass return False def _dict_new(cls, *args, **kwargs): return dict(*args, **kwargs) def _typeddict_new(cls, _typename, _fields=None, **kwargs): total = kwargs.pop('total', True) if _fields is None: _fields = kwargs elif kwargs: raise TypeError("TypedDict takes either a dict or keyword arguments," " but not both") return _TypedDictMeta(_typename, (), {'__annotations__': dict(_fields), '__total__': total}) class _TypedDictMeta(type): def __new__(cls, name, bases, ns, total=True): # Create new typed dict class object. # This method is called directly when TypedDict is subclassed, # or via _typeddict_new when TypedDict is instantiated. This way # TypedDict supports all three syntaxes described in its docstring. # Subclasses and instanes of TypedDict return actual dictionaries # via _dict_new. 
ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns) try: # Setting correct module is necessary to make typed dict classes pickleable. tp_dict.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__') except (AttributeError, ValueError): pass anns = ns.get('__annotations__', {}) msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" anns = {n: _type_check(tp, msg) for n, tp in anns.items()} for base in bases: anns.update(base.__dict__.get('__annotations__', {})) tp_dict.__annotations__ = anns if not hasattr(tp_dict, '__total__'): tp_dict.__total__ = total return tp_dict __instancecheck__ = __subclasscheck__ = _check_fails TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) TypedDict.__module__ = __name__ TypedDict.__doc__ = \ """A simple typed name space. At runtime it is equivalent to a plain dict. TypedDict creates a dictionary type that expects all of its instances to have a certain set of keys, with each key associated with a value of a consistent type. This expectation is not checked at runtime but is only enforced by typecheckers. Usage:: Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') The type info could be accessed via Point2D.__annotations__. TypedDict supports two additional equivalent forms:: Point2D = TypedDict('Point2D', x=int, y=int, label=str) class Point2D(TypedDict): x: int y: int label: str The latter syntax is only supported in Python 3.6+, while two other syntax forms work for Python 2.7 and 3.2+ """ # Argument constructors for making more-detailed Callables. These all just # return their type argument, to make them complete noops in terms of the # `typing` module. def Arg(type=Any, name=None): """A normal positional argument""" return type def DefaultArg(type=Any, name=None): """A positional argument with a default value""" return type def NamedArg(type=Any, name=None): """A keyword-only argument""" return type def DefaultNamedArg(type=Any, name=None): """A keyword-only argument with a default value""" return type def VarArg(type=Any): """A *args-style variadic positional argument""" return type def KwArg(type=Any): """A **kwargs-style variadic keyword argument""" return type # Return type that indicates a function does not return class NoReturn: pass mypy-0.560/extensions/README.md0000644€tŠÔÚ€2›s®0000000025313215007205022345 0ustar jukkaDROPBOX\Domain Users00000000000000Mypy Extensions =============== The "mypy_extensions" module defines experimental extensions to the standard "typing" module that are supported by the mypy typechecker. mypy-0.560/extensions/setup.cfg0000644€tŠÔÚ€2›s®0000000003213215007205022702 0ustar jukkaDROPBOX\Domain Users00000000000000[bdist_wheel] universal=1 mypy-0.560/extensions/setup.py0000644€tŠÔÚ€2›s®0000000261713215007205022606 0ustar jukkaDROPBOX\Domain Users00000000000000# NOTE: This package must support Python 2.7 in addition to Python 3.x from setuptools import setup version = '0.3.0' description = 'Experimental type system extensions for programs checked with the mypy typechecker.' long_description = ''' Mypy Extensions =============== The "mypy_extensions" module defines experimental extensions to the standard "typing" module that are supported by the mypy typechecker. 
'''.lstrip() classifiers = [ 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development', ] setup( name='mypy_extensions', version=version, description=description, long_description=long_description, author='The mypy developers', author_email='jukka.lehtosalo@iki.fi', url='http://www.mypy-lang.org/', license='MIT License', platforms=['POSIX'], py_modules=['mypy_extensions'], classifiers=classifiers, install_requires=[ 'typing >= 3.5.3; python_version < "3.5"', ], ) mypy-0.560/LICENSE0000644€tŠÔÚ€2›s®0000002571713215007205017710 0ustar jukkaDROPBOX\Domain Users00000000000000Mypy is licensed under the terms of the MIT license, reproduced below. = = = = = The MIT License Copyright (c) 2015-2016 Jukka Lehtosalo and contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. = = = = = Portions of mypy are licensed under different licenses. The files under stdlib-samples are licensed under the PSF 2 License, reproduced below. = = = = = PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -------------------------------------------- 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated documentation. 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. 3. 
In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python. 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this License Agreement. BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 ------------------------------------------- BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the Individual or Organization ("Licensee") accessing and otherwise using this software in source or binary form and its associated documentation ("the Software"). 2. Subject to the terms and conditions of this BeOpen Python License Agreement, BeOpen hereby grants Licensee a non-exclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use the Software alone or in any derivative version, provided, however, that the BeOpen Python License is retained in the Software, alone or in any derivative version prepared by Licensee. 3. BeOpen is making the Software available to Licensee on an "AS IS" basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 5. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 6. This License Agreement shall be governed by and interpreted in all respects by the law of the State of California, excluding conflict of law provisions. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between BeOpen and Licensee. 
This License Agreement does not grant permission to use BeOpen trademarks or trade names in a trademark sense to endorse or promote products or services of Licensee, or any third party. As an exception, the "BeOpen Python" logos available at http://www.pythonlabs.com/logos.html may be used according to the permissions granted on that web page. 7. By copying, installing or otherwise using the software, Licensee agrees to be bound by the terms and conditions of this License Agreement. CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 --------------------------------------- 1. This LICENSE AGREEMENT is between the Corporation for National Research Initiatives, having an office at 1895 Preston White Drive, Reston, VA 20191 ("CNRI"), and the Individual or Organization ("Licensee") accessing and otherwise using Python 1.6.1 software in source or binary form and its associated documentation. 2. Subject to the terms and conditions of this License Agreement, CNRI hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python 1.6.1 alone or in any derivative version, provided, however, that CNRI's License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) 1995-2001 Corporation for National Research Initiatives; All Rights Reserved" are retained in Python 1.6.1 alone or in any derivative version prepared by Licensee. Alternately, in lieu of CNRI's License Agreement, Licensee may substitute the following text (omitting the quotes): "Python 1.6.1 is made available subject to the terms and conditions in CNRI's License Agreement. This Agreement together with Python 1.6.1 may be located on the Internet using the following unique, persistent identifier (known as a handle): 1895.22/1013. This Agreement may also be obtained from a proxy server on the Internet using the following URL: http://hdl.handle.net/1895.22/1013". 3. In the event Licensee prepares a derivative work that is based on or incorporates Python 1.6.1 or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python 1.6.1. 4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. This License Agreement shall be governed by the federal intellectual property law of the United States, including without limitation the federal copyright law, and, to the extent such U.S. federal law does not apply, by the law of the Commonwealth of Virginia, excluding Virginia's conflict of law provisions. 
Notwithstanding the foregoing, with regard to derivative works based on Python 1.6.1 that incorporate non-separable material that was previously distributed under the GNU General Public License (GPL), the law of the Commonwealth of Virginia shall govern this License Agreement only as to issues arising under or with respect to Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between CNRI and Licensee. This License Agreement does not grant permission to use CNRI trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. By clicking on the "ACCEPT" button where indicated, or by copying, installing or otherwise using Python 1.6.1, Licensee agrees to be bound by the terms and conditions of this License Agreement. ACCEPT CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 -------------------------------------------------- Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, The Netherlands. All rights reserved. Permission to use, copy, modify, and distribute this software and its documentation for any purpose and without fee is hereby granted, provided that the above copyright notice appear in all copies and that both that copyright notice and this permission notice appear in supporting documentation, and that the name of Stichting Mathematisch Centrum or CWI not be used in advertising or publicity pertaining to distribution of the software without specific, written prior permission. STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. = = = = =mypy-0.560/MANIFEST.in0000644€tŠÔÚ€2›s®0000000026213215007205020425 0ustar jukkaDROPBOX\Domain Users00000000000000recursive-include scripts * recursive-include test-data * recursive-include extensions * recursive-include docs * include runtests.py include mypy_self_check.ini include LICENSE mypy-0.560/mypy/0000755€tŠÔÚ€2›s®0000000000013215007242017666 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/mypy/__init__.py0000644€tŠÔÚ€2›s®0000000004513215007205021775 0ustar jukkaDROPBOX\Domain Users00000000000000# This page intentionally left blank mypy-0.560/mypy/__main__.py0000644€tŠÔÚ€2›s®0000000024113215007205021754 0ustar jukkaDROPBOX\Domain Users00000000000000"""Mypy type checker command line tool.""" from mypy.main import main def console_entry() -> None: main(None) if __name__ == '__main__': main(None) mypy-0.560/mypy/api.py0000644€tŠÔÚ€2›s®0000000351213215007205021011 0ustar jukkaDROPBOX\Domain Users00000000000000"""This module makes it possible to use mypy as part of a Python application. Since mypy still changes, the API was kept utterly simple and non-intrusive. It just mimics command line activation without starting a new interpreter. So the normal docs about the mypy command line apply. Changes in the command line version of mypy will be immediately useable. Just import this module and then call the 'run' function with a parameter of type List[str], containing what normally would have been the command line arguments to mypy. 
Function 'run' returns a Tuple[str, str, int], namely (, , ), in which is what mypy normally writes to sys.stdout, is what mypy normally writes to sys.stderr and exit_status is the exit status mypy normally returns to the operating system. Any pretty formatting is left to the caller. Trivial example of code using this module: import sys from mypy import api result = api.run(sys.argv[1:]) if result[0]: print('\nType checking report:\n') print(result[0]) # stdout if result[1]: print('\nError report:\n') print(result[1]) # stderr print ('\nExit status:', result[2]) """ import sys from io import StringIO from typing import List, Tuple from mypy.main import main def run(args: List[str]) -> Tuple[str, str, int]: old_stdout = sys.stdout new_stdout = StringIO() sys.stdout = new_stdout old_stderr = sys.stderr new_stderr = StringIO() sys.stderr = new_stderr try: main(None, args=args) exit_status = 0 except SystemExit as system_exit: exit_status = system_exit.code finally: sys.stdout = old_stdout sys.stderr = old_stderr return new_stdout.getvalue(), new_stderr.getvalue(), exit_status mypy-0.560/mypy/applytype.py0000644€tŠÔÚ€2›s®0000000522213215007205022267 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Dict, Sequence, Optional import mypy.subtypes from mypy.sametypes import is_same_type from mypy.expandtype import expand_type from mypy.types import Type, TypeVarId, TypeVarType, CallableType, AnyType, PartialType from mypy.messages import MessageBuilder from mypy.nodes import Context def apply_generic_arguments(callable: CallableType, orig_types: Sequence[Optional[Type]], msg: MessageBuilder, context: Context) -> CallableType: """Apply generic type arguments to a callable type. For example, applying [int] to 'def [T] (T) -> T' results in 'def (int) -> int'. Note that each type can be None; in this case, it will not be applied. """ tvars = callable.variables assert len(tvars) == len(orig_types) # Check that inferred type variable values are compatible with allowed # values and bounds. Also, promote subtype values to allowed values. types = list(orig_types) for i, type in enumerate(types): assert not isinstance(type, PartialType), "Internal error: must never apply partial type" values = callable.variables[i].values if values and type: if isinstance(type, AnyType): continue if isinstance(type, TypeVarType) and type.values: # Allow substituting T1 for T if every allowed value of T1 # is also a legal value of T. if all(any(is_same_type(v, v1) for v in values) for v1 in type.values): continue for value in values: if mypy.subtypes.is_subtype(type, value): types[i] = value break else: msg.incompatible_typevar_value(callable, type, callable.variables[i].name, context) upper_bound = callable.variables[i].upper_bound if type and not mypy.subtypes.is_subtype(type, upper_bound): msg.incompatible_typevar_value(callable, type, callable.variables[i].name, context) # Create a map from type variable id to target type. id_to_type = {} # type: Dict[TypeVarId, Type] for i, tv in enumerate(tvars): typ = types[i] if typ: id_to_type[tv.id] = typ # Apply arguments to argument types. arg_types = [expand_type(at, id_to_type) for at in callable.arg_types] # The callable may retain some type vars if only some were applied. 
remaining_tvars = [tv for tv in tvars if tv.id not in id_to_type] return callable.copy_modified( arg_types=arg_types, ret_type=expand_type(callable.ret_type, id_to_type), variables=remaining_tvars, ) mypy-0.560/mypy/binder.py0000644€tŠÔÚ€2›s®0000003623213215007205021510 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, List, Set, Iterator, Union, Optional, Tuple, cast from contextlib import contextmanager from collections import defaultdict MYPY = False if MYPY: from typing import DefaultDict from mypy.types import Type, AnyType, PartialType, UnionType, TypeOfAny from mypy.subtypes import is_subtype from mypy.join import join_simple from mypy.sametypes import is_same_type from mypy.nodes import Expression, Var, RefExpr from mypy.literals import Key, literal, literal_hash, subkeys from mypy.nodes import IndexExpr, MemberExpr, NameExpr BindableTypes = (IndexExpr, MemberExpr, NameExpr) BindableExpression = Union[IndexExpr, MemberExpr, NameExpr] class Frame(Dict[Key, Type]): """A Frame represents a specific point in the execution of a program. It carries information about the current types of expressions at that point, arising either from assignments to those expressions or the result of isinstance checks. It also records whether it is possible to reach that point at all. This information is not copied into a new Frame when it is pushed onto the stack, so a given Frame only has information about types that were assigned in that frame. """ def __init__(self) -> None: self.unreachable = False class DeclarationsFrame(Dict[Key, Optional[Type]]): """Same as above, but allowed to have None values.""" def __init__(self) -> None: self.unreachable = False if MYPY: # This is the type of stored assignments for union type rvalues. # We use 'if MYPY: ...' since typing-3.5.1 does not have 'DefaultDict' Assigns = DefaultDict[Expression, List[Tuple[Type, Optional[Type]]]] class ConditionalTypeBinder: """Keep track of conditional types of variables. NB: Variables are tracked by literal expression, so it is possible to confuse the binder; for example, ``` class A: a = None # type: Union[int, str] x = A() lst = [x] reveal_type(x.a) # Union[int, str] x.a = 1 reveal_type(x.a) # int reveal_type(lst[0].a) # Union[int, str] lst[0].a = 'a' reveal_type(x.a) # int reveal_type(lst[0].a) # str ``` """ # Stored assignments for situations with tuple/list lvalue and rvalue of union type. # This maps an expression to a list of bound types for every item in the union type. type_assignments = None # type: Optional[Assigns] def __init__(self) -> None: # The stack of frames currently used. These map # literal_hash(expr) -- literals like 'foo.bar' -- # to types. The last element of this list is the # top-most, current frame. Each earlier element # records the state as of when that frame was last # on top of the stack. self.frames = [Frame()] # For frames higher in the stack, we record the set of # Frames that can escape there, either by falling off # the end of the frame or by a loop control construct # or raised exception. The last element of self.frames # has no corresponding element in this list. self.options_on_return = [] # type: List[List[Frame]] # Maps literal_hash(expr) to get_declaration(expr) # for every expr stored in the binder self.declarations = DeclarationsFrame() # Set of other keys to invalidate if a key is changed, e.g. x -> {x.a, x[0]} # Whenever a new key (e.g. 
x.a.b) is added, we update this self.dependencies = {} # type: Dict[Key, Set[Key]] # Whether the last pop changed the newly top frame on exit self.last_pop_changed = False self.try_frames = set() # type: Set[int] self.break_frames = [] # type: List[int] self.continue_frames = [] # type: List[int] def _add_dependencies(self, key: Key, value: Optional[Key] = None) -> None: if value is None: value = key else: self.dependencies.setdefault(key, set()).add(value) for elt in subkeys(key): self._add_dependencies(elt, value) def push_frame(self) -> Frame: """Push a new frame into the binder.""" f = Frame() self.frames.append(f) self.options_on_return.append([]) return f def _put(self, key: Key, type: Type, index: int=-1) -> None: self.frames[index][key] = type def _get(self, key: Key, index: int=-1) -> Optional[Type]: if index < 0: index += len(self.frames) for i in range(index, -1, -1): if key in self.frames[i]: return self.frames[i][key] return None def put(self, expr: Expression, typ: Type) -> None: if not isinstance(expr, BindableTypes): return if not literal(expr): return key = literal_hash(expr) assert key is not None, 'Internal error: binder tried to put non-literal' if key not in self.declarations: self.declarations[key] = get_declaration(expr) self._add_dependencies(key) self._put(key, typ) def unreachable(self) -> None: self.frames[-1].unreachable = True def get(self, expr: Expression) -> Optional[Type]: key = literal_hash(expr) assert key is not None, 'Internal error: binder tried to get non-literal' return self._get(key) def is_unreachable(self) -> bool: # TODO: Copy the value of unreachable into new frames to avoid # this traversal on every statement? return any(f.unreachable for f in self.frames) def cleanse(self, expr: Expression) -> None: """Remove all references to a Node from the binder.""" key = literal_hash(expr) assert key is not None, 'Internal error: binder tried cleanse non-literal' self._cleanse_key(key) def _cleanse_key(self, key: Key) -> None: """Remove all references to a key from the binder.""" for frame in self.frames: if key in frame: del frame[key] def update_from_options(self, frames: List[Frame]) -> bool: """Update the frame to reflect that each key will be updated as in one of the frames. Return whether any item changes. If a key is declared as AnyType, only update it if all the options are the same. """ frames = [f for f in frames if not f.unreachable] changed = False keys = set(key for f in frames for key in f) for key in keys: current_value = self._get(key) resulting_values = [f.get(key, current_value) for f in frames] if any(x is None for x in resulting_values): # We didn't know anything about key before # (current_value must be None), and we still don't # know anything about key in at least one possible frame. continue type = resulting_values[0] assert type is not None declaration_type = self.declarations.get(key) if isinstance(declaration_type, AnyType): # At this point resulting values can't contain None, see continue above if not all(is_same_type(type, cast(Type, t)) for t in resulting_values[1:]): type = AnyType(TypeOfAny.from_another_any, source_any=declaration_type) else: for other in resulting_values[1:]: assert other is not None type = join_simple(self.declarations[key], type, other) if current_value is None or not is_same_type(type, current_value): self._put(key, type) changed = True self.frames[-1].unreachable = not frames return changed def pop_frame(self, can_skip: bool, fall_through: int) -> Frame: """Pop a frame and return it. 
See frame_context() for documentation of fall_through. """ if fall_through > 0: self.allow_jump(-fall_through) result = self.frames.pop() options = self.options_on_return.pop() if can_skip: options.insert(0, self.frames[-1]) self.last_pop_changed = self.update_from_options(options) return result @contextmanager def accumulate_type_assignments(self) -> 'Iterator[Assigns]': """Push a new map to collect assigned types in multiassign from union. If this map is not None, actual binding is deferred until all items in the union are processed (a union of collected items is later bound manually by the caller). """ old_assignments = None if self.type_assignments is not None: old_assignments = self.type_assignments self.type_assignments = defaultdict(list) yield self.type_assignments self.type_assignments = old_assignments def assign_type(self, expr: Expression, type: Type, declared_type: Optional[Type], restrict_any: bool = False) -> None: if self.type_assignments is not None: # We are in a multiassign from union, defer the actual binding, # just collect the types. self.type_assignments[expr].append((type, declared_type)) return if not isinstance(expr, BindableTypes): return None if not literal(expr): return self.invalidate_dependencies(expr) if declared_type is None: # Not sure why this happens. It seems to mainly happen in # member initialization. return if not is_subtype(type, declared_type): # Pretty sure this is only happens when there's a type error. # Ideally this function wouldn't be called if the # expression has a type error, though -- do other kinds of # errors cause this function to get called at invalid # times? return enclosing_type = self.most_recent_enclosing_type(expr, type) if (isinstance(enclosing_type, AnyType) and not restrict_any): # If x is Any and y is int, after x = y we do not infer that x is int. # This could be changed. if not isinstance(type, AnyType): # We narrowed type from Any in a recent frame (probably an # isinstance check), but now it is reassigned, so broaden back # to Any (which is the most recent enclosing type) self.put(expr, enclosing_type) elif (isinstance(type, AnyType) and not (isinstance(declared_type, UnionType) and any(isinstance(item, AnyType) for item in declared_type.items))): # Assigning an Any value doesn't affect the type to avoid false negatives, unless # there is an Any item in a declared union type. self.put(expr, declared_type) else: self.put(expr, type) for i in self.try_frames: # XXX This should probably not copy the entire frame, but # just copy this variable into a single stored frame. self.allow_jump(i) def invalidate_dependencies(self, expr: BindableExpression) -> None: """Invalidate knowledge of types that include expr, but not expr itself. For example, when expr is foo.bar, invalidate foo.bar.baz. It is overly conservative: it invalidates globally, including in code paths unreachable from here. 
""" key = literal_hash(expr) assert key is not None for dep in self.dependencies.get(key, set()): self._cleanse_key(dep) def most_recent_enclosing_type(self, expr: BindableExpression, type: Type) -> Optional[Type]: if isinstance(type, AnyType): return get_declaration(expr) key = literal_hash(expr) assert key is not None enclosers = ([get_declaration(expr)] + [f[key] for f in self.frames if key in f and is_subtype(type, f[key])]) return enclosers[-1] def allow_jump(self, index: int) -> None: # self.frames and self.options_on_return have different lengths # so make sure the index is positive if index < 0: index += len(self.options_on_return) frame = Frame() for f in self.frames[index + 1:]: frame.update(f) if f.unreachable: frame.unreachable = True self.options_on_return[index].append(frame) def handle_break(self) -> None: self.allow_jump(self.break_frames[-1]) self.unreachable() def handle_continue(self) -> None: self.allow_jump(self.continue_frames[-1]) self.unreachable() @contextmanager def frame_context(self, *, can_skip: bool, fall_through: int = 1, break_frame: int = 0, continue_frame: int = 0, try_frame: bool = False) -> Iterator[Frame]: """Return a context manager that pushes/pops frames on enter/exit. If can_skip is True, control flow is allowed to bypass the newly-created frame. If fall_through > 0, then it will allow control flow that falls off the end of the frame to escape to its ancestor `fall_through` levels higher. Otherwise control flow ends at the end of the frame. If break_frame > 0, then 'break' statements within this frame will jump out to the frame break_frame levels higher than the frame created by this call to frame_context. Similarly for continue_frame and 'continue' statements. If try_frame is true, then execution is allowed to jump at any point within the newly created frame (or its descendents) to its parent (i.e., to the frame that was on top before this call to frame_context). After the context manager exits, self.last_pop_changed indicates whether any types changed in the newly-topmost frame as a result of popping this frame. """ assert len(self.frames) > 1 if break_frame: self.break_frames.append(len(self.frames) - break_frame) if continue_frame: self.continue_frames.append(len(self.frames) - continue_frame) if try_frame: self.try_frames.add(len(self.frames) - 1) new_frame = self.push_frame() if try_frame: # An exception may occur immediately self.allow_jump(-1) yield new_frame self.pop_frame(can_skip, fall_through) if break_frame: self.break_frames.pop() if continue_frame: self.continue_frames.pop() if try_frame: self.try_frames.remove(len(self.frames) - 1) @contextmanager def top_frame_context(self) -> Iterator[Frame]: """A variant of frame_context for use at the top level of a namespace (module, function, or class). """ assert len(self.frames) == 1 yield self.push_frame() self.pop_frame(True, 0) def get_declaration(expr: BindableExpression) -> Optional[Type]: if isinstance(expr, RefExpr) and isinstance(expr.node, Var): type = expr.node.type if not isinstance(type, PartialType): return type return None mypy-0.560/mypy/build.py0000644€tŠÔÚ€2›s®0000033367313215007206021356 0ustar jukkaDROPBOX\Domain Users00000000000000"""Facilities to analyze entire programs, including imported modules. Parse and analyze the source files of a program in the correct order (based on file dependencies), and collect the results. This module only directs a build, which is performed in multiple passes per file. The individual passes are implemented in separate modules. 
The function build() is the main interface to this module. """ # TODO: More consistent terminology, e.g. path/fnam, module/id, state/file import binascii import collections import contextlib from distutils.sysconfig import get_python_lib import gc import hashlib import json import os.path import re import site import stat import sys import time from os.path import dirname, basename import errno from typing import (AbstractSet, Any, cast, Dict, Iterable, Iterator, List, Mapping, NamedTuple, Optional, Set, Tuple, Union, Callable) # Can't use TYPE_CHECKING because it's not in the Python 3.5.1 stdlib MYPY = False if MYPY: from typing import Deque from mypy.nodes import (MODULE_REF, MypyFile, Node, ImportBase, Import, ImportFrom, ImportAll) from mypy.semanal_pass1 import SemanticAnalyzerPass1 from mypy.semanal import SemanticAnalyzerPass2 from mypy.semanal_pass3 import SemanticAnalyzerPass3 from mypy.checker import TypeChecker from mypy.indirection import TypeIndirectionVisitor from mypy.errors import Errors, CompileError, DecodeError, report_internal_error from mypy.report import Reports from mypy import moduleinfo from mypy import util from mypy.fixup import fixup_module_pass_one, fixup_module_pass_two from mypy.nodes import Expression from mypy.options import Options from mypy.parse import parse from mypy.stats import dump_type_stats from mypy.types import Type from mypy.version import __version__ from mypy.plugin import Plugin, DefaultPlugin, ChainedPlugin from mypy.defaults import PYTHON3_VERSION_MIN PYTHON_EXTENSIONS = ['.pyi', '.py'] Graph = Dict[str, 'State'] def getmtime(name: str) -> int: return int(os.path.getmtime(name)) # TODO: Get rid of BuildResult. We might as well return a BuildManager. class BuildResult: """The result of a successful build. Attributes: manager: The build manager. files: Dictionary from module name to related AST node. types: Dictionary from parse tree node to its inferred type. errors: List of error messages. """ def __init__(self, manager: 'BuildManager', graph: Graph) -> None: self.manager = manager self.graph = graph self.files = manager.modules self.types = manager.all_types # Non-empty for tests only or if dumping deps self.errors = manager.errors.messages() class BuildSource: def __init__(self, path: Optional[str], module: Optional[str], text: Optional[str]) -> None: self.path = path self.module = module or '__main__' self.text = text def __repr__(self) -> str: return '' % (self.path, self.module, self.text is not None) class BuildSourceSet: """Efficiently test a file's membership in the set of build sources.""" def __init__(self, sources: List[BuildSource]) -> None: self.source_text_present = False self.source_modules = set() # type: Set[str] self.source_paths = set() # type: Set[str] for source in sources: if source.text is not None: self.source_text_present = True elif source.path: self.source_paths.add(source.path) else: self.source_modules.add(source.module) def is_source(self, file: MypyFile) -> bool: if file.path and file.path in self.source_paths: return True elif file._fullname in self.source_modules: return True elif file.path is None and self.source_text_present: return True else: return False # A dict containing saved cache data from a previous run. This will # be updated in place with newly computed cache data. See dmypy.py. 
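# (Illustrative gloss:) each entry maps a module id to the (CacheMeta, parsed MypyFile
# tree, expression-to-type map) captured on a previous run, letting a long-running
# process such as dmypy reuse results without re-reading the JSON cache files.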
SavedCache = Dict[str, Tuple['CacheMeta', MypyFile, Dict[Expression, Type]]] def build(sources: List[BuildSource], options: Options, alt_lib_path: Optional[str] = None, bin_dir: Optional[str] = None, saved_cache: Optional[SavedCache] = None, ) -> BuildResult: """Analyze a program. A single call to build performs parsing, semantic analysis and optionally type checking for the program *and* all imported modules, recursively. Return BuildResult if successful or only non-blocking errors were found; otherwise raise CompileError. Args: sources: list of sources to build options: build options alt_lib_path: an additional directory for looking up library modules (takes precedence over other directories) bin_dir: directory containing the mypy script, used for finding data directories; if omitted, use '.' as the data directory saved_cache: optional dict with saved cache state for dmypy (read-write!) """ # This seems the most reasonable place to tune garbage collection. gc.set_threshold(50000) data_dir = default_data_dir(bin_dir) find_module_clear_caches() # Determine the default module search path. lib_path = default_lib_path(data_dir, options.python_version, custom_typeshed_dir=options.custom_typeshed_dir) if options.use_builtins_fixtures: # Use stub builtins (to speed up test cases and to make them easier to # debug). This is a test-only feature, so assume our files are laid out # as in the source tree. root_dir = dirname(dirname(__file__)) lib_path.insert(0, os.path.join(root_dir, 'test-data', 'unit', 'lib-stub')) else: for source in sources: if source.path: # Include directory of the program file in the module search path. dir = remove_cwd_prefix_from_path(dirname(source.path)) if dir not in lib_path: lib_path.insert(0, dir) # Do this even if running as a file, for sanity (mainly because with # multiple builds, there could be a mix of files/modules, so its easier # to just define the semantics that we always add the current director # to the lib_path # TODO: Don't do this in some cases; for motivation see see # https://github.com/python/mypy/issues/4195#issuecomment-341915031 lib_path.insert(0, os.getcwd()) # Prepend a config-defined mypy path. lib_path[:0] = options.mypy_path # Add MYPYPATH environment variable to front of library path, if defined. lib_path[:0] = mypy_path() # If provided, insert the caller-supplied extra module path to the # beginning (highest priority) of the search path. if alt_lib_path: lib_path.insert(0, alt_lib_path) reports = Reports(data_dir, options.report_dirs) source_set = BuildSourceSet(sources) errors = Errors(options.show_error_context, options.show_column_numbers) plugin = load_plugins(options, errors) # Construct a build manager object to hold state during the build. # # Ignore current directory prefix in error messages. manager = BuildManager(data_dir, lib_path, ignore_prefix=os.getcwd(), source_set=source_set, reports=reports, options=options, version_id=__version__, plugin=plugin, errors=errors, saved_cache=saved_cache) try: graph = dispatch(sources, manager) return BuildResult(manager, graph) finally: manager.log("Build finished in %.3f seconds with %d modules, and %d errors" % (time.time() - manager.start_time, len(manager.modules), manager.errors.num_messages())) # Finish the HTML or XML reports even if CompileError was raised. 
reports.finish() def default_data_dir(bin_dir: Optional[str]) -> str: """Returns directory containing typeshed directory Args: bin_dir: directory containing the mypy script """ if not bin_dir: if os.name == 'nt': prefixes = [os.path.join(sys.prefix, 'Lib')] try: prefixes.append(os.path.join(site.getuserbase(), 'lib')) except AttributeError: # getuserbase in not available in virtualenvs prefixes.append(os.path.join(get_python_lib(), 'lib')) for parent in prefixes: data_dir = os.path.join(parent, 'mypy') if os.path.exists(data_dir): return data_dir mypy_package = os.path.dirname(__file__) parent = os.path.dirname(mypy_package) if (os.path.basename(parent) == 'site-packages' or os.path.basename(parent) == 'dist-packages'): # Installed in site-packages or dist-packages, but invoked with python3 -m mypy; # __file__ is .../blah/lib/python3.N/site-packages/mypy/build.py # or .../blah/lib/python3.N/dist-packages/mypy/build.py (Debian) # or .../blah/lib64/python3.N/dist-packages/mypy/build.py (Gentoo) # or .../blah/lib/site-packages/mypy/build.py (Windows) # blah may be a virtualenv or /usr/local. We want .../blah/lib/mypy. lib = parent for i in range(2): lib = os.path.dirname(lib) if os.path.basename(lib) in ('lib', 'lib32', 'lib64'): return os.path.join(os.path.dirname(lib), 'lib/mypy') subdir = os.path.join(parent, 'lib', 'mypy') if os.path.isdir(subdir): # If installed via buildout, the __file__ is # somewhere/mypy/__init__.py and what we want is # somewhere/lib/mypy. return subdir # Default to directory containing this file's parent. return parent base = os.path.basename(bin_dir) dir = os.path.dirname(bin_dir) if (sys.platform == 'win32' and base.lower() == 'scripts' and not os.path.isdir(os.path.join(dir, 'typeshed'))): # Installed, on Windows. return os.path.join(dir, 'Lib', 'mypy') elif base == 'scripts': # Assume that we have a repo check out or unpacked source tarball. return dir elif base == 'bin': # Installed to somewhere (can be under /usr/local or anywhere). return os.path.join(dir, 'lib', 'mypy') elif base == 'python3': # Assume we installed python3 with brew on os x return os.path.join(os.path.dirname(dir), 'lib', 'mypy') elif dir.endswith('python-exec'): # Gentoo uses a python wrapper in /usr/lib to which mypy is a symlink. return os.path.join(os.path.dirname(dir), 'mypy') else: # Don't know where to find the data files! raise RuntimeError("Broken installation: can't determine base dir") def mypy_path() -> List[str]: path_env = os.getenv('MYPYPATH') if not path_env: return [] return path_env.split(os.pathsep) def default_lib_path(data_dir: str, pyversion: Tuple[int, int], custom_typeshed_dir: Optional[str]) -> List[str]: """Return default standard library search paths.""" # IDEA: Make this more portable. path = [] # type: List[str] if custom_typeshed_dir: typeshed_dir = custom_typeshed_dir else: auto = os.path.join(data_dir, 'stubs-auto') if os.path.isdir(auto): data_dir = auto typeshed_dir = os.path.join(data_dir, "typeshed") if pyversion[0] == 3: # We allow a module for e.g. version 3.5 to be in 3.4/. The assumption # is that a module added with 3.4 will still be present in Python 3.5. versions = ["%d.%d" % (pyversion[0], minor) for minor in reversed(range(PYTHON3_VERSION_MIN[1], pyversion[1] + 1))] else: # For Python 2, we only have stubs for 2.7 versions = ["2.7"] # E.g. for Python 3.5, try 3.5/, 3.4/, 3.3/, 3/, 2and3/. 
for v in versions + [str(pyversion[0]), '2and3']: for lib_type in ['stdlib', 'third_party']: stubdir = os.path.join(typeshed_dir, lib_type, v) if os.path.isdir(stubdir): path.append(stubdir) # Add fallback path that can be used if we have a broken installation. if sys.platform != 'win32': path.append('/usr/local/lib/mypy') if not path: print("Could not resolve typeshed subdirectories. If you are using mypy\n" "from source, you need to run \"git submodule update --init\".\n" "Otherwise your mypy install is broken.\nPython executable is located at " "{0}.\nMypy located at {1}".format(sys.executable, data_dir), file=sys.stderr) sys.exit(1) return path CacheMeta = NamedTuple('CacheMeta', [('id', str), ('path', str), ('memory_only', bool), # no corresponding json files (fine-grained only) ('mtime', int), ('size', int), ('hash', str), ('dependencies', List[str]), # names of imported modules ('data_mtime', int), # mtime of data_json ('data_json', str), # path of .data.json ('suppressed', List[str]), # dependencies that weren't imported ('child_modules', List[str]), # all submodules of the given module ('options', Optional[Dict[str, object]]), # build options ('dep_prios', List[int]), ('interface_hash', str), # hash representing the public interface ('version_id', str), # mypy version for cache invalidation ('ignore_all', bool), # if errors were ignored ]) # NOTE: dependencies + suppressed == all reachable imports; # suppressed contains those reachable imports that were prevented by # silent mode or simply not found. def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta: sentinel = None # type: Any # Values to be validated by the caller return CacheMeta( meta.get('id', sentinel), meta.get('path', sentinel), meta.get('memory_only', False), int(meta['mtime']) if 'mtime' in meta else sentinel, meta.get('size', sentinel), meta.get('hash', sentinel), meta.get('dependencies', []), int(meta['data_mtime']) if 'data_mtime' in meta else sentinel, data_json, meta.get('suppressed', []), meta.get('child_modules', []), meta.get('options'), meta.get('dep_prios', []), meta.get('interface_hash', ''), meta.get('version_id', sentinel), meta.get('ignore_all', True), ) # Priorities used for imports. (Here, top-level includes inside a class.) # These are used to determine a more predictable order in which the # nodes in an import cycle are processed. PRI_HIGH = 5 # top-level "from X import blah" PRI_MED = 10 # top-level "import X" PRI_LOW = 20 # either form inside a function PRI_MYPY = 25 # inside "if MYPY" or "if TYPE_CHECKING" PRI_INDIRECT = 30 # an indirect dependency PRI_ALL = 99 # include all priorities def import_priority(imp: ImportBase, toplevel_priority: int) -> int: """Compute import priority from an import node.""" if not imp.is_top_level: # Inside a function return PRI_LOW if imp.is_mypy_only: # Inside "if MYPY" or "if typing.TYPE_CHECKING" return max(PRI_MYPY, toplevel_priority) # A regular import; priority determined by argument. return toplevel_priority def load_plugins(options: Options, errors: Errors) -> Plugin: """Load all configured plugins. Return a plugin that encapsulates all plugins chained together. Always at least include the default plugin (it's last in the chain). 
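    As a purely illustrative example (the plugin path below is hypothetical), a
    config file might enable one custom plugin with:

        [mypy]
        plugins = ./my_plugin.py

    where my_plugin.py defines a module-level plugin(version) function returning a
    Plugin subclass; the path is resolved relative to the config file, as the code
    below does.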
""" default_plugin = DefaultPlugin(options) # type: Plugin if not options.config_file: return default_plugin line = find_config_file_line_number(options.config_file, 'mypy', 'plugins') if line == -1: line = 1 # We need to pick some line number that doesn't look too confusing def plugin_error(message: str) -> None: errors.report(line, 0, message) errors.raise_error() custom_plugins = [] # type: List[Plugin] errors.set_file(options.config_file, None) for plugin_path in options.plugins: # Plugin paths are relative to the config file location. plugin_path = os.path.join(os.path.dirname(options.config_file), plugin_path) if not os.path.isfile(plugin_path): plugin_error("Can't find plugin '{}'".format(plugin_path)) plugin_dir = os.path.dirname(plugin_path) fnam = os.path.basename(plugin_path) if not fnam.endswith('.py'): plugin_error("Plugin '{}' does not have a .py extension".format(fnam)) module_name = fnam[:-3] import importlib sys.path.insert(0, plugin_dir) try: m = importlib.import_module(module_name) except Exception: print('Error importing plugin {}\n'.format(plugin_path)) raise # Propagate to display traceback finally: assert sys.path[0] == plugin_dir del sys.path[0] if not hasattr(m, 'plugin'): plugin_error('Plugin \'{}\' does not define entry point function "plugin"'.format( plugin_path)) try: plugin_type = getattr(m, 'plugin')(__version__) except Exception: print('Error calling the plugin(version) entry point of {}\n'.format(plugin_path)) raise # Propagate to display traceback if not isinstance(plugin_type, type): plugin_error( 'Type object expected as the return value of "plugin"; got {!r} (in {})'.format( plugin_type, plugin_path)) if not issubclass(plugin_type, Plugin): plugin_error( 'Return value of "plugin" must be a subclass of "mypy.plugin.Plugin" ' '(in {})'.format(plugin_path)) try: custom_plugins.append(plugin_type(options)) except Exception: print('Error constructing plugin instance of {}\n'.format(plugin_type.__name__)) raise # Propagate to display traceback # Custom plugins take precedence over the default plugin. return ChainedPlugin(options, custom_plugins + [default_plugin]) def find_config_file_line_number(path: str, section: str, setting_name: str) -> int: """Return the approximate location of setting_name within mypy config file. Return -1 if can't determine the line unambiguously. """ in_desired_section = False try: results = [] with open(path) as f: for i, line in enumerate(f): line = line.strip() if line.startswith('[') and line.endswith(']'): current_section = line[1:-1].strip() in_desired_section = (current_section == section) elif in_desired_section and re.match(r'{}\s*='.format(setting_name), line): results.append(i + 1) if len(results) == 1: return results[0] except OSError: pass return -1 class BuildManager: """This class holds shared state for building a mypy program. It is used to coordinate parsing, import processing, semantic analysis and type checking. The actual build steps are carried out by dispatch(). 
Attributes: data_dir: Mypy data directory (contains stubs) lib_path: Library path for looking up modules modules: Mapping of module ID to MypyFile (shared by the passes) semantic_analyzer: Semantic analyzer, pass 2 semantic_analyzer_pass3: Semantic analyzer, pass 3 all_types: Map {Expression: Type} collected from all modules (tests only) options: Build options missing_modules: Set of modules that could not be imported encountered so far stale_modules: Set of modules that needed to be rechecked (only used by tests) version_id: The current mypy version (based on commit id when possible) plugin: Active mypy plugin(s) errors: Used for reporting all errors saved_cache: Dict with saved cache state for dmypy and fine-grained incremental mode (read-write!) stats: Dict with various instrumentation numbers """ def __init__(self, data_dir: str, lib_path: List[str], ignore_prefix: str, source_set: BuildSourceSet, reports: Reports, options: Options, version_id: str, plugin: Plugin, errors: Errors, saved_cache: Optional[SavedCache] = None, ) -> None: self.start_time = time.time() self.data_dir = data_dir self.errors = errors self.errors.set_ignore_prefix(ignore_prefix) self.lib_path = tuple(lib_path) self.source_set = source_set self.reports = reports self.options = options self.version_id = version_id self.modules = {} # type: Dict[str, MypyFile] self.missing_modules = set() # type: Set[str] self.plugin = plugin self.semantic_analyzer = SemanticAnalyzerPass2(self.modules, self.missing_modules, lib_path, self.errors, self.plugin) self.semantic_analyzer_pass3 = SemanticAnalyzerPass3(self.modules, self.errors, self.semantic_analyzer) self.all_types = {} # type: Dict[Expression, Type] # Used by tests only self.indirection_detector = TypeIndirectionVisitor() self.stale_modules = set() # type: Set[str] self.rechecked_modules = set() # type: Set[str] self.plugin = plugin self.saved_cache = saved_cache if saved_cache is not None else {} # type: SavedCache self.stats = {} # type: Dict[str, Any] # Values are ints or floats def maybe_swap_for_shadow_path(self, path: str) -> str: if (self.options.shadow_file and os.path.samefile(self.options.shadow_file[0], path)): path = self.options.shadow_file[1] return path def get_stat(self, path: str) -> os.stat_result: return os.stat(self.maybe_swap_for_shadow_path(path)) def all_imported_modules_in_file(self, file: MypyFile) -> List[Tuple[int, str, int]]: """Find all reachable import statements in a file. Return list of tuples (priority, module id, import line number) for all modules imported in file; lower numbers == higher priority. """ def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str: """Function to correct for relative imports.""" file_id = file.fullname() rel = imp.relative if rel == 0: return imp.id if os.path.basename(file.path).startswith('__init__.'): rel -= 1 if rel != 0: file_id = ".".join(file_id.split(".")[:-rel]) new_id = file_id + "." + imp.id if imp.id else file_id return new_id res = [] # type: List[Tuple[int, str, int]] for imp in file.imports: if not imp.is_unreachable: if isinstance(imp, Import): pri = import_priority(imp, PRI_MED) ancestor_pri = import_priority(imp, PRI_LOW) for id, _ in imp.ids: ancestor_parts = id.split(".")[:-1] ancestors = [] for part in ancestor_parts: ancestors.append(part) res.append((ancestor_pri, ".".join(ancestors), imp.line)) res.append((pri, id, imp.line)) elif isinstance(imp, ImportFrom): cur_id = correct_rel_imp(imp) pos = len(res) all_are_submodules = True # Also add any imported names that are submodules. 
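# For instance (an illustrative case): 'from pkg import mod' appends 'pkg.mod' here
# when pkg/mod.py(i) is found on lib_path; if any imported name is not a submodule,
# 'pkg' itself is inserted as a higher-priority dependency just below.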
pri = import_priority(imp, PRI_MED) for name, __ in imp.names: sub_id = cur_id + '.' + name if self.is_module(sub_id): res.append((pri, sub_id, imp.line)) else: all_are_submodules = False # If all imported names are submodules, don't add # cur_id as a dependency. Otherwise (i.e., if at # least one imported name isn't a submodule) # cur_id is also a dependency, and we should # insert it *before* any submodules. if not all_are_submodules: pri = import_priority(imp, PRI_HIGH) res.insert(pos, ((pri, cur_id, imp.line))) elif isinstance(imp, ImportAll): pri = import_priority(imp, PRI_HIGH) res.append((pri, correct_rel_imp(imp), imp.line)) return res def is_module(self, id: str) -> bool: """Is there a file in the file system corresponding to module id?""" return find_module(id, self.lib_path) is not None def parse_file(self, id: str, path: str, source: str, ignore_errors: bool) -> MypyFile: """Parse the source of a file with the given name. Raise CompileError if there is a parse error. """ num_errs = self.errors.num_messages() tree = parse(source, path, id, self.errors, options=self.options) tree._fullname = id self.add_stats(files_parsed=1, modules_parsed=int(not tree.is_stub), stubs_parsed=int(tree.is_stub)) if self.errors.num_messages() != num_errs: self.log("Bailing due to parse errors") self.errors.raise_error() self.errors.set_file_ignored_lines(path, tree.ignored_lines, ignore_errors) return tree def module_not_found(self, path: str, source: str, line: int, target: str) -> None: self.errors.set_file(path, source) stub_msg = "(Stub files are from https://github.com/python/typeshed)" if target == 'builtins': self.errors.report(line, 0, "Cannot find 'builtins' module. Typeshed appears broken!", blocker=True) self.errors.raise_error() elif ((self.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(target)) or (self.options.python_version[0] >= 3 and moduleinfo.is_py3_std_lib_module(target))): self.errors.report( line, 0, "No library stub file for standard library module '{}'".format(target)) self.errors.report(line, 0, stub_msg, severity='note', only_once=True) elif moduleinfo.is_third_party_module(target): self.errors.report(line, 0, "No library stub file for module '{}'".format(target)) self.errors.report(line, 0, stub_msg, severity='note', only_once=True) else: self.errors.report(line, 0, "Cannot find module named '{}'".format(target)) self.errors.report(line, 0, '(Perhaps setting MYPYPATH ' 'or using the "--ignore-missing-imports" flag would help)', severity='note', only_once=True) def report_file(self, file: MypyFile, type_map: Dict[Expression, Type], options: Options) -> None: if self.source_set.is_source(file): self.reports.file(file, type_map, options) def log(self, *message: str) -> None: if self.options.verbosity >= 1: if message: print('LOG: ', *message, file=sys.stderr) else: print(file=sys.stderr) sys.stderr.flush() def trace(self, *message: str) -> None: if self.options.verbosity >= 2: print('TRACE:', *message, file=sys.stderr) sys.stderr.flush() def add_stats(self, **kwds: Any) -> None: for key, value in kwds.items(): if key in self.stats: self.stats[key] += value else: self.stats[key] = value def stats_summary(self) -> Mapping[str, object]: return self.stats def remove_cwd_prefix_from_path(p: str) -> str: """Remove current working directory prefix from p, if present. Also crawl up until a directory without __init__.py is found. If the result would be empty, return '.' instead. 
""" cur = os.getcwd() # Add separator to the end of the path, unless one is already present. if basename(cur) != '': cur += os.sep # Compute root path. while (p and (os.path.isfile(os.path.join(p, '__init__.py')) or os.path.isfile(os.path.join(p, '__init__.pyi')))): dir, base = os.path.split(p) if not base: break p = dir # Remove current directory prefix from the path, if present. if p.startswith(cur): p = p[len(cur):] # Avoid returning an empty path; replace that with '.'. if p == '': p = '.' return p # Cache find_module: (id, lib_path) -> result. find_module_cache = {} # type: Dict[Tuple[str, Tuple[str, ...]], Optional[str]] # Cache some repeated work within distinct find_module calls: finding which # elements of lib_path have even the subdirectory they'd need for the module # to exist. This is shared among different module ids when they differ only # in the last component. find_module_dir_cache = {} # type: Dict[Tuple[str, Tuple[str, ...]], List[str]] # Cache directory listings. We assume that while one os.listdir() # call may be more expensive than one os.stat() call, a small number # of os.stat() calls is quickly more expensive than caching the # os.listdir() outcome, and the advantage of the latter is that it # gives us the case-correct filename on Windows and Mac. find_module_listdir_cache = {} # type: Dict[str, Optional[List[str]]] # Cache for is_file() find_module_is_file_cache = {} # type: Dict[str, bool] # Cache for isdir(join(head, tail)) find_module_isdir_cache = {} # type: Dict[Tuple[str, str], bool] def find_module_clear_caches() -> None: find_module_cache.clear() find_module_dir_cache.clear() find_module_listdir_cache.clear() find_module_is_file_cache.clear() find_module_isdir_cache.clear() def list_dir(path: str) -> Optional[List[str]]: """Return a cached directory listing. Returns None if the path doesn't exist or isn't a directory. """ res = find_module_listdir_cache.get(path) if res is None: try: res = os.listdir(path) except OSError: res = None find_module_listdir_cache[path] = res return res def is_file(path: str) -> bool: """Return whether path exists and is a file. On case-insensitive filesystems (like Mac or Windows) this returns False if the case of the path's last component does not exactly match the case found in the filesystem. """ res = find_module_is_file_cache.get(path) if res is None: head, tail = os.path.split(path) if not tail: res = False else: names = list_dir(head) res = names is not None and tail in names and os.path.isfile(path) find_module_is_file_cache[path] = res return res def find_module(id: str, lib_path_arg: Iterable[str]) -> Optional[str]: """Return the path of the module source file, or None if not found.""" lib_path = tuple(lib_path_arg) def find() -> Optional[str]: # If we're looking for a module like 'foo.bar.baz', it's likely that most of the # many elements of lib_path don't even have a subdirectory 'foo/bar'. Discover # that only once and cache it for when we look for modules like 'foo.bar.blah' # that will require the same subdirectory. 
components = id.split('.') dir_chain = os.sep.join(components[:-1]) # e.g., 'foo/bar' if (dir_chain, lib_path) not in find_module_dir_cache: dirs = [] for pathitem in lib_path: # e.g., '/usr/lib/python3.4/foo/bar' isdir = find_module_isdir_cache.get((pathitem, dir_chain)) if isdir is None: dir = os.path.normpath(os.path.join(pathitem, dir_chain)) isdir = os.path.isdir(dir) find_module_isdir_cache[pathitem, dir_chain] = isdir if isdir: dirs.append(dir) find_module_dir_cache[dir_chain, lib_path] = dirs candidate_base_dirs = find_module_dir_cache[dir_chain, lib_path] # If we're looking for a module like 'foo.bar.baz', then candidate_base_dirs now # contains just the subdirectories 'foo/bar' that actually exist under the # elements of lib_path. This is probably much shorter than lib_path itself. # Now just look for 'baz.pyi', 'baz/__init__.py', etc., inside those directories. seplast = os.sep + components[-1] # so e.g. '/baz' sepinit = os.sep + '__init__' for base_dir in candidate_base_dirs: base_path = base_dir + seplast # so e.g. '/usr/lib/python3.4/foo/bar/baz' # Prefer package over module, i.e. baz/__init__.py* over baz.py*. for extension in PYTHON_EXTENSIONS: path = base_path + sepinit + extension if is_file(path) and verify_module(id, path): return path # No package, look for module. for extension in PYTHON_EXTENSIONS: path = base_path + extension if is_file(path) and verify_module(id, path): return path return None key = (id, lib_path) if key not in find_module_cache: find_module_cache[key] = find() return find_module_cache[key] def find_modules_recursive(module: str, lib_path: List[str]) -> List[BuildSource]: module_path = find_module(module, lib_path) if not module_path: return [] result = [BuildSource(module_path, module, None)] if module_path.endswith(('__init__.py', '__init__.pyi')): # Subtle: this code prefers the .pyi over the .py if both # exists, and also prefers packages over modules if both x/ # and x.py* exist. How? We sort the directory items, so x # comes before x.py and x.pyi. But the preference for .pyi # over .py is encoded in find_module(); even though we see # x.py before x.pyi, find_module() will find x.pyi first. We # use hits to avoid adding it a second time when we see x.pyi. # This also avoids both x.py and x.pyi when x/ was seen first. hits = set() # type: Set[str] for item in sorted(os.listdir(os.path.dirname(module_path))): abs_path = os.path.join(os.path.dirname(module_path), item) if os.path.isdir(abs_path) and \ (os.path.isfile(os.path.join(abs_path, '__init__.py')) or os.path.isfile(os.path.join(abs_path, '__init__.pyi'))): hits.add(item) result += find_modules_recursive(module + '.' + item, lib_path) elif item != '__init__.py' and item != '__init__.pyi' and \ item.endswith(('.py', '.pyi')): mod = item.split('.')[0] if mod not in hits: hits.add(mod) result += find_modules_recursive( module + '.' + mod, lib_path) return result def verify_module(id: str, path: str) -> bool: """Check that all packages containing id have a __init__ file.""" if path.endswith(('__init__.py', '__init__.pyi')): path = dirname(path) for i in range(id.count('.')): path = dirname(path) if not any(is_file(os.path.join(path, '__init__{}'.format(extension))) for extension in PYTHON_EXTENSIONS): return False return True def read_with_python_encoding(path: str, pyversion: Tuple[int, int]) -> Tuple[str, str]: """Read the Python file with while obeying PEP-263 encoding detection. Returns: A tuple: the source as a string, and the hash calculated from the binary representation. 
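    For example (an illustrative case): a file whose first line is
    '# -*- coding: latin-1 -*-' is decoded as latin-1 rather than the default
    encoding ('utf8' when checking Python 3 code, 'ascii' for Python 2).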
""" source_bytearray = bytearray() encoding = 'utf8' if pyversion[0] >= 3 else 'ascii' with open(path, 'rb') as f: # read first two lines and check if PEP-263 coding is present source_bytearray.extend(f.readline()) source_bytearray.extend(f.readline()) m = hashlib.md5(source_bytearray) # check for BOM UTF-8 encoding and strip it out if present if source_bytearray.startswith(b'\xef\xbb\xbf'): encoding = 'utf8' source_bytearray = source_bytearray[3:] else: _encoding, _ = util.find_python_encoding(source_bytearray, pyversion) # check that the coding isn't mypy. We skip it since # registering may not have happened yet if _encoding != 'mypy': encoding = _encoding remainder = f.read() m.update(remainder) source_bytearray.extend(remainder) try: source_text = source_bytearray.decode(encoding) except LookupError as lookuperr: raise DecodeError(str(lookuperr)) return source_text, m.hexdigest() def get_cache_names(id: str, path: str, manager: BuildManager) -> Tuple[str, str]: """Return the file names for the cache files. Args: id: module ID path: module path (used to recognize packages) cache_dir: cache directory pyversion: Python version (major, minor) Returns: A tuple with the file names to be used for the meta JSON and the data JSON, respectively. """ cache_dir = manager.options.cache_dir pyversion = manager.options.python_version prefix = os.path.join(cache_dir, '%d.%d' % pyversion, *id.split('.')) is_package = os.path.basename(path).startswith('__init__.py') if is_package: prefix = os.path.join(prefix, '__init__') return (prefix + '.meta.json', prefix + '.data.json') def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[CacheMeta]: """Find cache data for a module. Args: id: module ID path: module path manager: the build manager (for pyversion, log/trace, and build options) Returns: A CacheMeta instance if the cache data was found and appears valid; otherwise None. """ saved_cache = manager.saved_cache if id in saved_cache: m, t, types = saved_cache[id] manager.add_stats(reused_metas=1) manager.trace("Reusing saved metadata for %s" % id) # Note: it could still be skipped if the mtime/size/hash mismatches. return m # TODO: May need to take more build options into account meta_json, data_json = get_cache_names(id, path, manager) manager.trace('Looking for {} at {}'.format(id, meta_json)) try: with open(meta_json, 'r') as f: meta_str = f.read() manager.trace('Meta {} {}'.format(id, meta_str.rstrip())) meta = json.loads(meta_str) # TODO: Errors except IOError: manager.log('Could not load cache for {}: could not find {}'.format(id, meta_json)) return None if not isinstance(meta, dict): manager.log('Could not load cache for {}: meta cache is not a dict: {}' .format(id, repr(meta))) return None m = cache_meta_from_dict(meta, data_json) # Don't check for path match, that is dealt with in validate_meta(). if (m.id != id or m.mtime is None or m.size is None or m.dependencies is None or m.data_mtime is None): manager.log('Metadata abandoned for {}: attributes are missing'.format(id)) return None # Ignore cache if generated by an older mypy version. if ((m.version_id != manager.version_id and not manager.options.skip_version_check) or m.options is None or len(m.dependencies) != len(m.dep_prios)): manager.log('Metadata abandoned for {}: new attributes are missing'.format(id)) return None # Ignore cache if (relevant) options aren't the same. # Note that it's fine to mutilate cached_options since it's only used here. 
cached_options = m.options current_options = manager.options.clone_for_module(id).select_options_affecting_cache() if manager.options.quick_and_dirty: # In quick_and_dirty mode allow non-quick_and_dirty cache files. cached_options['quick_and_dirty'] = True if manager.options.skip_version_check: # When we're lax about version we're also lax about platform. cached_options['platform'] = current_options['platform'] if 'debug_cache' in cached_options: # Older versions included debug_cache, but it's silly to compare it. del cached_options['debug_cache'] if cached_options != current_options: manager.log('Metadata abandoned for {}: options differ'.format(id)) if manager.options.verbosity >= 2: for key in sorted(set(cached_options) | set(current_options)): if cached_options.get(key) != current_options.get(key): manager.trace(' {}: {} != {}' .format(key, cached_options.get(key), current_options.get(key))) return None manager.add_stats(fresh_metas=1) return m def random_string() -> str: return binascii.hexlify(os.urandom(8)).decode('ascii') def atomic_write(filename: str, *lines: str) -> bool: tmp_filename = filename + '.' + random_string() try: with open(tmp_filename, 'w') as f: for line in lines: f.write(line) os.replace(tmp_filename, filename) except os.error as err: return False return True def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str], ignore_all: bool, manager: BuildManager) -> Optional[CacheMeta]: '''Checks whether the cached AST of this module can be used. Returns: None, if the cached AST is unusable. Original meta, if mtime/size matched. Meta with mtime updated to match source file, if hash/size matched but mtime/path didn't. ''' # This requires two steps. The first one is obvious: we check that the module source file # contents is the same as it was when the cache data file was created. The second one is not # too obvious: we check that the cache data file mtime has not changed; it is needed because # we use cache data file mtime to propagate information about changes in the dependencies. if meta is None: manager.log('Metadata not found for {}'.format(id)) return None if meta.ignore_all and not ignore_all: manager.log('Metadata abandoned for {}: errors were previously ignored'.format(id)) return None if meta.memory_only: # Special case for fine-grained incremental mode when the JSON file is missing but # we want to cache the module anyway. manager.log('Memory-only metadata for {}'.format(id)) return meta assert path is not None, "Internal error: meta was provided without a path" # Check data_json; assume if its mtime matches it's good. # TODO: stat() errors data_mtime = getmtime(meta.data_json) if data_mtime != meta.data_mtime: manager.log('Metadata abandoned for {}: data cache is modified'.format(id)) return None # TODO: Share stat() outcome with find_module() path = os.path.abspath(path) st = manager.get_stat(path) # TODO: Errors if not stat.S_ISREG(st.st_mode): manager.log('Metadata abandoned for {}: file {} does not exist'.format(id, path)) return None size = st.st_size if size != meta.size: manager.log('Metadata abandoned for {}: file {} has different size'.format(id, path)) return None mtime = int(st.st_mtime) if mtime != meta.mtime or path != meta.path: with open(path, 'rb') as f: source_hash = hashlib.md5(f.read()).hexdigest() if source_hash != meta.hash: manager.log('Metadata abandoned for {}: file {} has different hash'.format(id, path)) return None else: # Optimization: update mtime and path (otherwise, this mismatch will reappear). 
meta = meta._replace(mtime=mtime, path=path) # Construct a dict we can pass to json.dumps() (compare to write_cache()). meta_dict = { 'id': id, 'path': path, 'mtime': mtime, 'size': size, 'hash': source_hash, 'data_mtime': data_mtime, 'dependencies': meta.dependencies, 'suppressed': meta.suppressed, 'child_modules': meta.child_modules, 'options': (manager.options.clone_for_module(id) .select_options_affecting_cache()), 'dep_prios': meta.dep_prios, 'interface_hash': meta.interface_hash, 'version_id': manager.version_id, 'ignore_all': meta.ignore_all, } if manager.options.debug_cache: meta_str = json.dumps(meta_dict, indent=2, sort_keys=True) else: meta_str = json.dumps(meta_dict) meta_json, _ = get_cache_names(id, path, manager) manager.log('Updating mtime for {}: file {}, meta {}, mtime {}' .format(id, path, meta_json, meta.mtime)) atomic_write(meta_json, meta_str, '\n') # Ignore errors, it's just an optimization. return meta # It's a match on (id, path, size, hash, mtime). manager.trace('Metadata fresh for {}: file {}'.format(id, path)) return meta def compute_hash(text: str) -> str: # We use md5 instead of the builtin hash(...) function because the output of hash(...) # can differ between runs due to hash randomization (enabled by default in Python 3.3). # See the note in https://docs.python.org/3/reference/datamodel.html#object.__hash__. return hashlib.md5(text.encode('utf-8')).hexdigest() def write_cache(id: str, path: str, tree: MypyFile, dependencies: List[str], suppressed: List[str], child_modules: List[str], dep_prios: List[int], old_interface_hash: str, source_hash: str, ignore_all: bool, manager: BuildManager) -> Tuple[str, Optional[CacheMeta]]: """Write cache files for a module. Note that this mypy's behavior is still correct when any given write_cache() call is replaced with a no-op, so error handling code that bails without writing anything is okay. Args: id: module ID path: module path tree: the fully checked module data dependencies: module IDs on which this module depends suppressed: module IDs which were suppressed as dependencies dep_prios: priorities (parallel array to dependencies) old_interface_hash: the hash from the previous version of the data cache file manager: the build manager (for pyversion, log/trace) Returns: A tuple containing the interface hash and CacheMeta corresponding to the metadata that was written (the latter may be None if the cache could not be written). """ # Obtain file paths path = os.path.abspath(path) meta_json, data_json = get_cache_names(id, path, manager) manager.log('Writing {} {} {} {}'.format(id, path, meta_json, data_json)) # Make sure directory for cache files exists parent = os.path.dirname(data_json) assert os.path.dirname(meta_json) == parent # Serialize data and analyze interface data = tree.serialize() if manager.options.debug_cache: data_str = json.dumps(data, indent=2, sort_keys=True) else: data_str = json.dumps(data, sort_keys=True) interface_hash = compute_hash(data_str) # Obtain and set up metadata try: os.makedirs(parent, exist_ok=True) st = manager.get_stat(path) except OSError as err: manager.log("Cannot get stat for {}: {}".format(path, err)) # Remove apparently-invalid cache files. # (This is purely an optimization.) for filename in [data_json, meta_json]: try: os.remove(filename) except OSError: pass # Still return the interface hash we computed. 
return interface_hash, None # Write data cache file, if applicable if old_interface_hash == interface_hash: # If the interface is unchanged, the cached data is guaranteed # to be equivalent, and we only need to update the metadata. data_mtime = getmtime(data_json) manager.trace("Interface for {} is unchanged".format(id)) else: manager.trace("Interface for {} has changed".format(id)) if not atomic_write(data_json, data_str, '\n'): # Most likely the error is the replace() call # (see https://github.com/python/mypy/issues/3215). manager.log("Error writing data JSON file {}".format(data_json)) # Let's continue without writing the meta file. Analysis: # If the replace failed, we've changed nothing except left # behind an extraneous temporary file; if the replace # worked but the getmtime() call failed, the meta file # will be considered invalid on the next run because the # data_mtime field won't match the data file's mtime. # Both have the effect of slowing down the next run a # little bit due to an out-of-date cache file. return interface_hash, None data_mtime = getmtime(data_json) mtime = int(st.st_mtime) size = st.st_size options = manager.options.clone_for_module(id) assert source_hash is not None meta = {'id': id, 'path': path, 'mtime': mtime, 'size': size, 'hash': source_hash, 'data_mtime': data_mtime, 'dependencies': dependencies, 'suppressed': suppressed, 'child_modules': child_modules, 'options': options.select_options_affecting_cache(), 'dep_prios': dep_prios, 'interface_hash': interface_hash, 'version_id': manager.version_id, 'ignore_all': ignore_all, } # Write meta cache file if manager.options.debug_cache: meta_str = json.dumps(meta, indent=2, sort_keys=True) else: meta_str = json.dumps(meta) if not atomic_write(meta_json, meta_str, '\n'): # Most likely the error is the replace() call # (see https://github.com/python/mypy/issues/3215). # The next run will simply find the cache entry out of date. manager.log("Error writing meta JSON file {}".format(meta_json)) return interface_hash, cache_meta_from_dict(meta, data_json) def delete_cache(id: str, path: str, manager: BuildManager) -> None: """Delete cache files for a module. The cache files for a module are deleted when mypy finds errors there. This avoids inconsistent states with cache files from different mypy runs, see #4043 for an example. """ path = os.path.abspath(path) meta_json, data_json = get_cache_names(id, path, manager) manager.log('Deleting {} {} {} {}'.format(id, path, meta_json, data_json)) if id in manager.saved_cache: del manager.saved_cache[id] for filename in [data_json, meta_json]: try: os.remove(filename) except OSError as e: if e.errno != errno.ENOENT: manager.log("Error deleting cache file {}: {}".format(filename, e.strerror)) """Dependency manager. Design ====== Ideally ------- A. Collapse cycles (each SCC -- strongly connected component -- becomes one "supernode"). B. Topologically sort nodes based on dependencies. C. Process from leaves towards roots. Wrinkles -------- a. Need to parse source modules to determine dependencies. b. Processing order for modules within an SCC. c. Must order mtimes of files to decide whether to re-process; depends on clock never resetting. d. from P import M; checks filesystem whether module P.M exists in filesystem. e. Race conditions, where somebody modifies a file while we're processing. I propose not to modify the algorithm to handle this, but to detect when this could lead to inconsistencies. 
(For example, when we decide on the dependencies based on cache metadata, and then we decide to re-parse a file because of a stale dependency, if the re-parsing leads to a different list of dependencies we should warn the user or start over.) Steps ----- 1. For each explicitly given module find the source file location. 2. For each such module load and check the cache metadata, and decide whether it's valid. 3. Now recursively (or iteratively) find dependencies and add those to the graph: - for cached nodes use the list of dependencies from the cache metadata (this will be valid even if we later end up re-parsing the same source); - for uncached nodes parse the file and process all imports found, taking care of (a) above. Step 3 should also address (d) above. Once step 3 terminates we have the entire dependency graph, and for each module we've either loaded the cache metadata or parsed the source code. (However, we may still need to parse those modules for which we have cache metadata but that depend, directly or indirectly, on at least one module for which the cache metadata is stale.) Now we can execute steps A-C from the first section. Finding SCCs for step A shouldn't be hard; there's a recipe here: http://code.activestate.com/recipes/578507/. There's also a plethora of topsort recipes, e.g. http://code.activestate.com/recipes/577413/. For single nodes, processing is simple. If the node was cached, we deserialize the cache data and fix up cross-references. Otherwise, we do semantic analysis followed by type checking. We also handle (c) above; if a module has valid cache data *but* any of its dependencies was processed from source, then the module should be processed from source. A relatively simple optimization (outside SCCs) we might do in the future is as follows: if a node's cache data is valid, but one or more of its dependencies are out of date so we have to re-parse the node from source, once we have fully type-checked the node, we can decide whether its symbol table actually changed compared to the cache data (by reading the cache data and comparing it to the data we would be writing). If there is no change we can declare the node up to date, and any node that depends (and for which we have cached data, and whose other dependencies are up to date) on it won't need to be re-parsed from source. Import cycles ------------- Finally we have to decide how to handle (c), import cycles. Here we'll need a modified version of the original state machine (build.py), but we only need to do this per SCC, and we won't have to deal with changes to the list of nodes while we're processing it. If all nodes in the SCC have valid cache metadata and all dependencies outside the SCC are still valid, we can proceed as follows: 1. Load cache data for all nodes in the SCC. 2. Fix up cross-references for all nodes in the SCC. Otherwise, the simplest (but potentially slow) way to proceed is to invalidate all cache data in the SCC and re-parse all nodes in the SCC from source. We can do this as follows: 1. Parse source for all nodes in the SCC. 2. Semantic analysis for all nodes in the SCC. 3. Type check all nodes in the SCC. (If there are more passes the process is the same -- each pass should be done for all nodes before starting the next pass for any nodes in the SCC.) We could process the nodes in the SCC in any order. For sentimental reasons, I've decided to process them in the reverse order in which we encountered them when originally constructing the graph. 
That's how the old build.py deals with cycles, and at least this reproduces the previous implementation more accurately. Can we do better than re-parsing all nodes in the SCC when any of its dependencies are out of date? It's doubtful. The optimization mentioned at the end of the previous section would require re-parsing and type-checking a node and then comparing its symbol table to the cached data; but because the node is part of a cycle we can't technically type-check it until the semantic analysis of all other nodes in the cycle has completed. (This is an important issue because Dropbox has a very large cycle in production code. But I'd like to deal with it later.) Additional wrinkles ------------------- During implementation more wrinkles were found. - When a submodule of a package (e.g. x.y) is encountered, the parent package (e.g. x) must also be loaded, but it is not strictly a dependency. See State.add_ancestors() below. """ class ModuleNotFound(Exception): """Control flow exception to signal that a module was not found.""" class State: """The state for a module. The source is only used for the -c command line option; in that case path is None. Otherwise source is None and path isn't. """ manager = None # type: BuildManager order_counter = 0 # Class variable order = None # type: int # Order in which modules were encountered id = None # type: str # Fully qualified module name path = None # type: Optional[str] # Path to module source xpath = None # type: str # Path or '' source = None # type: Optional[str] # Module source code source_hash = None # type: str # Hash calculated based on the source code meta = None # type: Optional[CacheMeta] data = None # type: Optional[str] tree = None # type: Optional[MypyFile] is_from_saved_cache = False # True if the tree came from the in-memory cache dependencies = None # type: List[str] suppressed = None # type: List[str] # Suppressed/missing dependencies priorities = None # type: Dict[str, int] # Map each dependency to the line number where it is first imported dep_line_map = None # type: Dict[str, int] # Parent package, its parent, etc. ancestors = None # type: Optional[List[str]] # A list of all direct submodules of a given module child_modules = None # type: Set[str] # List of (path, line number) tuples giving context for import import_context = None # type: List[Tuple[str, int]] # The State from which this module was imported, if any caller_state = None # type: Optional[State] # If caller_state is set, the line number in the caller where the import occurred caller_line = 0 # If True, indicate that the public interface of this module is unchanged externally_same = True # Contains a hash of the public interface in incremental mode interface_hash = "" # type: str # Options, specialized for this file options = None # type: Options # Whether to ignore all errors ignore_all = False # Whether the module has an error or any of its dependencies have one. transitive_error = False # Type checker used for checking this file. Use type_checker() for # access and to construct this on demand. 
_type_checker = None # type: Optional[TypeChecker] def __init__(self, id: Optional[str], path: Optional[str], source: Optional[str], manager: BuildManager, caller_state: 'Optional[State]' = None, caller_line: int = 0, ancestor_for: 'Optional[State]' = None, root_source: bool = False, ) -> None: assert id or path or source is not None, "Neither id, path nor source given" self.manager = manager State.order_counter += 1 self.order = State.order_counter self.caller_state = caller_state self.caller_line = caller_line if caller_state: self.import_context = caller_state.import_context[:] self.import_context.append((caller_state.xpath, caller_line)) else: self.import_context = [] self.id = id or '__main__' self.options = manager.options.clone_for_module(self.id) self._type_checker = None if not path and source is None: assert id is not None file_id = id if id == 'builtins' and self.options.python_version[0] == 2: # The __builtin__ module is called internally by mypy # 'builtins' in Python 2 mode (similar to Python 3), # but the stub file is __builtin__.pyi. The reason is # that a lot of code hard-codes 'builtins.x' and it's # easier to work it around like this. It also means # that the implementation can mostly ignore the # difference and just assume 'builtins' everywhere, # which simplifies code. file_id = '__builtin__' path = find_module(file_id, manager.lib_path) if path: # For non-stubs, look at options.follow_imports: # - normal (default) -> fully analyze # - silent -> analyze but silence errors # - skip -> don't analyze, make the type Any follow_imports = self.options.follow_imports if (follow_imports != 'normal' and not root_source # Honor top-level modules and path.endswith('.py') # Stubs are always normal and id != 'builtins'): # Builtins is always normal if follow_imports == 'silent': # Still import it, but silence non-blocker errors. manager.log("Silencing %s (%s)" % (path, id)) self.ignore_all = True else: # In 'error' mode, produce special error messages. manager.log("Skipping %s (%s)" % (path, id)) if follow_imports == 'error': if ancestor_for: self.skipping_ancestor(id, path, ancestor_for) else: self.skipping_module(id, path) path = None manager.missing_modules.add(id) raise ModuleNotFound else: # Could not find a module. Typically the reason is a # misspelled module name, missing stub, module not in # search path or the module has not been installed. if caller_state: if not self.options.ignore_missing_imports: save_import_context = manager.errors.import_context() manager.errors.set_import_context(caller_state.import_context) manager.module_not_found(caller_state.xpath, caller_state.id, caller_line, id) manager.errors.set_import_context(save_import_context) manager.missing_modules.add(id) raise ModuleNotFound else: # If we can't find a root source it's always fatal. # TODO: This might hide non-fatal errors from # root sources processed earlier. raise CompileError(["mypy: can't find module '%s'" % id]) self.path = path self.xpath = path or '' self.source = source if path and source is None and self.options.incremental: self.meta = find_cache_meta(self.id, path, manager) # TODO: Get mtime if not cached. if self.meta is not None: self.interface_hash = self.meta.interface_hash self.add_ancestors() self.meta = validate_meta(self.meta, self.id, self.path, self.ignore_all, manager) if self.meta: # Make copies, since we may modify these and want to # compare them to the originals later. 
self.dependencies = list(self.meta.dependencies) self.suppressed = list(self.meta.suppressed) assert len(self.meta.dependencies) == len(self.meta.dep_prios) self.priorities = {id: pri for id, pri in zip(self.meta.dependencies, self.meta.dep_prios)} self.child_modules = set(self.meta.child_modules) self.dep_line_map = {} else: # Parse the file (and then some) to get the dependencies. self.parse_file() self.suppressed = [] self.child_modules = set() def skipping_ancestor(self, id: str, path: str, ancestor_for: 'State') -> None: # TODO: Read the path (the __init__.py file) and return # immediately if it's empty or only contains comments. # But beware, some package may be the ancestor of many modules, # so we'd need to cache the decision. manager = self.manager manager.errors.set_import_context([]) manager.errors.set_file(ancestor_for.xpath, ancestor_for.id) manager.errors.report(-1, -1, "Ancestor package '%s' ignored" % (id,), severity='note', only_once=True) manager.errors.report(-1, -1, "(Using --follow-imports=error, submodule passed on command line)", severity='note', only_once=True) def skipping_module(self, id: str, path: str) -> None: assert self.caller_state, (id, path) manager = self.manager save_import_context = manager.errors.import_context() manager.errors.set_import_context(self.caller_state.import_context) manager.errors.set_file(self.caller_state.xpath, self.caller_state.id) line = self.caller_line manager.errors.report(line, 0, "Import of '%s' ignored" % (id,), severity='note') manager.errors.report(line, 0, "(Using --follow-imports=error, module not passed on command line)", severity='note', only_once=True) manager.errors.set_import_context(save_import_context) def add_ancestors(self) -> None: if self.path is not None: _, name = os.path.split(self.path) base, _ = os.path.splitext(name) if '.' in base: # This is just a weird filename, don't add anything self.ancestors = [] return # All parent packages are new ancestors. ancestors = [] parent = self.id while '.' in parent: parent, _ = parent.rsplit('.', 1) ancestors.append(parent) self.ancestors = ancestors def is_fresh(self) -> bool: """Return whether the cache data for this file is fresh.""" # NOTE: self.dependencies may differ from # self.meta.dependencies when a dependency is dropped due to # suppression by silent mode. However when a suppressed # dependency is added back we find out later in the process. 
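        # In short: a module counts as fresh only if we have cache metadata,
        # its public interface is unchanged, and its dependency and
        # child-module lists still match what the metadata recorded.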
return (self.meta is not None and self.is_interface_fresh() and self.dependencies == self.meta.dependencies and self.child_modules == set(self.meta.child_modules)) def is_interface_fresh(self) -> bool: return self.externally_same def has_new_submodules(self) -> bool: """Return if this module has new submodules after being loaded from a warm cache.""" return self.meta is not None and self.child_modules != set(self.meta.child_modules) def mark_as_rechecked(self) -> None: """Marks this module as having been fully re-analyzed by the type-checker.""" self.manager.rechecked_modules.add(self.id) def mark_interface_stale(self, *, on_errors: bool = False) -> None: """Marks this module as having a stale public interface, and discards the cache data.""" self.externally_same = False if not on_errors: self.manager.stale_modules.add(self.id) def check_blockers(self) -> None: """Raise CompileError if a blocking error is detected.""" if self.manager.errors.is_blockers(): self.manager.log("Bailing due to blocking errors") self.manager.errors.raise_error() @contextlib.contextmanager def wrap_context(self) -> Iterator[None]: save_import_context = self.manager.errors.import_context() self.manager.errors.set_import_context(self.import_context) try: yield except CompileError: raise except Exception as err: report_internal_error(err, self.path, 0, self.manager.errors, self.options) self.manager.errors.set_import_context(save_import_context) self.check_blockers() # Methods for processing cached modules. def load_tree(self) -> None: assert self.meta is not None, "Internal error: this method must be called only" \ " for cached modules" with open(self.meta.data_json) as f: data = json.load(f) # TODO: Assert data file wasn't changed. self.tree = MypyFile.deserialize(data) self.manager.modules[self.id] = self.tree self.manager.add_stats(fresh_trees=1) def fix_cross_refs(self) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" fixup_module_pass_one(self.tree, self.manager.modules, self.manager.options.quick_and_dirty) def calculate_mros(self) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" fixup_module_pass_two(self.tree, self.manager.modules, self.manager.options.quick_and_dirty) def patch_dependency_parents(self) -> None: """ In Python, if a and a.b are both modules, running `import a.b` will modify not only the current module's namespace, but a's namespace as well -- see SemanticAnalyzerPass2.add_submodules_to_parent_modules for more details. However, this patching process can occur after `a` has been parsed and serialized during increment mode. Consequently, we need to repeat this patch when deserializing a cached file. This function should be called only when processing fresh SCCs -- the semantic analyzer will perform this patch for us when processing stale SCCs. """ for dep in self.dependencies: self.manager.semantic_analyzer.add_submodules_to_parent_modules(dep, True) def fix_suppressed_dependencies(self, graph: Graph) -> None: """Corrects whether dependencies are considered stale in silent mode. This method is a hack to correct imports in silent mode + incremental mode. In particular, the problem is that when running mypy with a cold cache, the `parse_file(...)` function is called *at the start* of the `load_graph(...)` function. Note that load_graph will mark some dependencies as suppressed if they weren't specified on the command line in silent mode. 
However, if the interface for a module is changed, parse_file will be called within `process_stale_scc` -- *after* load_graph is finished, wiping out the changes load_graph previously made. This method is meant to be run after parse_file finishes in process_stale_scc and will recompute what modules should be considered suppressed in silent mode. """ # TODO: See if it's possible to move this check directly into parse_file in some way. # TODO: Find a way to write a test case for this fix. silent_mode = (self.options.ignore_missing_imports or self.options.follow_imports == 'skip') if not silent_mode: return new_suppressed = [] new_dependencies = [] entry_points = self.manager.source_set.source_modules for dep in self.dependencies + self.suppressed: ignored = dep in self.suppressed and dep not in entry_points if ignored or dep not in graph: new_suppressed.append(dep) else: new_dependencies.append(dep) self.dependencies = new_dependencies self.suppressed = new_suppressed # Methods for processing modules from source code. def parse_file(self) -> None: """Parse file and run first pass of semantic analysis. Everything done here is local to the file. Don't depend on imported modules in any way. Also record module dependencies based on imports. """ if self.tree is not None: # The file was already parsed (in __init__()). return manager = self.manager modules = manager.modules manager.log("Parsing %s (%s)" % (self.xpath, self.id)) with self.wrap_context(): source = self.source self.source = None # We won't need it again. if self.path and source is None: try: path = manager.maybe_swap_for_shadow_path(self.path) source, self.source_hash = read_with_python_encoding( path, self.options.python_version) except IOError as ioerr: raise CompileError([ "mypy: can't read file '{}': {}".format(self.path, ioerr.strerror)]) except (UnicodeDecodeError, DecodeError) as decodeerr: raise CompileError([ "mypy: can't decode file '{}': {}".format(self.path, str(decodeerr))]) assert source is not None self.tree = manager.parse_file(self.id, self.xpath, source, self.ignore_all or self.options.ignore_errors) modules[self.id] = self.tree # Do the first pass of semantic analysis: add top-level # definitions in the file to the symbol table. We must do # this before processing imports, since this may mark some # import statements as unreachable. first = SemanticAnalyzerPass1(manager.semantic_analyzer) with self.wrap_context(): first.visit_file(self.tree, self.xpath, self.id, self.options) # Initialize module symbol table, which was populated by the # semantic analyzer. # TODO: Why can't SemanticAnalyzerPass1 .analyze() do this? self.tree.names = manager.semantic_analyzer.globals # Compute (direct) dependencies. # Add all direct imports (this is why we needed the first pass). # Also keep track of each dependency's source line. dependencies = [] suppressed = [] priorities = {} # type: Dict[str, int] # id -> priority dep_line_map = {} # type: Dict[str, int] # id -> line for pri, id, line in manager.all_imported_modules_in_file(self.tree): priorities[id] = min(pri, priorities.get(id, PRI_ALL)) if id == self.id: continue # Omit missing modules, as otherwise we could not type-check # programs with missing modules. if id in manager.missing_modules: if id not in dep_line_map: suppressed.append(id) dep_line_map[id] = line continue if id == '': # Must be from a relative import. 
manager.errors.set_file(self.xpath, self.id) manager.errors.report(line, 0, "No parent module -- cannot perform relative import", blocker=True) continue if id not in dep_line_map: dependencies.append(id) dep_line_map[id] = line # Every module implicitly depends on builtins. if self.id != 'builtins' and 'builtins' not in dep_line_map: dependencies.append('builtins') # If self.dependencies is already set, it was read from the # cache, but for some reason we're re-parsing the file. # NOTE: What to do about race conditions (like editing the # file while mypy runs)? A previous version of this code # explicitly checked for this, but ran afoul of other reasons # for differences (e.g. silent mode). self.dependencies = dependencies self.suppressed = suppressed self.priorities = priorities self.dep_line_map = dep_line_map self.check_blockers() def semantic_analysis(self) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" patches = [] # type: List[Callable[[], None]] with self.wrap_context(): self.manager.semantic_analyzer.visit_file(self.tree, self.xpath, self.options, patches) self.patches = patches def semantic_analysis_pass_three(self) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" patches = [] # type: List[Callable[[], None]] with self.wrap_context(): self.manager.semantic_analyzer_pass3.visit_file(self.tree, self.xpath, self.options, patches) if self.options.dump_type_stats: dump_type_stats(self.tree, self.xpath) self.patches = patches + self.patches def semantic_analysis_apply_patches(self) -> None: for patch_func in self.patches: patch_func() def type_check_first_pass(self) -> None: if self.options.semantic_analysis_only: return with self.wrap_context(): self.type_checker().check_first_pass() def type_checker(self) -> TypeChecker: if not self._type_checker: assert self.tree is not None, "Internal error: must be called on parsed file only" manager = self.manager self._type_checker = TypeChecker(manager.errors, manager.modules, self.options, self.tree, self.xpath, manager.plugin) return self._type_checker def type_map(self) -> Dict[Expression, Type]: return self.type_checker().type_map def type_check_second_pass(self) -> bool: if self.options.semantic_analysis_only: return False with self.wrap_context(): return self.type_checker().check_second_pass() def finish_passes(self) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" manager = self.manager if self.options.semantic_analysis_only: return with self.wrap_context(): # Some tests want to look at the set of all types. 
if manager.options.use_builtins_fixtures or manager.options.dump_deps: manager.all_types.update(self.type_map()) if self.options.incremental: self._patch_indirect_dependencies(self.type_checker().module_refs, self.type_map()) if self.options.dump_inference_stats: dump_type_stats(self.tree, self.xpath, inferred=True, typemap=self.type_map()) manager.report_file(self.tree, self.type_map(), self.options) def _patch_indirect_dependencies(self, module_refs: Set[str], type_map: Dict[Expression, Type]) -> None: types = set(type_map.values()) assert None not in types valid = self.valid_references() encountered = self.manager.indirection_detector.find_modules(types) | module_refs extra = encountered - valid for dep in sorted(extra): if dep not in self.manager.modules: continue if dep not in self.suppressed and dep not in self.manager.missing_modules: self.dependencies.append(dep) self.priorities[dep] = PRI_INDIRECT elif dep not in self.suppressed and dep in self.manager.missing_modules: self.suppressed.append(dep) def valid_references(self) -> Set[str]: assert self.ancestors is not None valid_refs = set(self.dependencies + self.suppressed + self.ancestors) valid_refs.add(self.id) if "os" in valid_refs: valid_refs.add("os.path") return valid_refs def write_cache(self) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" if not self.path or self.options.cache_dir == os.devnull: return if self.manager.options.quick_and_dirty: is_errors = self.manager.errors.is_errors_for_file(self.path) else: is_errors = self.transitive_error if is_errors: delete_cache(self.id, self.path, self.manager) self.meta = None self.mark_interface_stale(on_errors=True) return dep_prios = self.dependency_priorities() new_interface_hash, self.meta = write_cache( self.id, self.path, self.tree, list(self.dependencies), list(self.suppressed), list(self.child_modules), dep_prios, self.interface_hash, self.source_hash, self.ignore_all, self.manager) if new_interface_hash == self.interface_hash: self.manager.log("Cached module {} has same interface".format(self.id)) else: self.manager.log("Cached module {} has changed interface".format(self.id)) self.mark_interface_stale() self.interface_hash = new_interface_hash def dependency_priorities(self) -> List[int]: return [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies] def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph: set_orig = set(manager.saved_cache) manager.log() manager.log("Mypy version %s" % __version__) t0 = time.time() graph = load_graph(sources, manager) t1 = time.time() manager.add_stats(graph_size=len(graph), stubs_found=sum(g.path is not None and g.path.endswith('.pyi') for g in graph.values()), graph_load_time=(t1 - t0), fm_cache_size=len(find_module_cache), fm_dir_cache_size=len(find_module_dir_cache), fm_listdir_cache_size=len(find_module_listdir_cache), fm_is_file_cache_size=len(find_module_is_file_cache), fm_isdir_cache_size=len(find_module_isdir_cache), ) if not graph: print("Nothing to do?!") return graph manager.log("Loaded graph with %d nodes (%.3f sec)" % (len(graph), t1 - t0)) if manager.options.dump_graph: dump_graph(graph) return graph process_graph(graph, manager) if manager.options.warn_unused_ignores: # TODO: This could also be a per-module option. 
manager.errors.generate_unused_ignore_notes() updated = preserve_cache(graph) set_updated = set(updated) manager.saved_cache.clear() manager.saved_cache.update(updated) set_final = set(manager.saved_cache) # These keys have numbers in them to force a sort order. manager.add_stats(saved_cache_1orig=len(set_orig), saved_cache_2updated=len(set_updated & set_orig), saved_cache_3added=len(set_final - set_orig), saved_cache_4removed=len(set_orig - set_final), saved_cache_5final=len(set_final)) if manager.options.dump_deps: # This speeds up startup a little when not using the daemon mode. from mypy.server.deps import dump_all_dependencies dump_all_dependencies(manager.modules, manager.all_types, manager.options.python_version) return graph def preserve_cache(graph: Graph) -> SavedCache: saved_cache = {} for id, state in graph.items(): assert state.id == id if state.meta is not None and state.tree is not None: saved_cache[id] = (state.meta, state.tree, state.type_map()) return saved_cache class NodeInfo: """Some info about a node in the graph of SCCs.""" def __init__(self, index: int, scc: List[str]) -> None: self.node_id = "n%d" % index self.scc = scc self.sizes = {} # type: Dict[str, int] # mod -> size in bytes self.deps = {} # type: Dict[str, int] # node_id -> pri def dumps(self) -> str: """Convert to JSON string.""" total_size = sum(self.sizes.values()) return "[%s, %s, %s,\n %s,\n %s]" % (json.dumps(self.node_id), json.dumps(total_size), json.dumps(self.scc), json.dumps(self.sizes), json.dumps(self.deps)) def dump_graph(graph: Graph) -> None: """Dump the graph as a JSON string to stdout. This copies some of the work by process_graph() (sorted_components() and order_ascc()). """ nodes = [] sccs = sorted_components(graph) for i, ascc in enumerate(sccs): scc = order_ascc(graph, ascc) node = NodeInfo(i, scc) nodes.append(node) inv_nodes = {} # module -> node_id for node in nodes: for mod in node.scc: inv_nodes[mod] = node.node_id for node in nodes: for mod in node.scc: state = graph[mod] size = 0 if state.path: try: size = os.path.getsize(state.path) except os.error: pass node.sizes[mod] = size for dep in state.dependencies: if dep in state.priorities: pri = state.priorities[dep] if dep in inv_nodes: dep_id = inv_nodes[dep] if (dep_id != node.node_id and (dep_id not in node.deps or pri < node.deps[dep_id])): node.deps[dep_id] = pri print("[" + ",\n ".join(node.dumps() for node in nodes) + "\n]") def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph: """Given some source files, load the full dependency graph. As this may need to parse files, this can raise CompileError in case there are syntax errors. """ graph = {} # type: Graph # The deque is used to implement breadth-first traversal. # TODO: Consider whether to go depth-first instead. This may # affect the order in which we process files within import cycles. new = collections.deque() # type: Deque[State] entry_points = set() # type: Set[str] # Seed the graph with the initial root sources. for bs in sources: try: st = State(id=bs.module, path=bs.path, source=bs.text, manager=manager, root_source=True) except ModuleNotFound: continue if st.id in graph: manager.errors.set_file(st.xpath, st.id) manager.errors.report(-1, -1, "Duplicate module named '%s'" % st.id) manager.errors.raise_error() graph[st.id] = st new.append(st) entry_points.add(bs.module) # Collect dependencies. We go breadth-first. while new: st = new.popleft() assert st.ancestors is not None # Strip out indirect dependencies. 
These will be dealt with # when they show up as direct dependencies, and there's a # scenario where they hurt: # - Suppose A imports B and B imports C. # - Suppose on the next round: # - C is deleted; # - B is updated to remove the dependency on C; # - A is unchanged. # - In this case A's cached *direct* dependencies are still valid # (since direct dependencies reflect the imports found in the source) # but A's cached *indirect* dependency on C is wrong. dependencies = [dep for dep in st.dependencies if st.priorities.get(dep) != PRI_INDIRECT] for dep in st.ancestors + dependencies + st.suppressed: # We don't want to recheck imports marked with '# type: ignore' # so we ignore any suppressed module not explicitly re-included # from the command line. ignored = dep in st.suppressed and dep not in entry_points if ignored: manager.missing_modules.add(dep) elif dep not in graph: try: if dep in st.ancestors: # TODO: Why not 'if dep not in st.dependencies' ? # Ancestors don't have import context. newst = State(id=dep, path=None, source=None, manager=manager, ancestor_for=st) else: newst = State(id=dep, path=None, source=None, manager=manager, caller_state=st, caller_line=st.dep_line_map.get(dep, 1)) except ModuleNotFound: if dep in st.dependencies: st.dependencies.remove(dep) st.suppressed.append(dep) else: assert newst.id not in graph, newst.id graph[newst.id] = newst new.append(newst) if dep in st.ancestors and dep in graph: graph[dep].child_modules.add(st.id) if dep in graph and dep in st.suppressed: # Previously suppressed file is now visible if dep in st.suppressed: st.suppressed.remove(dep) st.dependencies.append(dep) for id, g in graph.items(): if g.has_new_submodules(): g.parse_file() g.fix_suppressed_dependencies(graph) g.mark_interface_stale() return graph class FreshState(State): meta = None # type: CacheMeta def process_graph(graph: Graph, manager: BuildManager) -> None: """Process everything in dependency order.""" sccs = sorted_components(graph) manager.log("Found %d SCCs; largest has %d nodes" % (len(sccs), max(len(scc) for scc in sccs))) fresh_scc_queue = [] # type: List[List[str]] # We're processing SCCs from leaves (those without further # dependencies) to roots (those from which everything else can be # reached). for ascc in sccs: # Order the SCC's nodes using a heuristic. # Note that ascc is a set, and scc is a list. scc = order_ascc(graph, ascc) # If builtins is in the list, move it last. (This is a bit of # a hack, but it's necessary because the builtins module is # part of a small cycle involving at least {builtins, abc, # typing}. Of these, builtins must be processed last or else # some builtin objects will be incompletely processed.) if 'builtins' in ascc: scc.remove('builtins') scc.append('builtins') if manager.options.verbosity >= 2: for id in scc: manager.trace("Priorities for %s:" % id, " ".join("%s:%d" % (x, graph[id].priorities[x]) for x in graph[id].dependencies if x in ascc and x in graph[id].priorities)) # Because the SCCs are presented in topological sort order, we # don't need to look at dependencies recursively for staleness # -- the immediate dependencies are sufficient. 
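        # For example: with A -> B -> C, if only C's interface changed, C's
        # SCC is processed before B's; if that changes B's interface too, A
        # sees it through its *direct* dependency on B, so A never has to
        # inspect C itself.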
stale_scc = {id for id in scc if not graph[id].is_fresh()} fresh = not stale_scc deps = set() for id in scc: deps.update(graph[id].dependencies) deps -= ascc stale_deps = {id for id in deps if id in graph and not graph[id].is_interface_fresh()} if not manager.options.quick_and_dirty: fresh = fresh and not stale_deps undeps = set() if fresh: # Check if any dependencies that were suppressed according # to the cache have heen added back in this run. # NOTE: Newly suppressed dependencies are handled by is_fresh(). for id in scc: undeps.update(graph[id].suppressed) undeps &= graph.keys() if undeps: fresh = False if fresh: # All cache files are fresh. Check that no dependency's # cache file is newer than any scc node's cache file. fresh_graph = cast(Dict[str, FreshState], graph) oldest_in_scc = min(fresh_graph[id].meta.data_mtime for id in scc) viable = {id for id in stale_deps if graph[id].meta is not None} newest_in_deps = 0 if not viable else max(fresh_graph[dep].meta.data_mtime for dep in viable) if manager.options.verbosity >= 3: # Dump all mtimes for extreme debugging. all_ids = sorted(ascc | viable, key=lambda id: fresh_graph[id].meta.data_mtime) for id in all_ids: if id in scc: if fresh_graph[id].meta.data_mtime < newest_in_deps: key = "*id:" else: key = "id:" else: if fresh_graph[id].meta.data_mtime > oldest_in_scc: key = "+dep:" else: key = "dep:" manager.trace(" %5s %.0f %s" % (key, fresh_graph[id].meta.data_mtime, id)) # If equal, give the benefit of the doubt, due to 1-sec time granularity # (on some platforms). if manager.options.quick_and_dirty and stale_deps: fresh_msg = "fresh(ish)" elif oldest_in_scc < newest_in_deps: fresh = False fresh_msg = "out of date by %.0f seconds" % (newest_in_deps - oldest_in_scc) else: fresh_msg = "fresh" elif undeps: fresh_msg = "stale due to changed suppression (%s)" % " ".join(sorted(undeps)) elif stale_scc: fresh_msg = "inherently stale" if stale_scc != ascc: fresh_msg += " (%s)" % " ".join(sorted(stale_scc)) if stale_deps: fresh_msg += " with stale deps (%s)" % " ".join(sorted(stale_deps)) else: fresh_msg = "stale due to deps (%s)" % " ".join(sorted(stale_deps)) # Initialize transitive_error for all SCC members from union # of transitive_error of dependencies. if any(graph[dep].transitive_error for dep in deps if dep in graph): for id in scc: graph[id].transitive_error = True scc_str = " ".join(scc) if fresh: if not maybe_reuse_in_memory_tree(graph, scc, manager): manager.trace("Queuing %s SCC (%s)" % (fresh_msg, scc_str)) fresh_scc_queue.append(scc) else: if len(fresh_scc_queue) > 0: manager.log("Processing {} queued fresh SCCs".format(len(fresh_scc_queue))) # Defer processing fresh SCCs until we actually run into a stale SCC # and need the earlier modules to be loaded. # # Note that `process_graph` may end with us not having processed every # single fresh SCC. This is intentional -- we don't need those modules # loaded if there are no more stale SCCs to be rechecked. # # Also note we shouldn't have to worry about transitive_error here, # since modules with transitive errors aren't written to the cache, # and if any dependencies were changed, this SCC would be stale. # (Also, in quick_and_dirty mode we don't care about transitive errors.) # # TODO: see if it's possible to determine if we need to process only a # _subset_ of the past SCCs instead of having to process them all. 
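                # We have hit a stale SCC, so flush the queue now: every fresh
                # SCC deferred so far is loaded from its cache, since the stale
                # SCC may (directly or transitively) need those modules'
                # symbol tables.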
for prev_scc in fresh_scc_queue: process_fresh_scc(graph, prev_scc, manager) fresh_scc_queue = [] size = len(scc) if size == 1: manager.log("Processing SCC singleton (%s) as %s" % (scc_str, fresh_msg)) else: manager.log("Processing SCC of size %d (%s) as %s" % (size, scc_str, fresh_msg)) process_stale_scc(graph, scc, manager) sccs_left = len(fresh_scc_queue) nodes_left = sum(len(scc) for scc in fresh_scc_queue) manager.add_stats(sccs_left=sccs_left, nodes_left=nodes_left) if sccs_left: manager.log("{} fresh SCCs ({} nodes) left in queue (and will remain unprocessed)" .format(sccs_left, nodes_left)) manager.trace(str(fresh_scc_queue)) else: manager.log("No fresh SCCs left in queue") def order_ascc(graph: Graph, ascc: AbstractSet[str], pri_max: int = PRI_ALL) -> List[str]: """Come up with the ideal processing order within an SCC. Using the priorities assigned by all_imported_modules_in_file(), try to reduce the cycle to a DAG, by omitting arcs representing dependencies of lower priority. In the simplest case, if we have A <--> B where A has a top-level "import B" (medium priority) but B only has the reverse "import A" inside a function (low priority), we turn the cycle into a DAG by dropping the B --> A arc, which leaves only A --> B. If all arcs have the same priority, we fall back to sorting by reverse global order (the order in which modules were first encountered). The algorithm is recursive, as follows: when as arcs of different priorities are present, drop all arcs of the lowest priority, identify SCCs in the resulting graph, and apply the algorithm to each SCC thus found. The recursion is bounded because at each recursion the spread in priorities is (at least) one less. In practice there are only a few priority levels (less than a dozen) and in the worst case we just carry out the same algorithm for finding SCCs N times. Thus the complexity is no worse than the complexity of the original SCC-finding algorithm -- see strongly_connected_components() below for a reference. """ if len(ascc) == 1: return [s for s in ascc] pri_spread = set() for id in ascc: state = graph[id] for dep in state.dependencies: if dep in ascc: pri = state.priorities.get(dep, PRI_HIGH) if pri < pri_max: pri_spread.add(pri) if len(pri_spread) == 1: # Filtered dependencies are uniform -- order by global order. return sorted(ascc, key=lambda id: -graph[id].order) pri_max = max(pri_spread) sccs = sorted_components(graph, ascc, pri_max) # The recursion is bounded by the len(pri_spread) check above. return [s for ss in sccs for s in order_ascc(graph, ss, pri_max)] def process_fresh_scc(graph: Graph, scc: List[str], manager: BuildManager) -> None: """Process the modules in one SCC from their cached data. This involves loading the tree from JSON and then doing various cleanups. If the tree is loaded from memory ('saved_cache') it's even quicker. """ for id in scc: graph[id].load_tree() for id in scc: graph[id].fix_cross_refs() for id in scc: graph[id].calculate_mros() for id in scc: graph[id].patch_dependency_parents() def maybe_reuse_in_memory_tree(graph: Graph, scc: List[str], manager: BuildManager) -> bool: """Set the trees for the given SCC from the in-memory cache, if all valid. If any saved tree for this SCC is invalid, set the trees for all SCC members to None and mark as not-from-cache. 
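    Return True if the saved trees were reused (and re-registered in
    manager.modules), False otherwise.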
""" if not can_reuse_in_memory_tree(graph, scc, manager): for id in scc: manager.add_stats(cleared_trees=1) manager.trace("Clearing tree %s" % id) st = graph[id] st.tree = None st.is_from_saved_cache = False if id in manager.modules: del manager.modules[id] return False trees = {id: manager.saved_cache[id][1] for id in scc} for id, tree in trees.items(): manager.add_stats(reused_trees=1) manager.trace("Reusing saved tree %s" % id) st = graph[id] st.tree = tree st.is_from_saved_cache = True manager.modules[id] = tree # Delete any submodules from the module that aren't # dependencies of the module; they will be re-added once # imported. It's possible that the parent module is reused # but a submodule isn't; we don't want to accidentally link # into the old submodule's tree. See also # patch_dependency_parents() above. The exception for subname # in st.dependencies handles the case where 'import m' # guarantees that some submodule of m is also available # (e.g. 'os.path'); in those cases the submodule is an # explicit dependency of the parent. for name in list(tree.names): sym = tree.names[name] subname = id + '.' + name if (sym.kind == MODULE_REF and sym.node is not None and sym.node.fullname() == subname and subname not in st.dependencies): manager.trace("Purging %s" % subname) del tree.names[name] return True def can_reuse_in_memory_tree(graph: Graph, scc: List[str], manager: BuildManager) -> bool: """Check whether the given SCC can safely reuse the trees from saved_cache. Assumes the SCC is already considered fresh. """ saved_cache = manager.saved_cache # Check that all nodes are available for loading from memory. if all(id in saved_cache for id in scc): # Check that all dependencies were loaded from memory. # If not, some dependency was reparsed but the interface hash # wasn't changed -- in that case we can't reuse the tree. # TODO: Pass deps in from process_graph(), via maybe_reuse_in_memory_tree()? deps = set(dep for id in scc for dep in graph[id].dependencies if dep in graph) deps -= set(scc) # Subtract the SCC itself (else nothing will be safe) if all(graph[dep].is_from_saved_cache for dep in deps): return True return False def process_stale_scc(graph: Graph, scc: List[str], manager: BuildManager) -> None: """Process the modules in one SCC from source code. Exception: If quick_and_dirty is set, use the cache for fresh modules. """ if manager.options.quick_and_dirty: fresh = [id for id in scc if graph[id].is_fresh()] fresh_set = set(fresh) # To avoid running into O(N**2) stale = [id for id in scc if id not in fresh_set] if fresh: manager.log(" Fresh ids: %s" % (", ".join(fresh))) if stale: manager.log(" Stale ids: %s" % (", ".join(stale))) else: fresh = [] stale = scc for id in fresh: graph[id].load_tree() for id in stale: # We may already have parsed the module, or not. # If the former, parse_file() is a no-op. 
graph[id].parse_file() graph[id].fix_suppressed_dependencies(graph) for id in fresh: graph[id].fix_cross_refs() for id in stale: graph[id].semantic_analysis() for id in stale: graph[id].semantic_analysis_pass_three() for id in fresh: graph[id].calculate_mros() for id in stale: graph[id].semantic_analysis_apply_patches() for id in stale: graph[id].type_check_first_pass() more = True while more: more = False for id in stale: if graph[id].type_check_second_pass(): more = True if any(manager.errors.is_errors_for_file(graph[id].xpath) for id in stale): for id in stale: graph[id].transitive_error = True for id in stale: graph[id].finish_passes() graph[id].write_cache() graph[id].mark_as_rechecked() def sorted_components(graph: Graph, vertices: Optional[AbstractSet[str]] = None, pri_max: int = PRI_ALL) -> List[AbstractSet[str]]: """Return the graph's SCCs, topologically sorted by dependencies. The sort order is from leaves (nodes without dependencies) to roots (nodes on which no other nodes depend). This works for a subset of the full dependency graph too; dependencies that aren't present in graph.keys() are ignored. """ # Compute SCCs. if vertices is None: vertices = set(graph) edges = {id: deps_filtered(graph, vertices, id, pri_max) for id in vertices} sccs = list(strongly_connected_components(vertices, edges)) # Topsort. sccsmap = {id: frozenset(scc) for scc in sccs for id in scc} data = {} # type: Dict[AbstractSet[str], Set[AbstractSet[str]]] for scc in sccs: deps = set() # type: Set[AbstractSet[str]] for id in scc: deps.update(sccsmap[x] for x in deps_filtered(graph, vertices, id, pri_max)) data[frozenset(scc)] = deps res = [] for ready in topsort(data): # Sort the sets in ready by reversed smallest State.order. Examples: # # - If ready is [{x}, {y}], x.order == 1, y.order == 2, we get # [{y}, {x}]. # # - If ready is [{a, b}, {c, d}], a.order == 1, b.order == 3, # c.order == 2, d.order == 4, the sort keys become [1, 2] # and the result is [{c, d}, {a, b}]. res.extend(sorted(ready, key=lambda scc: -min(graph[id].order for id in scc))) return res def deps_filtered(graph: Graph, vertices: AbstractSet[str], id: str, pri_max: int) -> List[str]: """Filter dependencies for id with pri < pri_max.""" if id not in vertices: return [] state = graph[id] return [dep for dep in state.dependencies if dep in vertices and state.priorities.get(dep, PRI_HIGH) < pri_max] def strongly_connected_components(vertices: AbstractSet[str], edges: Dict[str, List[str]]) -> Iterator[Set[str]]: """Compute Strongly Connected Components of a directed graph. Args: vertices: the labels for the vertices edges: for each vertex, gives the target vertices of its outgoing edges Returns: An iterator yielding strongly connected components, each represented as a set of vertices. Each input vertex will occur exactly once; vertices not part of a SCC are returned as singleton sets. From http://code.activestate.com/recipes/578507/. 
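    A small usage sketch (illustrative only, not part of the original
    recipe):

        edges = {'a': ['b'], 'b': ['a', 'c'], 'c': []}
        sccs = list(strongly_connected_components({'a', 'b', 'c'}, edges))
        # sccs == [{'c'}, {'a', 'b'}]: 'a' and 'b' form a cycle, 'c' does
        # not, and a component is yielded only after the components it has
        # edges into.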
""" identified = set() # type: Set[str] stack = [] # type: List[str] index = {} # type: Dict[str, int] boundaries = [] # type: List[int] def dfs(v: str) -> Iterator[Set[str]]: index[v] = len(stack) stack.append(v) boundaries.append(index[v]) for w in edges[v]: if w not in index: # For Python >= 3.3, replace with "yield from dfs(w)" for scc in dfs(w): yield scc elif w not in identified: while index[w] < boundaries[-1]: boundaries.pop() if boundaries[-1] == index[v]: boundaries.pop() scc = set(stack[index[v]:]) del stack[index[v]:] identified.update(scc) yield scc for v in vertices: if v not in index: # For Python >= 3.3, replace with "yield from dfs(v)" for scc in dfs(v): yield scc def topsort(data: Dict[AbstractSet[str], Set[AbstractSet[str]]]) -> Iterable[Set[AbstractSet[str]]]: """Topological sort. Args: data: A map from SCCs (represented as frozen sets of strings) to sets of SCCs, its dependencies. NOTE: This data structure is modified in place -- for normalization purposes, self-dependencies are removed and entries representing orphans are added. Returns: An iterator yielding sets of SCCs that have an equivalent ordering. NOTE: The algorithm doesn't care about the internal structure of SCCs. Example: Suppose the input has the following structure: {A: {B, C}, B: {D}, C: {D}} This is normalized to: {A: {B, C}, B: {D}, C: {D}, D: {}} The algorithm will yield the following values: {D} {B, C} {A} From http://code.activestate.com/recipes/577413/. """ # TODO: Use a faster algorithm? for k, v in data.items(): v.discard(k) # Ignore self dependencies. for item in set.union(*data.values()) - set(data.keys()): data[item] = set() while True: ready = {item for item, dep in data.items() if not dep} if not ready: break yield ready data = {item: (dep - ready) for item, dep in data.items() if item not in ready} assert not data, "A cyclic dependency exists amongst %r" % data mypy-0.560/mypy/checker.py0000644€tŠÔÚ€2›s®0000051270613215007206021657 0ustar jukkaDROPBOX\Domain Users00000000000000"""Mypy type checker.""" import itertools import fnmatch from contextlib import contextmanager import sys from typing import ( Dict, Set, List, cast, Tuple, TypeVar, Union, Optional, NamedTuple, Iterator ) from mypy.errors import Errors, report_internal_error from mypy.nodes import ( SymbolTable, Statement, MypyFile, Var, Expression, Lvalue, OverloadedFuncDef, FuncDef, FuncItem, FuncBase, TypeInfo, ClassDef, GDEF, Block, AssignmentStmt, NameExpr, MemberExpr, IndexExpr, TupleExpr, ListExpr, ExpressionStmt, ReturnStmt, IfStmt, WhileStmt, OperatorAssignmentStmt, WithStmt, AssertStmt, RaiseStmt, TryStmt, ForStmt, DelStmt, CallExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr, OpExpr, UnaryExpr, CastExpr, RevealTypeExpr, SuperExpr, TypeApplication, DictExpr, SliceExpr, LambdaExpr, TempNode, SymbolTableNode, Context, ListComprehension, ConditionalExpr, GeneratorExpr, Decorator, SetExpr, TypeVarExpr, NewTypeExpr, PrintStmt, LITERAL_TYPE, BreakStmt, PassStmt, ContinueStmt, ComparisonExpr, StarExpr, YieldFromExpr, NamedTupleExpr, TypedDictExpr, SetComprehension, DictionaryComprehension, ComplexExpr, EllipsisExpr, TypeAliasExpr, RefExpr, YieldExpr, BackquoteExpr, Import, ImportFrom, ImportAll, ImportBase, AwaitExpr, PromoteExpr, Node, EnumCallExpr, ARG_POS, MDEF, CONTRAVARIANT, COVARIANT, INVARIANT) from mypy import nodes from mypy.literals import literal, literal_hash from mypy.typeanal import has_any_from_unimported_type, check_for_explicit_any from mypy.types import ( Type, AnyType, CallableType, FunctionLike, 
Overloaded, TupleType, TypedDictType, Instance, NoneTyp, strip_type, TypeType, TypeOfAny, UnionType, TypeVarId, TypeVarType, PartialType, DeletedType, UninhabitedType, TypeVarDef, true_only, false_only, function_type, is_named_instance, union_items ) from mypy.sametypes import is_same_type, is_same_types from mypy.messages import MessageBuilder, make_inferred_type_note import mypy.checkexpr from mypy.checkmember import map_type_from_supertype, bind_self, erase_to_bound from mypy import messages from mypy.subtypes import ( is_subtype, is_equivalent, is_proper_subtype, is_more_precise, restrict_subtype_away, is_subtype_ignoring_tvars, is_callable_subtype, unify_generic_callable, find_member ) from mypy.maptype import map_instance_to_supertype from mypy.typevars import fill_typevars, has_no_typevars from mypy.semanal import set_callable_name, refers_to_fullname from mypy.erasetype import erase_typevars from mypy.expandtype import expand_type, expand_type_by_instance from mypy.visitor import NodeVisitor from mypy.join import join_types from mypy.treetransform import TransformVisitor from mypy.binder import ConditionalTypeBinder, get_declaration from mypy.meet import is_overlapping_types from mypy.options import Options from mypy.plugin import Plugin, CheckerPluginInterface from mypy import experiments T = TypeVar('T') LAST_PASS = 1 # Pass numbers start at 0 # A node which is postponed to be processed during the next pass. # This is used for both batch mode and fine-grained incremental mode. DeferredNode = NamedTuple( 'DeferredNode', [ # In batch mode only FuncDef and LambdaExpr are supported ('node', Union[FuncDef, LambdaExpr, MypyFile]), ('context_type_name', Optional[str]), # Name of the surrounding class (for error messages) ('active_typeinfo', Optional[TypeInfo]), # And its TypeInfo (for semantic analysis # self type handling) ]) class TypeChecker(NodeVisitor[None], CheckerPluginInterface): """Mypy type checker. Type check mypy source files that have been semantically analyzed. You must create a separate instance for each source file. """ # Are we type checking a stub? is_stub = False # Error message reporter errors = None # type: Errors # Utility for generating messages msg = None # type: MessageBuilder # Types of type checked nodes type_map = None # type: Dict[Expression, Type] # Helper for managing conditional types binder = None # type: ConditionalTypeBinder # Helper for type checking expressions expr_checker = None # type: mypy.checkexpr.ExpressionChecker scope = None # type: Scope # Stack of function return types return_types = None # type: List[Type] # Flags; true for dynamically typed functions dynamic_funcs = None # type: List[bool] # Stack of collections of variables with partial types partial_types = None # type: List[Dict[Var, Context]] # Vars for which partial type errors are already reported # (to avoid logically duplicate errors with different error context). partial_reported = None # type: Set[Var] globals = None # type: SymbolTable modules = None # type: Dict[str, MypyFile] # Nodes that couldn't be checked because some types weren't available. We'll run # another pass and try these again. deferred_nodes = None # type: List[DeferredNode] # Type checking pass number (0 = first pass) pass_num = 0 # Have we deferred the current function? If yes, don't infer additional # types during this pass within the function. current_node_deferred = False # Is this file a typeshed stub? is_typeshed_stub = False # Should strict Optional-related errors be suppressed in this file? 
suppress_none_errors = False # TODO: Get it from options instead options = None # type: Options # Used for collecting inferred attribute types so that they can be checked # for consistency. inferred_attribute_types = None # type: Optional[Dict[Var, Type]] # Don't infer partial None types if we are processing assignment from Union no_partial_types = False # type: bool # The set of all dependencies (suppressed or not) that this module accesses, either # directly or indirectly. module_refs = None # type: Set[str] # Plugin that provides special type checking rules for specific library # functions such as open(), etc. plugin = None # type: Plugin def __init__(self, errors: Errors, modules: Dict[str, MypyFile], options: Options, tree: MypyFile, path: str, plugin: Plugin) -> None: """Construct a type checker. Use errors to report type check errors. """ self.errors = errors self.modules = modules self.options = options self.tree = tree self.path = path self.msg = MessageBuilder(errors, modules) self.plugin = plugin self.expr_checker = mypy.checkexpr.ExpressionChecker(self, self.msg, self.plugin) self.scope = Scope(tree) self.binder = ConditionalTypeBinder() self.globals = tree.names self.return_types = [] self.dynamic_funcs = [] self.partial_types = [] self.partial_reported = set() self.deferred_nodes = [] self.type_map = {} self.module_refs = set() self.pass_num = 0 self.current_node_deferred = False self.is_stub = tree.is_stub self.is_typeshed_stub = errors.is_typeshed_file(path) self.inferred_attribute_types = None if options.strict_optional_whitelist is None: self.suppress_none_errors = not options.show_none_errors else: self.suppress_none_errors = not any(fnmatch.fnmatch(path, pattern) for pattern in options.strict_optional_whitelist) def check_first_pass(self) -> None: """Type check the entire file, but defer functions with unresolved references. Unresolved references are forward references to variables whose types haven't been inferred yet. They may occur later in the same file or in a different file that's being processed later (usually due to an import cycle). Deferred functions will be processed by check_second_pass(). """ with experiments.strict_optional_set(self.options.strict_optional): self.errors.set_file(self.path, self.tree.fullname()) with self.enter_partial_types(): with self.binder.top_frame_context(): for d in self.tree.defs: self.accept(d) assert not self.current_node_deferred all_ = self.globals.get('__all__') if all_ is not None and all_.type is not None: all_node = all_.node assert all_node is not None seq_str = self.named_generic_type('typing.Sequence', [self.named_type('builtins.str')]) if self.options.python_version[0] < 3: seq_str = self.named_generic_type('typing.Sequence', [self.named_type('builtins.unicode')]) if not is_subtype(all_.type, seq_str): str_seq_s, all_s = self.msg.format_distinctly(seq_str, all_.type) self.fail(messages.ALL_MUST_BE_SEQ_STR.format(str_seq_s, all_s), all_node) def check_second_pass(self, todo: Optional[List[DeferredNode]] = None) -> bool: """Run second or following pass of type checking. This goes through deferred nodes, returning True if there were any. 
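        If an explicit todo list is given (e.g. by fine-grained incremental
        mode), those nodes are checked instead of the nodes deferred during
        earlier passes.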
""" with experiments.strict_optional_set(self.options.strict_optional): if not todo and not self.deferred_nodes: return False self.errors.set_file(self.path, self.tree.fullname()) self.pass_num += 1 if not todo: todo = self.deferred_nodes else: assert not self.deferred_nodes self.deferred_nodes = [] done = set() # type: Set[Union[FuncDef, LambdaExpr, MypyFile]] for node, type_name, active_typeinfo in todo: if node in done: continue # This is useful for debugging: # print("XXX in pass %d, class %s, function %s" % # (self.pass_num, type_name, node.fullname() or node.name())) done.add(node) with self.errors.enter_type(type_name) if type_name else nothing(): with self.scope.push_class(active_typeinfo) if active_typeinfo else nothing(): self.check_partial(node) return True def check_partial(self, node: Union[FuncDef, LambdaExpr, MypyFile]) -> None: if isinstance(node, MypyFile): self.check_top_level(node) elif isinstance(node, LambdaExpr): self.expr_checker.accept(node) else: self.accept(node) def check_top_level(self, node: MypyFile) -> None: """Check only the top-level of a module, skipping function definitions.""" with self.enter_partial_types(): with self.binder.top_frame_context(): for d in node.defs: # TODO: Type check class bodies. if not isinstance(d, (FuncDef, ClassDef)): d.accept(self) assert not self.current_node_deferred # TODO: Handle __all__ def handle_cannot_determine_type(self, name: str, context: Context) -> None: node = self.scope.top_non_lambda_function() if self.pass_num < LAST_PASS and isinstance(node, FuncDef): # Don't report an error yet. Just defer. Note that we don't defer # lambdas because they are coupled to the surrounding function # through the binder and the inferred type of the lambda, so it # would get messy. if self.errors.type_name: type_name = self.errors.type_name[-1] else: type_name = None # Shouldn't we freeze the entire scope? enclosing_class = self.scope.enclosing_class() self.deferred_nodes.append(DeferredNode(node, type_name, enclosing_class)) # Set a marker so that we won't infer additional types in this # function. Any inferred types could be bogus, because there's at # least one type that we don't know. self.current_node_deferred = True else: self.msg.cannot_determine_type(name, context) def accept(self, stmt: Statement) -> None: """Type check a node in the given type context.""" try: stmt.accept(self) except Exception as err: report_internal_error(err, self.errors.file, stmt.line, self.errors, self.options) def accept_loop(self, body: Statement, else_body: Optional[Statement] = None, *, exit_condition: Optional[Expression] = None) -> None: """Repeatedly type check a loop body until the frame doesn't change. If exit_condition is set, assume it must be False on exit from the loop. Then check the else_body. """ # The outer frame accumulates the results of all iterations with self.binder.frame_context(can_skip=False): while True: with self.binder.frame_context(can_skip=True, break_frame=2, continue_frame=1): self.accept(body) if not self.binder.last_pop_changed: break if exit_condition: _, else_map = self.find_isinstance_check(exit_condition) self.push_type_map(else_map) if else_body: self.accept(else_body) # # Definitions # def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: num_abstract = 0 if not defn.items: # In this case we have already complained about none of these being # valid overloads. 
return None if len(defn.items) == 1: self.fail('Single overload definition, multiple required', defn) if defn.is_property: # HACK: Infer the type of the property. self.visit_decorator(cast(Decorator, defn.items[0])) for fdef in defn.items: assert isinstance(fdef, Decorator) self.check_func_item(fdef.func, name=fdef.func.name()) if fdef.func.is_abstract: num_abstract += 1 if num_abstract not in (0, len(defn.items)): self.fail(messages.INCONSISTENT_ABSTRACT_OVERLOAD, defn) if defn.impl: defn.impl.accept(self) if defn.info: self.check_method_override(defn) self.check_inplace_operator_method(defn) self.check_overlapping_overloads(defn) return None def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None: # At this point we should have set the impl already, and all remaining # items are decorators for i, item in enumerate(defn.items): assert isinstance(item, Decorator) sig1 = self.function_type(item.func) for j, item2 in enumerate(defn.items[i + 1:]): # TODO overloads involving decorators assert isinstance(item2, Decorator) sig2 = self.function_type(item2.func) if is_unsafe_overlapping_signatures(sig1, sig2): self.msg.overloaded_signatures_overlap(i + 1, i + j + 2, item.func) if defn.impl: if isinstance(defn.impl, FuncDef): impl_type = defn.impl.type elif isinstance(defn.impl, Decorator): impl_type = defn.impl.var.type else: assert False, "Impl isn't the right type" # This can happen if we've got an overload with a different # decorator too -- we gave up on the types. if impl_type is None or isinstance(impl_type, AnyType) or sig1 is None: return assert isinstance(impl_type, CallableType) assert isinstance(sig1, CallableType) if not is_callable_subtype(impl_type, sig1, ignore_return=True): self.msg.overloaded_signatures_arg_specific(i + 1, defn.impl) impl_type_subst = impl_type if impl_type.variables: unified = unify_generic_callable(impl_type, sig1, ignore_return=False) if unified is None: self.fail("Type variable mismatch between " + "overload signature {} and implementation".format(i + 1), defn.impl) return impl_type_subst = unified if not is_subtype(sig1.ret_type, impl_type_subst.ret_type): self.msg.overloaded_signatures_ret_specific(i + 1, defn.impl) # Here's the scoop about generators and coroutines. # # There are two kinds of generators: classic generators (functions # with `yield` or `yield from` in the body) and coroutines # (functions declared with `async def`). The latter are specified # in PEP 492 and only available in Python >= 3.5. # # Classic generators can be parameterized with three types: # - ty is the Yield type (the type of y in `yield y`) # - tc is the type reCeived by yield (the type of c in `c = yield`). # - tr is the Return type (the type of r in `return r`) # # A classic generator must define a return type that's either # `Generator[ty, tc, tr]`, Iterator[ty], or Iterable[ty] (or # object or Any). If tc/tr are not given, both are None. # # A coroutine must define a return type corresponding to tr; the # other two are unconstrained. The "external" return type (seen # by the caller) is Awaitable[tr]. # # In addition, there's the synthetic type AwaitableGenerator: it # inherits from both Awaitable and Generator and can be used both # in `yield from` and in `await`. This type is set automatically # for functions decorated with `@types.coroutine` or # `@asyncio.coroutine`. Its single parameter corresponds to tr. # # PEP 525 adds a new type, the asynchronous generator, which was # first released in Python 3.6. 
Async generators are `async def` # functions that can also `yield` values. They can be parameterized # with two types, ty and tc, because they cannot return a value. # # There are several useful methods, each taking a type t and a # flag c indicating whether it's for a generator or coroutine: # # - is_generator_return_type(t, c) returns whether t is a Generator, # Iterator, Iterable (if not c), or Awaitable (if c), or # AwaitableGenerator (regardless of c). # - is_async_generator_return_type(t) returns whether t is an # AsyncGenerator. # - get_generator_yield_type(t, c) returns ty. # - get_generator_receive_type(t, c) returns tc. # - get_generator_return_type(t, c) returns tr. def is_generator_return_type(self, typ: Type, is_coroutine: bool) -> bool: """Is `typ` a valid type for a generator/coroutine? True if `typ` is a *supertype* of Generator or Awaitable. Also true it it's *exactly* AwaitableGenerator (modulo type parameters). """ if is_coroutine: # This means we're in Python 3.5 or later. at = self.named_generic_type('typing.Awaitable', [AnyType(TypeOfAny.special_form)]) if is_subtype(at, typ): return True else: any_type = AnyType(TypeOfAny.special_form) gt = self.named_generic_type('typing.Generator', [any_type, any_type, any_type]) if is_subtype(gt, typ): return True return isinstance(typ, Instance) and typ.type.fullname() == 'typing.AwaitableGenerator' def is_async_generator_return_type(self, typ: Type) -> bool: """Is `typ` a valid type for an async generator? True if `typ` is a supertype of AsyncGenerator. """ try: any_type = AnyType(TypeOfAny.special_form) agt = self.named_generic_type('typing.AsyncGenerator', [any_type, any_type]) except KeyError: # we're running on a version of typing that doesn't have AsyncGenerator yet return False return is_subtype(agt, typ) def get_generator_yield_type(self, return_type: Type, is_coroutine: bool) -> Type: """Given the declared return type of a generator (t), return the type it yields (ty).""" if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) elif (not self.is_generator_return_type(return_type, is_coroutine) and not self.is_async_generator_return_type(return_type)): # If the function doesn't have a proper Generator (or # Awaitable) return type, anything is permissible. return AnyType(TypeOfAny.from_error) elif not isinstance(return_type, Instance): # Same as above, but written as a separate branch so the typechecker can understand. return AnyType(TypeOfAny.from_error) elif return_type.type.fullname() == 'typing.Awaitable': # Awaitable: ty is Any. return AnyType(TypeOfAny.special_form) elif return_type.args: # AwaitableGenerator, Generator, AsyncGenerator, Iterator, or Iterable; ty is args[0]. ret_type = return_type.args[0] # TODO not best fix, better have dedicated yield token return ret_type else: # If the function's declared supertype of Generator has no type # parameters (i.e. is `object`), then the yielded values can't # be accessed so any type is acceptable. IOW, ty is Any. 
# (However, see https://github.com/python/mypy/issues/1933) return AnyType(TypeOfAny.special_form) def get_generator_receive_type(self, return_type: Type, is_coroutine: bool) -> Type: """Given a declared generator return type (t), return the type its yield receives (tc).""" if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) elif (not self.is_generator_return_type(return_type, is_coroutine) and not self.is_async_generator_return_type(return_type)): # If the function doesn't have a proper Generator (or # Awaitable) return type, anything is permissible. return AnyType(TypeOfAny.from_error) elif not isinstance(return_type, Instance): # Same as above, but written as a separate branch so the typechecker can understand. return AnyType(TypeOfAny.from_error) elif return_type.type.fullname() == 'typing.Awaitable': # Awaitable, AwaitableGenerator: tc is Any. return AnyType(TypeOfAny.special_form) elif (return_type.type.fullname() in ('typing.Generator', 'typing.AwaitableGenerator') and len(return_type.args) >= 3): # Generator: tc is args[1]. return return_type.args[1] elif return_type.type.fullname() == 'typing.AsyncGenerator' and len(return_type.args) >= 2: return return_type.args[1] else: # `return_type` is a supertype of Generator, so callers won't be able to send it # values. IOW, tc is None. return NoneTyp() def get_generator_return_type(self, return_type: Type, is_coroutine: bool) -> Type: """Given the declared return type of a generator (t), return the type it returns (tr).""" if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) elif not self.is_generator_return_type(return_type, is_coroutine): # If the function doesn't have a proper Generator (or # Awaitable) return type, anything is permissible. return AnyType(TypeOfAny.from_error) elif not isinstance(return_type, Instance): # Same as above, but written as a separate branch so the typechecker can understand. return AnyType(TypeOfAny.from_error) elif return_type.type.fullname() == 'typing.Awaitable' and len(return_type.args) == 1: # Awaitable: tr is args[0]. return return_type.args[0] elif (return_type.type.fullname() in ('typing.Generator', 'typing.AwaitableGenerator') and len(return_type.args) >= 3): # AwaitableGenerator, Generator: tr is args[2]. return return_type.args[2] else: # Supertype of Generator (Iterator, Iterable, object): tr is any. return AnyType(TypeOfAny.special_form) def visit_func_def(self, defn: FuncDef) -> None: """Type check a function definition.""" self.check_func_item(defn, name=defn.name()) if defn.info: if not defn.is_dynamic(): self.check_method_override(defn) self.check_inplace_operator_method(defn) if defn.original_def: # Override previous definition. new_type = self.function_type(defn) if isinstance(defn.original_def, FuncDef): # Function definition overrides function definition. if not is_same_type(new_type, self.function_type(defn.original_def)): self.msg.incompatible_conditional_function_def(defn) else: # Function definition overrides a variable initialized via assignment or a # decorated function. orig_type = defn.original_def.type if orig_type is None: # XXX This can be None, as happens in # test_testcheck_TypeCheckSuite.testRedefinedFunctionInTryWithElse self.msg.note("Internal mypy error checking function redefinition", defn) return if isinstance(orig_type, PartialType): if orig_type.type is None: # Ah this is a partial type. Give it the type of the function. 
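# For illustration (hypothetical user code), this roughly covers conditional
# redefinitions such as:
#
#     f = None                   # 'f' starts with a partial None type
#     if some_condition:
#         def f() -> int: ...    # 'f' is now given the function's type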
orig_def = defn.original_def if isinstance(orig_def, Decorator): var = orig_def.var else: var = orig_def partial_types = self.find_partial_types(var) if partial_types is not None: var.type = new_type del partial_types[var] else: # Trying to redefine something like partial empty list as function. self.fail(messages.INCOMPATIBLE_REDEFINITION, defn) else: # TODO: Update conditional type binder. self.check_subtype(new_type, orig_type, defn, messages.INCOMPATIBLE_REDEFINITION, 'redefinition with type', 'original type') def check_func_item(self, defn: FuncItem, type_override: Optional[CallableType] = None, name: Optional[str] = None) -> None: """Type check a function. If type_override is provided, use it as the function type. """ # We may be checking a function definition or an anonymous function. In # the first case, set up another reference with the precise type. fdef = None # type: Optional[FuncDef] if isinstance(defn, FuncDef): fdef = defn self.dynamic_funcs.append(defn.is_dynamic() and not type_override) with self.errors.enter_function(fdef.name()) if fdef else nothing(): with self.enter_partial_types(): typ = self.function_type(defn) if type_override: typ = type_override.copy_modified(line=typ.line, column=typ.column) if isinstance(typ, CallableType): with self.enter_attribute_inference_context(): self.check_func_def(defn, typ, name) else: raise RuntimeError('Not supported') self.dynamic_funcs.pop() self.current_node_deferred = False @contextmanager def enter_attribute_inference_context(self) -> Iterator[None]: old_types = self.inferred_attribute_types self.inferred_attribute_types = {} yield None self.inferred_attribute_types = old_types def check_func_def(self, defn: FuncItem, typ: CallableType, name: Optional[str]) -> None: """Type check a function definition.""" # Expand type variables with value restrictions to ordinary types. for item, typ in self.expand_typevars(defn, typ): old_binder = self.binder self.binder = ConditionalTypeBinder() with self.binder.top_frame_context(): defn.expanded.append(item) # We may be checking a function definition or an anonymous # function. In the first case, set up another reference with the # precise type. if isinstance(item, FuncDef): fdef = item # Check if __init__ has an invalid, non-None return type. 
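# For illustration: an annotation such as "def __init__(self) -> 'C': ..."
# on a class C is flagged here; __init__ (and __init_subclass__) must be
# declared to return None.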
if (fdef.info and fdef.name() in ('__init__', '__init_subclass__') and not isinstance(typ.ret_type, NoneTyp) and not self.dynamic_funcs[-1]): self.fail(messages.MUST_HAVE_NONE_RETURN_TYPE.format(fdef.name()), item) self.check_for_missing_annotations(fdef) if self.options.disallow_any_unimported: if fdef.type and isinstance(fdef.type, CallableType): ret_type = fdef.type.ret_type if has_any_from_unimported_type(ret_type): self.msg.unimported_type_becomes_any("Return type", ret_type, fdef) for idx, arg_type in enumerate(fdef.type.arg_types): if has_any_from_unimported_type(arg_type): prefix = "Argument {} to \"{}\"".format(idx + 1, fdef.name()) self.msg.unimported_type_becomes_any(prefix, arg_type, fdef) check_for_explicit_any(fdef.type, self.options, self.is_typeshed_stub, self.msg, context=fdef) if name: # Special method names if name in nodes.reverse_op_method_set: self.check_reverse_op_method(item, typ, name, defn) elif name in ('__getattr__', '__getattribute__'): self.check_getattr_method(typ, defn, name) elif name == '__setattr__': self.check_setattr_method(typ, defn) # Refuse contravariant return type variable if isinstance(typ.ret_type, TypeVarType): if typ.ret_type.variance == CONTRAVARIANT: self.fail(messages.RETURN_TYPE_CANNOT_BE_CONTRAVARIANT, typ.ret_type) # Check that Generator functions have the appropriate return type. if defn.is_generator: if defn.is_async_generator: if not self.is_async_generator_return_type(typ.ret_type): self.fail(messages.INVALID_RETURN_TYPE_FOR_ASYNC_GENERATOR, typ) else: if not self.is_generator_return_type(typ.ret_type, defn.is_coroutine): self.fail(messages.INVALID_RETURN_TYPE_FOR_GENERATOR, typ) # Python 2 generators aren't allowed to return values. if (self.options.python_version[0] == 2 and isinstance(typ.ret_type, Instance) and typ.ret_type.type.fullname() == 'typing.Generator'): if not isinstance(typ.ret_type.args[2], (NoneTyp, AnyType)): self.fail(messages.INVALID_GENERATOR_RETURN_ITEM_TYPE, typ) # Fix the type if decorated with `@types.coroutine` or `@asyncio.coroutine`. if defn.is_awaitable_coroutine: # Update the return type to AwaitableGenerator. # (This doesn't exist in typing.py, only in typing.pyi.) t = typ.ret_type c = defn.is_coroutine ty = self.get_generator_yield_type(t, c) tc = self.get_generator_receive_type(t, c) tr = self.get_generator_return_type(t, c) ret_type = self.named_generic_type('typing.AwaitableGenerator', [ty, tc, tr, t]) typ = typ.copy_modified(ret_type=ret_type) defn.type = typ # Push return type. self.return_types.append(typ.ret_type) # Store argument types. 
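# For illustration: for "def f(x: int, *ys: str, **kw: float)" the stored
# argument types become int, Tuple[str, ...] and Dict[str, float],
# matching the ARG_STAR / ARG_STAR2 handling below.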
for i in range(len(typ.arg_types)): arg_type = typ.arg_types[i] ref_type = self.scope.active_self_type() # type: Optional[Type] if (isinstance(defn, FuncDef) and ref_type is not None and i == 0 and not defn.is_static and typ.arg_kinds[0] not in [nodes.ARG_STAR, nodes.ARG_STAR2]): isclass = defn.is_class or defn.name() in ('__new__', '__init_subclass__') if isclass: ref_type = mypy.types.TypeType.make_normalized(ref_type) erased = erase_to_bound(arg_type) if not is_subtype_ignoring_tvars(ref_type, erased): note = None if typ.arg_names[i] in ['self', 'cls']: if (self.options.python_version[0] < 3 and is_same_type(erased, arg_type) and not isclass): msg = ("Invalid type for self, or extra argument type " "in function annotation") note = '(Hint: typically annotations omit the type for self)' else: msg = ("The erased type of self '{}' " "is not a supertype of its class '{}'" ).format(erased, ref_type) else: msg = ("Self argument missing for a non-static method " "(or an invalid type for self)") self.fail(msg, defn) if note: self.note(note, defn) if defn.is_class and isinstance(arg_type, CallableType): arg_type.is_classmethod_class = True elif isinstance(arg_type, TypeVarType): # Refuse covariant parameter type variables # TODO: check recursively for inner type variables if ( arg_type.variance == COVARIANT and defn.name() not in ('__init__', '__new__') ): ctx = arg_type # type: Context if ctx.line < 0: ctx = typ self.fail(messages.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT, ctx) if typ.arg_kinds[i] == nodes.ARG_STAR: # builtins.tuple[T] is typing.Tuple[T, ...] arg_type = self.named_generic_type('builtins.tuple', [arg_type]) elif typ.arg_kinds[i] == nodes.ARG_STAR2: arg_type = self.named_generic_type('builtins.dict', [self.str_type(), arg_type]) item.arguments[i].variable.type = arg_type # Type check initialization expressions. for arg in item.arguments: if arg.initializer is not None: name = arg.variable.name() msg = 'Incompatible default for ' if name.startswith('__tuple_arg_'): msg += "tuple argument {}".format(name[12:]) else: msg += 'argument "{}"'.format(name) self.check_simple_assignment(arg.variable.type, arg.initializer, context=arg, msg=msg, lvalue_name='argument', rvalue_name='default') # Type check body in a new scope. with self.binder.top_frame_context(): with self.scope.push_function(defn): self.accept(item.body) unreachable = self.binder.is_unreachable() if (self.options.warn_no_return and not unreachable): if (defn.is_generator or is_named_instance(self.return_types[-1], 'typing.AwaitableGenerator')): return_type = self.get_generator_return_type(self.return_types[-1], defn.is_coroutine) else: return_type = self.return_types[-1] if (not isinstance(return_type, (NoneTyp, AnyType)) and not self.is_trivial_body(defn.body)): # Control flow fell off the end of a function that was # declared to return a non-None type and is not # entirely pass/Ellipsis. if isinstance(return_type, UninhabitedType): # This is a NoReturn function self.msg.note(messages.INVALID_IMPLICIT_RETURN, defn) else: self.msg.fail(messages.MISSING_RETURN_STATEMENT, defn) self.return_types.pop() self.binder = old_binder def check_for_missing_annotations(self, fdef: FuncItem) -> None: # Check for functions with unspecified/not fully specified types. 
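# For illustration (hypothetical user code): with --disallow-untyped-defs a
# fully unannotated "def f(x): ..." is flagged; with --disallow-incomplete-defs
# a partially annotated "def f(x, y: int) -> None: ..." is flagged as well,
# because the annotation for 'x' is missing.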
def is_unannotated_any(t: Type) -> bool: return isinstance(t, AnyType) and t.type_of_any == TypeOfAny.unannotated has_explicit_annotation = (isinstance(fdef.type, CallableType) and any(not is_unannotated_any(t) for t in fdef.type.arg_types + [fdef.type.ret_type])) show_untyped = not self.is_typeshed_stub or self.options.warn_incomplete_stub check_incomplete_defs = self.options.disallow_incomplete_defs and has_explicit_annotation if show_untyped and (self.options.disallow_untyped_defs or check_incomplete_defs): if fdef.type is None and self.options.disallow_untyped_defs: self.fail(messages.FUNCTION_TYPE_EXPECTED, fdef) elif isinstance(fdef.type, CallableType): ret_type = fdef.type.ret_type if is_unannotated_any(ret_type): self.fail(messages.RETURN_TYPE_EXPECTED, fdef) elif (fdef.is_coroutine and isinstance(ret_type, Instance) and is_unannotated_any(ret_type.args[0])): self.fail(messages.RETURN_TYPE_EXPECTED, fdef) if any(is_unannotated_any(t) for t in fdef.type.arg_types): self.fail(messages.ARGUMENT_TYPE_EXPECTED, fdef) def is_trivial_body(self, block: Block) -> bool: body = block.body # Skip a docstring if (isinstance(body[0], ExpressionStmt) and isinstance(body[0].expr, (StrExpr, UnicodeExpr))): body = block.body[1:] if len(body) == 0: # There's only a docstring. return True elif len(body) > 1: return False stmt = body[0] return (isinstance(stmt, PassStmt) or (isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, EllipsisExpr))) def check_reverse_op_method(self, defn: FuncItem, typ: CallableType, method: str, context: Context) -> None: """Check a reverse operator method such as __radd__.""" # This used to check for some very obscure scenario. It now # just decides whether it's worth calling # check_overlapping_op_methods(). # First check for a valid signature method_type = CallableType([AnyType(TypeOfAny.special_form), AnyType(TypeOfAny.special_form)], [nodes.ARG_POS, nodes.ARG_POS], [None, None], AnyType(TypeOfAny.special_form), self.named_type('builtins.function')) if not is_subtype(typ, method_type): self.msg.invalid_signature(typ, context) return if method in ('__eq__', '__ne__'): # These are defined for all objects => can't cause trouble. return # With 'Any' or 'object' return type we are happy, since any possible # return value is valid. ret_type = typ.ret_type if isinstance(ret_type, AnyType): return if isinstance(ret_type, Instance): if ret_type.type.fullname() == 'builtins.object': return if len(typ.arg_types) == 2: # TODO check self argument kind # Check for the issue described above. arg_type = typ.arg_types[1] other_method = nodes.normal_from_reverse_op[method] if isinstance(arg_type, Instance): if not arg_type.type.has_readable_member(other_method): return elif isinstance(arg_type, AnyType): return elif isinstance(arg_type, UnionType): if not arg_type.has_readable_member(other_method): return else: return typ2 = self.expr_checker.analyze_external_member_access( other_method, arg_type, defn) self.check_overlapping_op_methods( typ, method, defn.info, typ2, other_method, cast(Instance, arg_type), defn) def check_overlapping_op_methods(self, reverse_type: CallableType, reverse_name: str, reverse_class: TypeInfo, forward_type: Type, forward_name: str, forward_base: Instance, context: Context) -> None: """Check for overlapping method and reverse method signatures. Assume reverse method has valid argument count and kinds. """ # Reverse operator method that overlaps unsafely with the # forward operator method can result in type unsafety. 
This is # similar to overlapping overload variants. # # This example illustrates the issue: # # class X: pass # class A: # def __add__(self, x: X) -> int: # if isinstance(x, X): # return 1 # return NotImplemented # class B: # def __radd__(self, x: A) -> str: return 'x' # class C(X, B): pass # def f(b: B) -> None: # A() + b # Result is 1, even though static type seems to be str! # f(C()) # # The reason for the problem is that B and X are overlapping # types, and the return types are different. Also, if the type # of x in __radd__ would not be A, the methods could be # non-overlapping. for forward_item in union_items(forward_type): if isinstance(forward_item, CallableType): # TODO check argument kinds if len(forward_item.arg_types) < 1: # Not a valid operator method -- can't succeed anyway. return # Construct normalized function signatures corresponding to the # operator methods. The first argument is the left operand and the # second operand is the right argument -- we switch the order of # the arguments of the reverse method. forward_tweaked = CallableType( [forward_base, forward_item.arg_types[0]], [nodes.ARG_POS] * 2, [None] * 2, forward_item.ret_type, forward_item.fallback, name=forward_item.name) reverse_args = reverse_type.arg_types reverse_tweaked = CallableType( [reverse_args[1], reverse_args[0]], [nodes.ARG_POS] * 2, [None] * 2, reverse_type.ret_type, fallback=self.named_type('builtins.function'), name=reverse_type.name) if is_unsafe_overlapping_signatures(forward_tweaked, reverse_tweaked): self.msg.operator_method_signatures_overlap( reverse_class.name(), reverse_name, forward_base.type.name(), forward_name, context) elif isinstance(forward_item, Overloaded): for item in forward_item.items(): self.check_overlapping_op_methods( reverse_type, reverse_name, reverse_class, item, forward_name, forward_base, context) elif not isinstance(forward_item, AnyType): self.msg.forward_operator_not_callable(forward_name, context) def check_inplace_operator_method(self, defn: FuncBase) -> None: """Check an inplace operator method such as __iadd__. They cannot arbitrarily overlap with __add__. 
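    For illustration (hypothetical user code), a mismatch such as the
    following is reported, because __iadd__ does not accept everything
    that __add__ accepts:

        class A:
            def __add__(self, other: 'A') -> 'A': ...
            def __iadd__(self, other: int) -> 'A': ...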
""" method = defn.name() if method not in nodes.inplace_operator_methods: return typ = bind_self(self.function_type(defn)) cls = defn.info other_method = '__' + method[3:] if cls.has_readable_member(other_method): instance = fill_typevars(cls) typ2 = self.expr_checker.analyze_external_member_access( other_method, instance, defn) fail = False if isinstance(typ2, FunctionLike): if not is_more_general_arg_prefix(typ, typ2): fail = True else: # TODO overloads fail = True if fail: self.msg.signatures_incompatible(method, other_method, defn) def check_getattr_method(self, typ: CallableType, context: Context, name: str) -> None: if len(self.scope.stack) == 1: # module-level __getattr__ if name == '__getattribute__': self.msg.fail('__getattribute__ is not valid at the module level', context) return elif name == '__getattr__' and not self.is_stub: self.msg.fail('__getattr__ is not valid at the module level outside a stub file', context) return method_type = CallableType([self.named_type('builtins.str')], [nodes.ARG_POS], [None], AnyType(TypeOfAny.special_form), self.named_type('builtins.function')) else: method_type = CallableType([AnyType(TypeOfAny.special_form), self.named_type('builtins.str')], [nodes.ARG_POS, nodes.ARG_POS], [None, None], AnyType(TypeOfAny.special_form), self.named_type('builtins.function')) if not is_subtype(typ, method_type): self.msg.invalid_signature(typ, context) def check_setattr_method(self, typ: CallableType, context: Context) -> None: method_type = CallableType([AnyType(TypeOfAny.special_form), self.named_type('builtins.str'), AnyType(TypeOfAny.special_form)], [nodes.ARG_POS, nodes.ARG_POS, nodes.ARG_POS], [None, None, None], NoneTyp(), self.named_type('builtins.function')) if not is_subtype(typ, method_type): self.msg.invalid_signature(typ, context) def expand_typevars(self, defn: FuncItem, typ: CallableType) -> List[Tuple[FuncItem, CallableType]]: # TODO use generator subst = [] # type: List[List[Tuple[TypeVarId, Type]]] tvars = typ.variables or [] tvars = tvars[:] if defn.info: # Class type variables tvars += defn.info.defn.type_vars or [] for tvar in tvars: if tvar.values: subst.append([(tvar.id, value) for value in tvar.values]) if subst: result = [] # type: List[Tuple[FuncItem, CallableType]] for substitutions in itertools.product(*subst): mapping = dict(substitutions) expanded = cast(CallableType, expand_type(typ, mapping)) result.append((expand_func(defn, mapping), expanded)) return result else: return [(defn, typ)] def check_method_override(self, defn: Union[FuncBase, Decorator]) -> None: """Check if function definition is compatible with base classes.""" # Check against definitions in base classes. for base in defn.info.mro[1:]: self.check_method_or_accessor_override_for_base(defn, base) def check_method_or_accessor_override_for_base(self, defn: Union[FuncBase, Decorator], base: TypeInfo) -> None: """Check if method definition is compatible with a base class.""" if base: name = defn.name() if name not in ('__init__', '__new__', '__init_subclass__'): # Check method override # (__init__, __new__, __init_subclass__ are special). self.check_method_override_for_base_with_name(defn, name, base) if name in nodes.inplace_operator_methods: # Figure out the name of the corresponding operator method. method = '__' + name[3:] # An inplace operator method such as __iadd__ might not be # always introduced safely if a base class defined __add__. 
# TODO can't come up with an example where this is # necessary; now it's "just in case" self.check_method_override_for_base_with_name(defn, method, base) def check_method_override_for_base_with_name( self, defn: Union[FuncBase, Decorator], name: str, base: TypeInfo) -> None: base_attr = base.names.get(name) if base_attr: # The name of the method is defined in the base class. # Point errors at the 'def' line (important for backward compatibility # of type ignores). if not isinstance(defn, Decorator): context = defn else: context = defn.func # Construct the type of the overriding method. if isinstance(defn, FuncBase): typ = self.function_type(defn) # type: Type else: assert defn.var.is_ready assert defn.var.type is not None typ = defn.var.type if isinstance(typ, FunctionLike) and not is_static(context): typ = bind_self(typ, self.scope.active_self_type()) # Map the overridden method type to subtype context so that # it can be checked for compatibility. original_type = base_attr.type if original_type is None: if isinstance(base_attr.node, FuncDef): original_type = self.function_type(base_attr.node) elif isinstance(base_attr.node, Decorator): original_type = self.function_type(base_attr.node.func) else: assert False, str(base_attr.node) if isinstance(original_type, AnyType) or isinstance(typ, AnyType): pass elif isinstance(original_type, FunctionLike) and isinstance(typ, FunctionLike): if (isinstance(base_attr.node, (FuncBase, Decorator)) and not is_static(base_attr.node)): bound = bind_self(original_type, self.scope.active_self_type()) else: bound = original_type original = map_type_from_supertype(bound, defn.info, base) # Check that the types are compatible. # TODO overloaded signatures self.check_override(typ, cast(FunctionLike, original), defn.name(), name, base.name(), context) elif is_equivalent(original_type, typ): # Assume invariance for a non-callable attribute here. Note # that this doesn't affect read-only properties which can have # covariant overrides. # # TODO: Allow covariance for read-only attributes? pass else: self.msg.signature_incompatible_with_supertype( defn.name(), name, base.name(), context) def check_override(self, override: FunctionLike, original: FunctionLike, name: str, name_in_super: str, supertype: str, node: Context) -> None: """Check a method override with given signatures. Arguments: override: The signature of the overriding method. original: The signature of the original supertype method. name: The name of the subtype. This and the next argument are only used for generating error messages. supertype: The name of the supertype. """ # Use boolean variable to clarify code. fail = False if not is_subtype(override, original, ignore_pos_arg_names=True): fail = True elif (not isinstance(original, Overloaded) and isinstance(override, Overloaded) and name in nodes.reverse_op_methods.keys()): # Operator method overrides cannot introduce overloading, as # this could be unsafe with reverse operator methods. 
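# For illustration (hypothetical user code): if a base class declares a plain
#
#     def __add__(self, other: int) -> int: ...
#
# then a subclass override that turns __add__ into an @overload pair is
# rejected here, even if each overload item would be compatible on its own.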
fail = True if isinstance(original, CallableType) and isinstance(override, CallableType): if (isinstance(original.definition, FuncItem) and isinstance(override.definition, FuncItem)): if ((original.definition.is_static or original.definition.is_class) and not (override.definition.is_static or override.definition.is_class)): fail = True if fail: emitted_msg = False if (isinstance(override, CallableType) and isinstance(original, CallableType) and len(override.arg_types) == len(original.arg_types) and override.min_args == original.min_args): # Give more detailed messages for the common case of both # signatures having the same number of arguments and no # overloads. # override might have its own generic function type # variables. If an argument or return type of override # does not have the correct subtyping relationship # with the original type even after these variables # are erased, then it is definitely an incompatibility. override_ids = override.type_var_ids() def erase_override(t: Type) -> Type: return erase_typevars(t, ids_to_erase=override_ids) for i in range(len(override.arg_types)): if not is_subtype(original.arg_types[i], erase_override(override.arg_types[i])): self.msg.argument_incompatible_with_supertype( i + 1, name, name_in_super, supertype, node) emitted_msg = True if not is_subtype(erase_override(override.ret_type), original.ret_type): self.msg.return_type_incompatible_with_supertype( name, name_in_super, supertype, node) emitted_msg = True if not emitted_msg: # Fall back to generic incompatibility message. self.msg.signature_incompatible_with_supertype( name, name_in_super, supertype, node) def visit_class_def(self, defn: ClassDef) -> None: """Type check a class definition.""" typ = defn.info if typ.is_protocol and typ.defn.type_vars: self.check_protocol_variance(defn) with self.errors.enter_type(defn.name), self.enter_partial_types(): old_binder = self.binder self.binder = ConditionalTypeBinder() with self.binder.top_frame_context(): with self.scope.push_class(defn.info): self.accept(defn.defs) self.binder = old_binder if not defn.has_incompatible_baseclass: # Otherwise we've already found errors; more errors are not useful self.check_multiple_inheritance(typ) def check_protocol_variance(self, defn: ClassDef) -> None: """Check that protocol definition is compatible with declared variances of type variables. Note that we also prohibit declaring protocol classes as invariant if they are actually covariant/contravariant, since this may break transitivity of subtyping, see PEP 544. """ info = defn.info object_type = Instance(info.mro[-1], []) tvars = info.defn.type_vars for i, tvar in enumerate(tvars): up_args = [object_type if i == j else AnyType(TypeOfAny.special_form) for j, _ in enumerate(tvars)] down_args = [UninhabitedType() if i == j else AnyType(TypeOfAny.special_form) for j, _ in enumerate(tvars)] up, down = Instance(info, up_args), Instance(info, down_args) # TODO: add advanced variance checks for recursive protocols if is_subtype(down, up, ignore_declared_variance=True): expected = COVARIANT elif is_subtype(up, down, ignore_declared_variance=True): expected = CONTRAVARIANT else: expected = INVARIANT if expected != tvar.variance: self.msg.bad_proto_variance(tvar.variance, tvar.name, expected, defn) def check_multiple_inheritance(self, typ: TypeInfo) -> None: """Check for multiple inheritance related errors.""" if len(typ.bases) <= 1: # No multiple inheritance. return # Verify that inherited attributes are compatible. 
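# For illustration (hypothetical user code):
#
#     class A:
#         x = 0     # int
#     class B:
#         x = ''    # str
#     class C(A, B): ...   # definition of "x" in A is incompatible with B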
mro = typ.mro[1:] for i, base in enumerate(mro): for name in base.names: for base2 in mro[i + 1:]: # We only need to check compatibility of attributes from classes not # in a subclass relationship. For subclasses, normal (single inheritance) # checks suffice (these are implemented elsewhere). if name in base2.names and base2 not in base.mro: self.check_compatibility(name, base, base2, typ) def check_compatibility(self, name: str, base1: TypeInfo, base2: TypeInfo, ctx: Context) -> None: """Check if attribute name in base1 is compatible with base2 in multiple inheritance. Assume base1 comes before base2 in the MRO, and that base1 and base2 don't have a direct subclass relationship (i.e., the compatibility requirement only derives from multiple inheritance). """ if name == '__init__': # __init__ can be incompatible -- it's a special case. return first = base1[name] second = base2[name] first_type = first.type if first_type is None and isinstance(first.node, FuncDef): first_type = self.function_type(first.node) second_type = second.type if second_type is None and isinstance(second.node, FuncDef): second_type = self.function_type(second.node) # TODO: What if some classes are generic? if (isinstance(first_type, FunctionLike) and isinstance(second_type, FunctionLike)): # Method override first_sig = bind_self(first_type) second_sig = bind_self(second_type) ok = is_subtype(first_sig, second_sig, ignore_pos_arg_names=True) elif first_type and second_type: ok = is_equivalent(first_type, second_type) else: if first_type is None: self.msg.cannot_determine_type_in_base(name, base1.name(), ctx) if second_type is None: self.msg.cannot_determine_type_in_base(name, base2.name(), ctx) ok = True if not ok: self.msg.base_class_definitions_incompatible(name, base1, base2, ctx) def visit_import_from(self, node: ImportFrom) -> None: self.check_import(node) def visit_import_all(self, node: ImportAll) -> None: self.check_import(node) def visit_import(self, s: Import) -> None: pass def check_import(self, node: ImportBase) -> None: for assign in node.assignments: lvalue = assign.lvalues[0] lvalue_type, _, __ = self.check_lvalue(lvalue) if lvalue_type is None: # TODO: This is broken. lvalue_type = AnyType(TypeOfAny.special_form) message = '{} "{}"'.format(messages.INCOMPATIBLE_IMPORT_OF, cast(NameExpr, assign.rvalue).name) self.check_simple_assignment(lvalue_type, assign.rvalue, node, msg=message, lvalue_name='local name', rvalue_name='imported name') # # Statements # def visit_block(self, b: Block) -> None: if b.is_unreachable: self.binder.unreachable() return for s in b.body: if self.binder.is_unreachable(): break self.accept(s) def visit_assignment_stmt(self, s: AssignmentStmt) -> None: """Type check an assignment statement. Handle all kinds of assignment statements (simple, indexed, multiple). """ self.check_assignment(s.lvalues[-1], s.rvalue, s.type is None, s.new_syntax) if (s.type is not None and self.options.disallow_any_unimported and has_any_from_unimported_type(s.type)): if isinstance(s.lvalues[-1], TupleExpr): # This is a multiple assignment. Instead of figuring out which type is problematic, # give a generic error message. self.msg.unimported_type_becomes_any("A type on this line", AnyType(TypeOfAny.special_form), s) else: self.msg.unimported_type_becomes_any("Type of variable", s.type, s) check_for_explicit_any(s.type, self.options, self.is_typeshed_stub, self.msg, context=s) if len(s.lvalues) > 1: # Chained assignment (e.g. x = y = ...). # Make sure that rvalue type will not be reinferred. 
if s.rvalue not in self.type_map: self.expr_checker.accept(s.rvalue) rvalue = self.temp_node(self.type_map[s.rvalue], s) for lv in s.lvalues[:-1]: self.check_assignment(lv, rvalue, s.type is None) def check_assignment(self, lvalue: Lvalue, rvalue: Expression, infer_lvalue_type: bool = True, new_syntax: bool = False) -> None: """Type check a single assignment: lvalue = rvalue.""" if isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr): self.check_assignment_to_multiple_lvalues(lvalue.items, rvalue, lvalue, infer_lvalue_type) else: lvalue_type, index_lvalue, inferred = self.check_lvalue(lvalue) if isinstance(lvalue, NameExpr): if self.check_compatibility_all_supers(lvalue, lvalue_type, rvalue): # We hit an error on this line; don't check for any others return if lvalue_type: if isinstance(lvalue_type, PartialType) and lvalue_type.type is None: # Try to infer a proper type for a variable with a partial None type. rvalue_type = self.expr_checker.accept(rvalue) if isinstance(rvalue_type, NoneTyp): # This doesn't actually provide any additional information -- multiple # None initializers preserve the partial None type. return if is_valid_inferred_type(rvalue_type): var = lvalue_type.var partial_types = self.find_partial_types(var) if partial_types is not None: if not self.current_node_deferred: inferred_type = UnionType.make_simplified_union( [rvalue_type, NoneTyp()]) self.set_inferred_type(var, lvalue, inferred_type) else: var.type = None del partial_types[var] lvalue_type = var.type else: # Try to infer a partial type. No need to check the return value, as # an error will be reported elsewhere. self.infer_partial_type(lvalue_type.var, lvalue, rvalue_type) elif (is_literal_none(rvalue) and isinstance(lvalue, NameExpr) and isinstance(lvalue.node, Var) and lvalue.node.is_initialized_in_class and not new_syntax): # Allow None's to be assigned to class variables with non-Optional types. rvalue_type = lvalue_type elif (isinstance(lvalue, MemberExpr) and lvalue.kind is None): # Ignore member access to modules instance_type = self.expr_checker.accept(lvalue.expr) rvalue_type, infer_lvalue_type = self.check_member_assignment( instance_type, lvalue_type, rvalue, lvalue) else: rvalue_type = self.check_simple_assignment(lvalue_type, rvalue, lvalue) # Special case: only non-abstract non-protocol classes can be assigned to # variables with explicit type Type[A], where A is protocol or abstract. 
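# For illustration (hypothetical user code, using abc.ABC/abstractmethod):
#
#     class A(ABC):
#         @abstractmethod
#         def f(self) -> None: ...
#     class B(A):
#         def f(self) -> None: ...
#
#     cls: Type[A] = B    # ok: B is concrete
#     cls = A             # error: only a concrete class may be assigned here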
if (isinstance(rvalue_type, CallableType) and rvalue_type.is_type_obj() and (rvalue_type.type_object().is_abstract or rvalue_type.type_object().is_protocol) and isinstance(lvalue_type, TypeType) and isinstance(lvalue_type.item, Instance) and (lvalue_type.item.type.is_abstract or lvalue_type.item.type.is_protocol)): self.msg.concrete_only_assign(lvalue_type, rvalue) return if rvalue_type and infer_lvalue_type: self.binder.assign_type(lvalue, rvalue_type, lvalue_type, False) elif index_lvalue: self.check_indexed_assignment(index_lvalue, rvalue, lvalue) if inferred: self.infer_variable_type(inferred, lvalue, self.expr_checker.accept(rvalue), rvalue) def check_compatibility_all_supers(self, lvalue: NameExpr, lvalue_type: Optional[Type], rvalue: Expression) -> bool: lvalue_node = lvalue.node # Check if we are a class variable with at least one base class if (isinstance(lvalue_node, Var) and lvalue.kind == MDEF and len(lvalue_node.info.bases) > 0): for base in lvalue_node.info.mro[1:]: tnode = base.names.get(lvalue_node.name()) if tnode is not None: if not self.check_compatibility_classvar_super(lvalue_node, base, tnode.node): # Show only one error per variable break for base in lvalue_node.info.mro[1:]: # Only check __slots__ against the 'object' # If a base class defines a Tuple of 3 elements, a child of # this class should not be allowed to define it as a Tuple of # anything other than 3 elements. The exception to this rule # is __slots__, where it is allowed for any child class to # redefine it. if lvalue_node.name() == "__slots__" and base.fullname() != "builtins.object": continue base_type, base_node = self.lvalue_type_from_base(lvalue_node, base) if base_type: assert base_node is not None if not self.check_compatibility_super(lvalue, lvalue_type, rvalue, base, base_type, base_node): # Only show one error per variable; even if other # base classes are also incompatible return True break return False def check_compatibility_super(self, lvalue: NameExpr, lvalue_type: Optional[Type], rvalue: Expression, base: TypeInfo, base_type: Type, base_node: Node) -> bool: lvalue_node = lvalue.node assert isinstance(lvalue_node, Var) # Do not check whether the rvalue is compatible if the # lvalue had a type defined; this is handled by other # parts, and all we have to worry about in that case is # that lvalue is compatible with the base class. compare_node = None if lvalue_type: compare_type = lvalue_type compare_node = lvalue.node else: compare_type = self.expr_checker.accept(rvalue, base_type) if isinstance(rvalue, NameExpr): compare_node = rvalue.node if isinstance(compare_node, Decorator): compare_node = compare_node.func if compare_type: if (isinstance(base_type, CallableType) and isinstance(compare_type, CallableType)): base_static = is_node_static(base_node) compare_static = is_node_static(compare_node) # In case compare_static is unknown, also check # if 'definition' is set. 
The most common case for # this is with TempNode(), where we lose all # information about the real rvalue node (but only get # the rvalue type) if compare_static is None and compare_type.definition: compare_static = is_node_static(compare_type.definition) # Compare against False, as is_node_static can return None if base_static is False and compare_static is False: # Class-level function objects and classmethods become bound # methods: the former to the instance, the latter to the # class base_type = bind_self(base_type, self.scope.active_self_type()) compare_type = bind_self(compare_type, self.scope.active_self_type()) # If we are a static method, ensure to also tell the # lvalue it now contains a static method if base_static and compare_static: lvalue_node.is_staticmethod = True return self.check_subtype(compare_type, base_type, lvalue, messages.INCOMPATIBLE_TYPES_IN_ASSIGNMENT, 'expression has type', 'base class "%s" defined the type as' % base.name()) return True def lvalue_type_from_base(self, expr_node: Var, base: TypeInfo) -> Tuple[Optional[Type], Optional[Node]]: """For a NameExpr that is part of a class, walk all base classes and try to find the first class that defines a Type for the same name.""" expr_name = expr_node.name() base_var = base.names.get(expr_name) if base_var: base_node = base_var.node base_type = base_var.type if isinstance(base_node, Decorator): base_node = base_node.func base_type = base_node.type if base_type: if not has_no_typevars(base_type): self_type = self.scope.active_self_type() assert self_type is not None, "Internal error: base lookup outside class" if isinstance(self_type, TupleType): instance = self_type.fallback else: instance = self_type itype = map_instance_to_supertype(instance, base) base_type = expand_type_by_instance(base_type, itype) if isinstance(base_type, CallableType) and isinstance(base_node, FuncDef): # If we are a property, return the Type of the return # value, not the Callable if base_node.is_property: base_type = base_type.ret_type return base_type, base_node return None, None def check_compatibility_classvar_super(self, node: Var, base: TypeInfo, base_node: Optional[Node]) -> bool: if not isinstance(base_node, Var): return True if node.is_classvar and not base_node.is_classvar: self.fail('Cannot override instance variable ' '(previously declared on base class "%s") ' 'with class variable' % base.name(), node) return False elif not node.is_classvar and base_node.is_classvar: self.fail('Cannot override class variable ' '(previously declared on base class "%s") ' 'with instance variable' % base.name(), node) return False return True def check_assignment_to_multiple_lvalues(self, lvalues: List[Lvalue], rvalue: Expression, context: Context, infer_lvalue_type: bool = True) -> None: if isinstance(rvalue, TupleExpr) or isinstance(rvalue, ListExpr): # Recursively go into Tuple or List expression rhs instead of # using the type of rhs, because this allowed more fine grained # control in cases like: a, b = [int, str] where rhs would get # type List[object] rvalues = rvalue.items if self.check_rvalue_count_in_assignment(lvalues, len(rvalues), context): star_index = next((i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr)), len(lvalues)) left_lvs = lvalues[:star_index] star_lv = cast(StarExpr, lvalues[star_index]) if star_index != len(lvalues) else None right_lvs = lvalues[star_index + 1:] left_rvs, star_rvs, right_rvs = self.split_around_star( rvalues, star_index, len(lvalues)) lr_pairs = list(zip(left_lvs, left_rvs)) if star_lv: 
rv_list = ListExpr(star_rvs) rv_list.set_line(rvalue.get_line()) lr_pairs.append((star_lv.expr, rv_list)) lr_pairs.extend(zip(right_lvs, right_rvs)) for lv, rv in lr_pairs: self.check_assignment(lv, rv, infer_lvalue_type) else: self.check_multi_assignment(lvalues, rvalue, context, infer_lvalue_type) def check_rvalue_count_in_assignment(self, lvalues: List[Lvalue], rvalue_count: int, context: Context) -> bool: if any(isinstance(lvalue, StarExpr) for lvalue in lvalues): if len(lvalues) - 1 > rvalue_count: self.msg.wrong_number_values_to_unpack(rvalue_count, len(lvalues) - 1, context) return False elif rvalue_count != len(lvalues): self.msg.wrong_number_values_to_unpack(rvalue_count, len(lvalues), context) return False return True def check_multi_assignment(self, lvalues: List[Lvalue], rvalue: Expression, context: Context, infer_lvalue_type: bool = True, rv_type: Optional[Type] = None, undefined_rvalue: bool = False) -> None: """Check the assignment of one rvalue to a number of lvalues.""" # Infer the type of an ordinary rvalue expression. # TODO: maybe elsewhere; redundant. rvalue_type = rv_type or self.expr_checker.accept(rvalue) if isinstance(rvalue_type, UnionType): # If this is an Optional type in non-strict Optional code, unwrap it. relevant_items = rvalue_type.relevant_items() if len(relevant_items) == 1: rvalue_type = relevant_items[0] if isinstance(rvalue_type, AnyType): for lv in lvalues: if isinstance(lv, StarExpr): lv = lv.expr temp_node = self.temp_node(AnyType(TypeOfAny.from_another_any, source_any=rvalue_type), context) self.check_assignment(lv, temp_node, infer_lvalue_type) elif isinstance(rvalue_type, TupleType): self.check_multi_assignment_from_tuple(lvalues, rvalue, rvalue_type, context, undefined_rvalue, infer_lvalue_type) elif isinstance(rvalue_type, UnionType): self.check_multi_assignment_from_union(lvalues, rvalue, rvalue_type, context, infer_lvalue_type) else: self.check_multi_assignment_from_iterable(lvalues, rvalue_type, context, infer_lvalue_type) def check_multi_assignment_from_union(self, lvalues: List[Expression], rvalue: Expression, rvalue_type: UnionType, context: Context, infer_lvalue_type: bool) -> None: """Check assignment to multiple lvalue targets when rvalue type is a Union[...]. For example: t: Union[Tuple[int, int], Tuple[str, str]] x, y = t reveal_type(x) # Union[int, str] The idea in this case is to process the assignment for every item of the union. Important note: the types are collected in two places, 'union_types' contains inferred types for first assignments, 'assignments' contains the narrowed types for binder. """ self.no_partial_types = True transposed = tuple([] for _ in self.flatten_lvalues(lvalues)) # type: Tuple[List[Type], ...] # Notify binder that we want to defer bindings and instead collect types. with self.binder.accumulate_type_assignments() as assignments: for item in rvalue_type.items: # Type check the assignment separately for each union item and collect # the inferred lvalue types for each union item. self.check_multi_assignment(lvalues, rvalue, context, infer_lvalue_type=infer_lvalue_type, rv_type=item, undefined_rvalue=True) for t, lv in zip(transposed, self.flatten_lvalues(lvalues)): t.append(self.type_map.pop(lv, AnyType(TypeOfAny.special_form))) union_types = tuple(UnionType.make_simplified_union(col) for col in transposed) for expr, items in assignments.items(): # Bind a union of types collected in 'assignments' to every expression. 
if isinstance(expr, StarExpr): expr = expr.expr types, declared_types = zip(*items) self.binder.assign_type(expr, UnionType.make_simplified_union(types), UnionType.make_simplified_union(declared_types), False) for union, lv in zip(union_types, self.flatten_lvalues(lvalues)): # Properly store the inferred types. _1, _2, inferred = self.check_lvalue(lv) if inferred: self.set_inferred_type(inferred, lv, union) else: self.store_type(lv, union) self.no_partial_types = False def flatten_lvalues(self, lvalues: List[Expression]) -> List[Expression]: res = [] # type: List[Expression] for lv in lvalues: if isinstance(lv, (TupleExpr, ListExpr)): res.extend(self.flatten_lvalues(lv.items)) if isinstance(lv, StarExpr): # Unwrap StarExpr, since it is unwrapped by other helpers. lv = lv.expr res.append(lv) return res def check_multi_assignment_from_tuple(self, lvalues: List[Lvalue], rvalue: Expression, rvalue_type: TupleType, context: Context, undefined_rvalue: bool, infer_lvalue_type: bool = True) -> None: if self.check_rvalue_count_in_assignment(lvalues, len(rvalue_type.items), context): star_index = next((i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr)), len(lvalues)) left_lvs = lvalues[:star_index] star_lv = cast(StarExpr, lvalues[star_index]) if star_index != len(lvalues) else None right_lvs = lvalues[star_index + 1:] if not undefined_rvalue: # Infer rvalue again, now in the correct type context. lvalue_type = self.lvalue_type_for_inference(lvalues, rvalue_type) reinferred_rvalue_type = self.expr_checker.accept(rvalue, lvalue_type) if isinstance(reinferred_rvalue_type, UnionType): # If this is an Optional type in non-strict Optional code, unwrap it. relevant_items = reinferred_rvalue_type.relevant_items() if len(relevant_items) == 1: reinferred_rvalue_type = relevant_items[0] if isinstance(reinferred_rvalue_type, UnionType): self.check_multi_assignment_from_union(lvalues, rvalue, reinferred_rvalue_type, context, infer_lvalue_type) return assert isinstance(reinferred_rvalue_type, TupleType) rvalue_type = reinferred_rvalue_type left_rv_types, star_rv_types, right_rv_types = self.split_around_star( rvalue_type.items, star_index, len(lvalues)) for lv, rv_type in zip(left_lvs, left_rv_types): self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type) if star_lv: list_expr = ListExpr([self.temp_node(rv_type, context) for rv_type in star_rv_types]) list_expr.set_line(context.get_line()) self.check_assignment(star_lv.expr, list_expr, infer_lvalue_type) for lv, rv_type in zip(right_lvs, right_rv_types): self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type) def lvalue_type_for_inference(self, lvalues: List[Lvalue], rvalue_type: TupleType) -> Type: star_index = next((i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr)), len(lvalues)) left_lvs = lvalues[:star_index] star_lv = cast(StarExpr, lvalues[star_index]) if star_index != len(lvalues) else None right_lvs = lvalues[star_index + 1:] left_rv_types, star_rv_types, right_rv_types = self.split_around_star( rvalue_type.items, star_index, len(lvalues)) type_parameters = [] # type: List[Type] def append_types_for_inference(lvs: List[Expression], rv_types: List[Type]) -> None: for lv, rv_type in zip(lvs, rv_types): sub_lvalue_type, index_expr, inferred = self.check_lvalue(lv) if sub_lvalue_type and not isinstance(sub_lvalue_type, PartialType): type_parameters.append(sub_lvalue_type) else: # index lvalue # TODO Figure out more precise type context, probably # based on the type signature of the _set 
method. type_parameters.append(rv_type) append_types_for_inference(left_lvs, left_rv_types) if star_lv: sub_lvalue_type, index_expr, inferred = self.check_lvalue(star_lv.expr) if sub_lvalue_type and not isinstance(sub_lvalue_type, PartialType): type_parameters.extend([sub_lvalue_type] * len(star_rv_types)) else: # index lvalue # TODO Figure out more precise type context, probably # based on the type signature of the _set method. type_parameters.extend(star_rv_types) append_types_for_inference(right_lvs, right_rv_types) return TupleType(type_parameters, self.named_type('builtins.tuple')) def split_around_star(self, items: List[T], star_index: int, length: int) -> Tuple[List[T], List[T], List[T]]: """Splits a list of items in three to match another list of length 'length' that contains a starred expression at 'star_index' in the following way: star_index = 2, length = 5 (i.e., [a,b,*,c,d]), items = [1,2,3,4,5,6,7] returns in: ([1,2], [3,4,5], [6,7]) """ nr_right_of_star = length - star_index - 1 right_index = -nr_right_of_star if nr_right_of_star != 0 else len(items) left = items[:star_index] star = items[star_index:right_index] right = items[right_index:] return (left, star, right) def type_is_iterable(self, type: Type) -> bool: if isinstance(type, CallableType) and type.is_type_obj(): type = type.fallback return (is_subtype(type, self.named_generic_type('typing.Iterable', [AnyType(TypeOfAny.special_form)])) and isinstance(type, Instance)) def check_multi_assignment_from_iterable(self, lvalues: List[Lvalue], rvalue_type: Type, context: Context, infer_lvalue_type: bool = True) -> None: if self.type_is_iterable(rvalue_type): item_type = self.iterable_item_type(cast(Instance, rvalue_type)) for lv in lvalues: if isinstance(lv, StarExpr): self.check_assignment(lv.expr, self.temp_node(rvalue_type, context), infer_lvalue_type) else: self.check_assignment(lv, self.temp_node(item_type, context), infer_lvalue_type) else: self.msg.type_not_iterable(rvalue_type, context) def check_lvalue(self, lvalue: Lvalue) -> Tuple[Optional[Type], Optional[IndexExpr], Optional[Var]]: lvalue_type = None index_lvalue = None inferred = None if self.is_definition(lvalue): if isinstance(lvalue, NameExpr): inferred = cast(Var, lvalue.node) assert isinstance(inferred, Var) else: assert isinstance(lvalue, MemberExpr) self.expr_checker.accept(lvalue.expr) inferred = lvalue.def_var elif isinstance(lvalue, IndexExpr): index_lvalue = lvalue elif isinstance(lvalue, MemberExpr): lvalue_type = self.expr_checker.analyze_ordinary_member_access(lvalue, True) self.store_type(lvalue, lvalue_type) elif isinstance(lvalue, NameExpr): lvalue_type = self.expr_checker.analyze_ref_expr(lvalue, lvalue=True) self.store_type(lvalue, lvalue_type) elif isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr): types = [self.check_lvalue(sub_expr)[0] or # This type will be used as a context for further inference of rvalue, # we put Uninhabited if there is no information available from lvalue. UninhabitedType() for sub_expr in lvalue.items] lvalue_type = TupleType(types, self.named_type('builtins.tuple')) else: lvalue_type = self.expr_checker.accept(lvalue) return lvalue_type, index_lvalue, inferred def is_definition(self, s: Lvalue) -> bool: if isinstance(s, NameExpr): if s.is_inferred_def: return True # If the node type is not defined, this must the first assignment # that we process => this is a definition, even though the semantic # analyzer did not recognize this as such. 
This can arise in code # that uses isinstance checks, if type checking of the primary # definition is skipped due to an always False type check. node = s.node if isinstance(node, Var): return node.type is None elif isinstance(s, MemberExpr): return s.is_inferred_def return False def infer_variable_type(self, name: Var, lvalue: Lvalue, init_type: Type, context: Context) -> None: """Infer the type of initialized variables from initializer type.""" if isinstance(init_type, DeletedType): self.msg.deleted_as_rvalue(init_type, context) elif not is_valid_inferred_type(init_type) and not self.no_partial_types: # We cannot use the type of the initialization expression for full type # inference (it's not specific enough), but we might be able to give # partial type which will be made more specific later. A partial type # gets generated in assignment like 'x = []' where item type is not known. if not self.infer_partial_type(name, lvalue, init_type): self.fail(messages.NEED_ANNOTATION_FOR_VAR, context) self.set_inference_error_fallback_type(name, lvalue, init_type, context) elif (isinstance(lvalue, MemberExpr) and self.inferred_attribute_types is not None and lvalue.def_var and lvalue.def_var in self.inferred_attribute_types and not is_same_type(self.inferred_attribute_types[lvalue.def_var], init_type)): # Multiple, inconsistent types inferred for an attribute. self.fail(messages.NEED_ANNOTATION_FOR_VAR, context) name.type = AnyType(TypeOfAny.from_error) else: # Infer type of the target. # Make the type more general (strip away function names etc.). init_type = strip_type(init_type) self.set_inferred_type(name, lvalue, init_type) def infer_partial_type(self, name: Var, lvalue: Lvalue, init_type: Type) -> bool: if isinstance(init_type, NoneTyp): partial_type = PartialType(None, name, [init_type]) elif isinstance(init_type, Instance): fullname = init_type.type.fullname() if (isinstance(lvalue, (NameExpr, MemberExpr)) and (fullname == 'builtins.list' or fullname == 'builtins.set' or fullname == 'builtins.dict') and all(isinstance(t, (NoneTyp, UninhabitedType)) for t in init_type.args)): partial_type = PartialType(init_type.type, name, init_type.args) else: return False else: return False self.set_inferred_type(name, lvalue, partial_type) self.partial_types[-1][name] = lvalue return True def set_inferred_type(self, var: Var, lvalue: Lvalue, type: Type) -> None: """Store inferred variable type. Store the type to both the variable node and the expression node that refers to the variable (lvalue). If var is None, do nothing. """ if var and not self.current_node_deferred: var.type = type var.is_inferred = True if isinstance(lvalue, MemberExpr) and self.inferred_attribute_types is not None: # Store inferred attribute type so that we can check consistency afterwards. if lvalue.def_var is not None: self.inferred_attribute_types[lvalue.def_var] = type self.store_type(lvalue, type) def set_inference_error_fallback_type(self, var: Var, lvalue: Lvalue, type: Type, context: Context) -> None: """If errors on context line are ignored, store dummy type for variable. If a program ignores error on type inference error, the variable should get some inferred type so that if can used later on in the program. Example: x = [] # type: ignore x.append(1) # Should be ok! We implement this here by giving x a valid type (Any). 
""" if context.get_line() in self.errors.ignored_lines[self.errors.file]: self.set_inferred_type(var, lvalue, AnyType(TypeOfAny.from_error)) def check_simple_assignment(self, lvalue_type: Optional[Type], rvalue: Expression, context: Context, msg: str = messages.INCOMPATIBLE_TYPES_IN_ASSIGNMENT, lvalue_name: str = 'variable', rvalue_name: str = 'expression') -> Type: if self.is_stub and isinstance(rvalue, EllipsisExpr): # '...' is always a valid initializer in a stub. return AnyType(TypeOfAny.special_form) else: always_allow_any = lvalue_type is not None and not isinstance(lvalue_type, AnyType) rvalue_type = self.expr_checker.accept(rvalue, lvalue_type, always_allow_any=always_allow_any) if isinstance(rvalue_type, DeletedType): self.msg.deleted_as_rvalue(rvalue_type, context) if isinstance(lvalue_type, DeletedType): self.msg.deleted_as_lvalue(lvalue_type, context) elif lvalue_type: self.check_subtype(rvalue_type, lvalue_type, context, msg, '{} has type'.format(rvalue_name), '{} has type'.format(lvalue_name)) return rvalue_type def check_member_assignment(self, instance_type: Type, attribute_type: Type, rvalue: Expression, context: Context) -> Tuple[Type, bool]: """Type member assigment. This defers to check_simple_assignment, unless the member expression is a descriptor, in which case this checks descriptor semantics as well. Return the inferred rvalue_type and whether to infer anything about the attribute type """ # Descriptors don't participate in class-attribute access if ((isinstance(instance_type, FunctionLike) and instance_type.is_type_obj()) or isinstance(instance_type, TypeType)): rvalue_type = self.check_simple_assignment(attribute_type, rvalue, context) return rvalue_type, True if not isinstance(attribute_type, Instance): rvalue_type = self.check_simple_assignment(attribute_type, rvalue, context) return rvalue_type, True if not attribute_type.type.has_readable_member('__set__'): # If there is no __set__, we type-check that the assigned value matches # the return type of __get__. This doesn't match the python semantics, # (which allow you to override the descriptor with any value), but preserves # the type of accessing the attribute (even after the override). 
if attribute_type.type.has_readable_member('__get__'): attribute_type = self.expr_checker.analyze_descriptor_access( instance_type, attribute_type, context) rvalue_type = self.check_simple_assignment(attribute_type, rvalue, context) return rvalue_type, True dunder_set = attribute_type.type.get_method('__set__') if dunder_set is None: self.msg.fail("{}.__set__ is not callable".format(attribute_type), context) return AnyType(TypeOfAny.from_error), False function = function_type(dunder_set, self.named_type('builtins.function')) bound_method = bind_self(function, attribute_type) typ = map_instance_to_supertype(attribute_type, dunder_set.info) dunder_set_type = expand_type_by_instance(bound_method, typ) _, inferred_dunder_set_type = self.expr_checker.check_call( dunder_set_type, [TempNode(instance_type), rvalue], [nodes.ARG_POS, nodes.ARG_POS], context) if not isinstance(inferred_dunder_set_type, CallableType): self.fail("__set__ is not callable", context) return AnyType(TypeOfAny.from_error), True if len(inferred_dunder_set_type.arg_types) < 2: # A message already will have been recorded in check_call return AnyType(TypeOfAny.from_error), False return inferred_dunder_set_type.arg_types[1], False def check_indexed_assignment(self, lvalue: IndexExpr, rvalue: Expression, context: Context) -> None: """Type check indexed assignment base[index] = rvalue. The lvalue argument is the base[index] expression. """ self.try_infer_partial_type_from_indexed_assignment(lvalue, rvalue) basetype = self.expr_checker.accept(lvalue.base) if isinstance(basetype, TypedDictType): item_type = self.expr_checker.visit_typeddict_index_expr(basetype, lvalue.index) method_type = CallableType( arg_types=[self.named_type('builtins.str'), item_type], arg_kinds=[ARG_POS, ARG_POS], arg_names=[None, None], ret_type=NoneTyp(), fallback=self.named_type('builtins.function') ) # type: Type else: method_type = self.expr_checker.analyze_external_member_access( '__setitem__', basetype, context) lvalue.method_type = method_type self.expr_checker.check_call(method_type, [lvalue.index, rvalue], [nodes.ARG_POS, nodes.ARG_POS], context) def try_infer_partial_type_from_indexed_assignment( self, lvalue: IndexExpr, rvalue: Expression) -> None: # TODO: Should we share some of this with try_infer_partial_type? if isinstance(lvalue.base, RefExpr) and isinstance(lvalue.base.node, Var): var = lvalue.base.node if isinstance(var.type, PartialType): type_type = var.type.type if type_type is None: return # The partial type is None. partial_types = self.find_partial_types(var) if partial_types is None: return typename = type_type.fullname() if typename == 'builtins.dict': # TODO: Don't infer things twice. 
key_type = self.expr_checker.accept(lvalue.index) value_type = self.expr_checker.accept(rvalue) full_key_type = UnionType.make_simplified_union( [key_type, var.type.inner_types[0]]) full_value_type = UnionType.make_simplified_union( [value_type, var.type.inner_types[1]]) if (is_valid_inferred_type(full_key_type) and is_valid_inferred_type(full_value_type)): if not self.current_node_deferred: var.type = self.named_generic_type('builtins.dict', [full_key_type, full_value_type]) del partial_types[var] def visit_expression_stmt(self, s: ExpressionStmt) -> None: self.expr_checker.accept(s.expr, allow_none_return=True, always_allow_any=True) def visit_return_stmt(self, s: ReturnStmt) -> None: """Type check a return statement.""" self.check_return_stmt(s) self.binder.unreachable() def check_return_stmt(self, s: ReturnStmt) -> None: defn = self.scope.top_function() if defn is not None: if defn.is_generator: return_type = self.get_generator_return_type(self.return_types[-1], defn.is_coroutine) else: return_type = self.return_types[-1] if isinstance(return_type, UninhabitedType): self.fail(messages.NO_RETURN_EXPECTED, s) return if s.expr: is_lambda = isinstance(self.scope.top_function(), LambdaExpr) declared_none_return = isinstance(return_type, NoneTyp) declared_any_return = isinstance(return_type, AnyType) # This controls whether or not we allow a function call that # returns None as the expression of this return statement. # E.g. `return f()` for some `f` that returns None. We allow # this only if we're in a lambda or in a function that returns # `None` or `Any`. allow_none_func_call = is_lambda or declared_none_return or declared_any_return # Return with a value. typ = self.expr_checker.accept(s.expr, return_type, allow_none_return=allow_none_func_call) if defn.is_async_generator: self.fail("'return' with value in async generator is not allowed", s) return # Returning a value of type Any is always fine. if isinstance(typ, AnyType): # (Unless you asked to be warned in that case, and the # function is not declared to return Any) if (self.options.warn_return_any and not self.current_node_deferred and not is_proper_subtype(AnyType(TypeOfAny.special_form), return_type)): self.msg.incorrectly_returning_any(return_type, s) return # Disallow return expressions in functions declared to return # None, subject to two exceptions below. if declared_none_return: # Lambdas are allowed to have None returns. # Functions returning a value of type None are allowed to have a None return. if is_lambda or isinstance(typ, NoneTyp): return self.fail(messages.NO_RETURN_VALUE_EXPECTED, s) else: self.check_subtype( subtype_label='got', subtype=typ, supertype_label='expected', supertype=return_type, context=s, msg=messages.INCOMPATIBLE_RETURN_VALUE_TYPE) else: # Empty returns are valid in Generators with Any typed returns, but not in # coroutines. if (defn.is_generator and not defn.is_coroutine and isinstance(return_type, AnyType)): return if isinstance(return_type, (NoneTyp, AnyType)): return if self.in_checked_function(): self.fail(messages.RETURN_VALUE_EXPECTED, s) def visit_if_stmt(self, s: IfStmt) -> None: """Type check an if statement.""" # This frame records the knowledge from previous if/elif clauses not being taken. # Fall-through to the original frame is handled explicitly in each block. 
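        # Editor's illustrative sketch (not in the original source): for code
        # such as the following, each clause body is checked in its own frame,
        # and each later clause also carries the negated facts from all the
        # earlier conditions.
        #
        #     if isinstance(x, int):
        #         ...          # x narrowed to int
        #     elif isinstance(x, str):
        #         ...          # x narrowed to str; the binder also knows the
        #                      # first test was false here
        #     else:
        #         ...          # neither isinstance test succeeded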
with self.binder.frame_context(can_skip=False, fall_through=0): for e, b in zip(s.expr, s.body): t = self.expr_checker.accept(e) if isinstance(t, DeletedType): self.msg.deleted_as_rvalue(t, s) if self.options.strict_boolean: is_bool = isinstance(t, Instance) and t.type.fullname() == 'builtins.bool' if not (is_bool or isinstance(t, AnyType)): self.fail(messages.NON_BOOLEAN_IN_CONDITIONAL, e) if_map, else_map = self.find_isinstance_check(e) # XXX Issue a warning if condition is always False? with self.binder.frame_context(can_skip=True, fall_through=2): self.push_type_map(if_map) self.accept(b) # XXX Issue a warning if condition is always True? self.push_type_map(else_map) with self.binder.frame_context(can_skip=False, fall_through=2): if s.else_body: self.accept(s.else_body) def visit_while_stmt(self, s: WhileStmt) -> None: """Type check a while statement.""" if_stmt = IfStmt([s.expr], [s.body], None) if_stmt.set_line(s.get_line(), s.get_column()) self.accept_loop(if_stmt, s.else_body, exit_condition=s.expr) def visit_operator_assignment_stmt(self, s: OperatorAssignmentStmt) -> None: """Type check an operator assignment statement, e.g. x += 1.""" lvalue_type = self.expr_checker.accept(s.lvalue) inplace, method = infer_operator_assignment_method(lvalue_type, s.op) if inplace: # There is __ifoo__, treat as x = x.__ifoo__(y) rvalue_type, method_type = self.expr_checker.check_op( method, lvalue_type, s.rvalue, s) if not is_subtype(rvalue_type, lvalue_type): self.msg.incompatible_operator_assignment(s.op, s) else: # There is no __ifoo__, treat as x = x y expr = OpExpr(s.op, s.lvalue, s.rvalue) expr.set_line(s) self.check_assignment(lvalue=s.lvalue, rvalue=expr, infer_lvalue_type=True, new_syntax=False) def visit_assert_stmt(self, s: AssertStmt) -> None: self.expr_checker.accept(s.expr) if s.msg is not None: self.expr_checker.accept(s.msg) if isinstance(s.expr, TupleExpr) and len(s.expr.items) > 0: self.warn(messages.MALFORMED_ASSERT, s) # If this is asserting some isinstance check, bind that type in the following code true_map, _ = self.find_isinstance_check(s.expr) self.push_type_map(true_map) def visit_raise_stmt(self, s: RaiseStmt) -> None: """Type check a raise statement.""" if s.expr: self.type_check_raise(s.expr, s) if s.from_expr: self.type_check_raise(s.from_expr, s, True) self.binder.unreachable() def type_check_raise(self, e: Expression, s: RaiseStmt, optional: bool = False) -> None: typ = self.expr_checker.accept(e) if isinstance(typ, FunctionLike): if typ.is_type_obj(): # Cases like "raise/from ExceptionClass". typeinfo = typ.type_object() base = self.lookup_typeinfo('builtins.BaseException') if base in typeinfo.mro or typeinfo.fallback_to_any: # Good! return # Else fall back to the checks below (which will fail). if isinstance(typ, TupleType) and self.options.python_version[0] == 2: # allow `raise type, value, traceback` # https://docs.python.org/2/reference/simple_stmts.html#the-raise-statement # TODO: Also check tuple item types. if len(typ.items) in (2, 3): return if isinstance(typ, Instance) and typ.type.fallback_to_any: # OK! return expected_type = self.named_type('builtins.BaseException') # type: Type if optional: expected_type = UnionType([expected_type, NoneTyp()]) self.check_subtype(typ, expected_type, s, messages.INVALID_EXCEPTION) def visit_try_stmt(self, s: TryStmt) -> None: """Type check a try statement.""" # Our enclosing frame will get the result if the try/except falls through. 
# This one gets all possible states after the try block exited abnormally # (by exception, return, break, etc.) with self.binder.frame_context(can_skip=False, fall_through=0): # Not only might the body of the try statement exit # abnormally, but so might an exception handler or else # clause. The finally clause runs in *all* cases, so we # need an outer try frame to catch all intermediate states # in case an exception is raised during an except or else # clause. As an optimization, only create the outer try # frame when there actually is a finally clause. self.visit_try_without_finally(s, try_frame=bool(s.finally_body)) if s.finally_body: # First we check finally_body is type safe on all abnormal exit paths self.accept(s.finally_body) if s.finally_body: # Then we try again for the more restricted set of options # that can fall through. (Why do we need to check the # finally clause twice? Depending on whether the finally # clause was reached by the try clause falling off the end # or exiting abnormally, after completing the finally clause # either flow will continue to after the entire try statement # or the exception/return/etc. will be processed and control # flow will escape. We need to check that the finally clause # type checks in both contexts, but only the resulting types # from the latter context affect the type state in the code # that follows the try statement.) self.accept(s.finally_body) def visit_try_without_finally(self, s: TryStmt, try_frame: bool) -> None: """Type check a try statement, ignoring the finally block. On entry, the top frame should receive all flow that exits the try block abnormally (i.e., such that the else block does not execute), and its parent should receive all flow that exits the try block normally. """ # This frame will run the else block if the try fell through. # In that case, control flow continues to the parent of what # was the top frame on entry. with self.binder.frame_context(can_skip=False, fall_through=2, try_frame=try_frame): # This frame receives exit via exception, and runs exception handlers with self.binder.frame_context(can_skip=False, fall_through=2): # Finally, the body of the try statement with self.binder.frame_context(can_skip=False, fall_through=2, try_frame=True): self.accept(s.body) for i in range(len(s.handlers)): with self.binder.frame_context(can_skip=True, fall_through=4): typ = s.types[i] if typ: t = self.check_except_handler_test(typ) var = s.vars[i] if var: # To support local variables, we make this a definition line, # causing assignment to set the variable's type. var.is_inferred_def = True # We also temporarily set current_node_deferred to False to # make sure the inference happens. # TODO: Use a better solution, e.g. a # separate Var for each except block. am_deferring = self.current_node_deferred self.current_node_deferred = False self.check_assignment(var, self.temp_node(t, var)) self.current_node_deferred = am_deferring self.accept(s.handlers[i]) var = s.vars[i] if var: # Exception variables are deleted in python 3 but not python 2. # But, since it's bad form in python 2 and the type checking # wouldn't work very well, we delete it anyway. # Unfortunately, this doesn't let us detect usage before the # try/except block. 
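                        #
                        # Editor's illustrative sketch (not in the original
                        # source) of the user-visible effect of the deletion
                        # performed just below:
                        #
                        #     try:
                        #         ...
                        #     except KeyError as e:
                        #         print(e)   # fine inside the handler
                        #     print(e)       # rejected: the type of 'e' has
                        #                    # been replaced by DeletedType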
if self.options.python_version[0] >= 3: source = var.name else: source = ('(exception variable "{}", which we do not ' 'accept outside except: blocks even in ' 'python 2)'.format(var.name)) cast(Var, var.node).type = DeletedType(source=source) self.binder.cleanse(var) if s.else_body: self.accept(s.else_body) def check_except_handler_test(self, n: Expression) -> Type: """Type check an exception handler test clause.""" typ = self.expr_checker.accept(n) all_types = [] # type: List[Type] test_types = self.get_types_from_except_handler(typ, n) for ttype in test_types: if isinstance(ttype, AnyType): all_types.append(ttype) continue if isinstance(ttype, FunctionLike): item = ttype.items()[0] if not item.is_type_obj(): self.fail(messages.INVALID_EXCEPTION_TYPE, n) return AnyType(TypeOfAny.from_error) exc_type = item.ret_type elif isinstance(ttype, TypeType): exc_type = ttype.item else: self.fail(messages.INVALID_EXCEPTION_TYPE, n) return AnyType(TypeOfAny.from_error) if not is_subtype(exc_type, self.named_type('builtins.BaseException')): self.fail(messages.INVALID_EXCEPTION_TYPE, n) return AnyType(TypeOfAny.from_error) all_types.append(exc_type) return UnionType.make_simplified_union(all_types) def get_types_from_except_handler(self, typ: Type, n: Expression) -> List[Type]: """Helper for check_except_handler_test to retrieve handler types.""" if isinstance(typ, TupleType): return typ.items elif isinstance(typ, UnionType): return [ union_typ for item in typ.relevant_items() for union_typ in self.get_types_from_except_handler(item, n) ] elif isinstance(typ, Instance) and is_named_instance(typ, 'builtins.tuple'): # variadic tuple return [typ.args[0]] else: return [typ] def visit_for_stmt(self, s: ForStmt) -> None: """Type check a for statement.""" if s.is_async: item_type = self.analyze_async_iterable_item_type(s.expr) else: item_type = self.analyze_iterable_item_type(s.expr) s.inferred_item_type = item_type self.analyze_index_variables(s.index, item_type, s.index_type is None, s) self.accept_loop(s.body, s.else_body) def analyze_async_iterable_item_type(self, expr: Expression) -> Type: """Analyse async iterable expression and return iterator item type.""" echk = self.expr_checker iterable = echk.accept(expr) self.check_subtype(iterable, self.named_generic_type('typing.AsyncIterable', [AnyType(TypeOfAny.special_form)]), expr, messages.ASYNC_ITERABLE_EXPECTED) method = echk.analyze_external_member_access('__aiter__', iterable, expr) iterator = echk.check_call(method, [], [], expr)[0] method = echk.analyze_external_member_access('__anext__', iterator, expr) awaitable = echk.check_call(method, [], [], expr)[0] return echk.check_awaitable_expr(awaitable, expr, messages.INCOMPATIBLE_TYPES_IN_ASYNC_FOR) def analyze_iterable_item_type(self, expr: Expression) -> Type: """Analyse iterable expression and return iterator item type.""" echk = self.expr_checker iterable = echk.accept(expr) if isinstance(iterable, TupleType): joined = UninhabitedType() # type: Type for item in iterable.items: joined = join_types(joined, item) return joined else: # Non-tuple iterable. 
self.check_subtype(iterable, self.named_generic_type('typing.Iterable', [AnyType(TypeOfAny.special_form)]), expr, messages.ITERABLE_EXPECTED) method = echk.analyze_external_member_access('__iter__', iterable, expr) iterator = echk.check_call(method, [], [], expr)[0] if self.options.python_version[0] >= 3: nextmethod = '__next__' else: nextmethod = 'next' method = echk.analyze_external_member_access(nextmethod, iterator, expr) return echk.check_call(method, [], [], expr)[0] def analyze_index_variables(self, index: Expression, item_type: Type, infer_lvalue_type: bool, context: Context) -> None: """Type check or infer for loop or list comprehension index vars.""" self.check_assignment(index, self.temp_node(item_type, context), infer_lvalue_type) def visit_del_stmt(self, s: DelStmt) -> None: if isinstance(s.expr, IndexExpr): e = s.expr m = MemberExpr(e.base, '__delitem__') m.line = s.line c = CallExpr(m, [e.index], [nodes.ARG_POS], [None]) c.line = s.line self.expr_checker.accept(c, allow_none_return=True) else: s.expr.accept(self.expr_checker) for elt in flatten(s.expr): if isinstance(elt, NameExpr): self.binder.assign_type(elt, DeletedType(source=elt.name), get_declaration(elt), False) def visit_decorator(self, e: Decorator) -> None: for d in e.decorators: if isinstance(d, RefExpr): if d.fullname == 'typing.no_type_check': e.var.type = AnyType(TypeOfAny.special_form) e.var.is_ready = True return self.check_func_item(e.func, name=e.func.name()) # Process decorators from the inside out to determine decorated signature, which # may be different from the declared signature. sig = self.function_type(e.func) # type: Type for d in reversed(e.decorators): if refers_to_fullname(d, 'typing.overload'): self.fail('Single overload definition, multiple required', e) continue dec = self.expr_checker.accept(d) temp = self.temp_node(sig) fullname = None if isinstance(d, RefExpr): fullname = d.fullname self.check_for_untyped_decorator(e.func, dec, d) sig, t2 = self.expr_checker.check_call(dec, [temp], [nodes.ARG_POS], e, callable_name=fullname) self.check_untyped_after_decorator(sig, e.func) sig = cast(FunctionLike, sig) sig = set_callable_name(sig, e.func) e.var.type = sig e.var.is_ready = True if e.func.is_property: self.check_incompatible_property_override(e) if e.func.info and not e.func.is_dynamic(): self.check_method_override(e) def check_for_untyped_decorator(self, func: FuncDef, dec_type: Type, dec_expr: Expression) -> None: if (self.options.disallow_untyped_decorators and is_typed_callable(func.type) and is_untyped_decorator(dec_type)): self.msg.typed_function_untyped_decorator(func.name(), dec_expr) def check_incompatible_property_override(self, e: Decorator) -> None: if not e.var.is_settable_property and e.func.info is not None: name = e.func.name() for base in e.func.info.mro[1:]: base_attr = base.names.get(name) if not base_attr: continue if (isinstance(base_attr.node, OverloadedFuncDef) and base_attr.node.is_property and cast(Decorator, base_attr.node.items[0]).var.is_settable_property): self.fail(messages.READ_ONLY_PROPERTY_OVERRIDES_READ_WRITE, e) def visit_with_stmt(self, s: WithStmt) -> None: for expr, target in zip(s.expr, s.target): if s.is_async: self.check_async_with_item(expr, target, s.target_type is None) else: self.check_with_item(expr, target, s.target_type is None) self.accept(s.body) def check_untyped_after_decorator(self, typ: Type, func: FuncDef) -> None: if not self.options.disallow_any_decorated or self.is_stub: return if mypy.checkexpr.has_any_type(typ): 
self.msg.untyped_decorated_function(typ, func) def check_async_with_item(self, expr: Expression, target: Optional[Expression], infer_lvalue_type: bool) -> None: echk = self.expr_checker ctx = echk.accept(expr) enter = echk.analyze_external_member_access('__aenter__', ctx, expr) obj = echk.check_call(enter, [], [], expr)[0] obj = echk.check_awaitable_expr( obj, expr, messages.INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER) if target: self.check_assignment(target, self.temp_node(obj, expr), infer_lvalue_type) exit = echk.analyze_external_member_access('__aexit__', ctx, expr) arg = self.temp_node(AnyType(TypeOfAny.special_form), expr) res = echk.check_call(exit, [arg] * 3, [nodes.ARG_POS] * 3, expr)[0] echk.check_awaitable_expr( res, expr, messages.INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AEXIT) def check_with_item(self, expr: Expression, target: Optional[Expression], infer_lvalue_type: bool) -> None: echk = self.expr_checker ctx = echk.accept(expr) enter = echk.analyze_external_member_access('__enter__', ctx, expr) obj = echk.check_call(enter, [], [], expr)[0] if target: self.check_assignment(target, self.temp_node(obj, expr), infer_lvalue_type) exit = echk.analyze_external_member_access('__exit__', ctx, expr) arg = self.temp_node(AnyType(TypeOfAny.special_form), expr) echk.check_call(exit, [arg] * 3, [nodes.ARG_POS] * 3, expr) def visit_print_stmt(self, s: PrintStmt) -> None: for arg in s.args: self.expr_checker.accept(arg) if s.target: target_type = self.expr_checker.accept(s.target) if not isinstance(target_type, NoneTyp): # TODO: Also verify the type of 'write'. self.expr_checker.analyze_external_member_access('write', target_type, s.target) def visit_break_stmt(self, s: BreakStmt) -> None: self.binder.handle_break() def visit_continue_stmt(self, s: ContinueStmt) -> None: self.binder.handle_continue() return None # # Helpers # def check_subtype(self, subtype: Type, supertype: Type, context: Context, msg: str = messages.INCOMPATIBLE_TYPES, subtype_label: Optional[str] = None, supertype_label: Optional[str] = None) -> bool: """Generate an error if the subtype is not compatible with supertype.""" if is_subtype(subtype, supertype): return True else: if self.should_suppress_optional_error([subtype]): return False extra_info = [] # type: List[str] note_msg = '' if subtype_label is not None or supertype_label is not None: subtype_str, supertype_str = self.msg.format_distinctly(subtype, supertype) if subtype_label is not None: extra_info.append(subtype_label + ' ' + subtype_str) if supertype_label is not None: extra_info.append(supertype_label + ' ' + supertype_str) note_msg = make_inferred_type_note(context, subtype, supertype, supertype_str) if extra_info: msg += ' (' + ', '.join(extra_info) + ')' self.fail(msg, context) if note_msg: self.note(note_msg, context) if (isinstance(supertype, Instance) and supertype.type.is_protocol and isinstance(subtype, (Instance, TupleType, TypedDictType))): self.msg.report_protocol_problems(subtype, supertype, context) if isinstance(supertype, CallableType) and isinstance(subtype, Instance): call = find_member('__call__', subtype, subtype) if call: self.msg.note_call(subtype, call, context) return False def contains_none(self, t: Type) -> bool: return ( isinstance(t, NoneTyp) or (isinstance(t, UnionType) and any(self.contains_none(ut) for ut in t.items)) or (isinstance(t, TupleType) and any(self.contains_none(tt) for tt in t.items)) or (isinstance(t, Instance) and bool(t.args) and any(self.contains_none(it) for it in t.args)) ) def should_suppress_optional_error(self, 
related_types: List[Type]) -> bool: return self.suppress_none_errors and any(self.contains_none(t) for t in related_types) def named_type(self, name: str) -> Instance: """Return an instance type with type given by the name and no type arguments. For example, named_type('builtins.object') produces the object type. """ # Assume that the name refers to a type. sym = self.lookup_qualified(name) node = sym.node assert isinstance(node, TypeInfo) any_type = AnyType(TypeOfAny.from_omitted_generics) return Instance(node, [any_type] * len(node.defn.type_vars)) def named_generic_type(self, name: str, args: List[Type]) -> Instance: """Return an instance with the given name and type arguments. Assume that the number of arguments is correct. Assume that the name refers to a compatible generic type. """ info = self.lookup_typeinfo(name) # TODO: assert len(args) == len(info.defn.type_vars) return Instance(info, args) def lookup_typeinfo(self, fullname: str) -> TypeInfo: # Assume that the name refers to a class. sym = self.lookup_qualified(fullname) node = sym.node assert isinstance(node, TypeInfo) return node def type_type(self) -> Instance: """Return instance type 'type'.""" return self.named_type('builtins.type') def str_type(self) -> Instance: """Return instance type 'str'.""" return self.named_type('builtins.str') def store_type(self, node: Expression, typ: Type) -> None: """Store the type of a node in the type map.""" self.type_map[node] = typ def in_checked_function(self) -> bool: """Should we type-check the current function? - Yes if --check-untyped-defs is set. - Yes outside functions. - Yes in annotated functions. - No otherwise. """ return (self.options.check_untyped_defs or not self.dynamic_funcs or not self.dynamic_funcs[-1]) def lookup(self, name: str, kind: int) -> SymbolTableNode: """Look up a definition from the symbol table with the given name. TODO remove kind argument """ if name in self.globals: return self.globals[name] else: b = self.globals.get('__builtins__', None) if b: table = cast(MypyFile, b.node).names if name in table: return table[name] raise KeyError('Failed lookup: {}'.format(name)) def lookup_qualified(self, name: str) -> SymbolTableNode: if '.' not in name: return self.lookup(name, GDEF) # FIX kind else: parts = name.split('.') n = self.modules[parts[0]] for i in range(1, len(parts) - 1): sym = n.names.get(parts[i]) assert sym is not None, "Internal error: attempted lookup of unknown name" n = cast(MypyFile, sym.node) last = parts[-1] if last in n.names: return n.names[last] elif len(parts) == 2 and parts[0] == 'builtins': raise KeyError("Could not find builtin symbol '{}'. (Are you running a " "test case? If so, make sure to include a fixture that " "defines this symbol.)".format(last)) else: msg = "Failed qualified lookup: '{}' (fullname = '{}')." raise KeyError(msg.format(last, name)) @contextmanager def enter_partial_types(self) -> Iterator[None]: """Enter a new scope for collecting partial types. Also report errors for variables which still have partial types, i.e. we couldn't infer a complete type. 
""" self.partial_types.append({}) yield partial_types = self.partial_types.pop() if not self.current_node_deferred: for var, context in partial_types.items(): if isinstance(var.type, PartialType) and var.type.type is None: # None partial type: assume variable is intended to have type None var.type = NoneTyp() else: if var not in self.partial_reported: self.msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context) self.partial_reported.add(var) var.type = AnyType(TypeOfAny.from_error) def find_partial_types(self, var: Var) -> Optional[Dict[Var, Context]]: for partial_types in reversed(self.partial_types): if var in partial_types: return partial_types return None def temp_node(self, t: Type, context: Optional[Context] = None) -> TempNode: """Create a temporary node with the given, fixed type.""" temp = TempNode(t) if context: temp.set_line(context.get_line()) return temp def fail(self, msg: str, context: Context) -> None: """Produce an error message.""" self.msg.fail(msg, context) def warn(self, msg: str, context: Context) -> None: """Produce a warning message.""" self.msg.warn(msg, context) def note(self, msg: str, context: Context, offset: int = 0) -> None: """Produce a note.""" self.msg.note(msg, context, offset=offset) def iterable_item_type(self, instance: Instance) -> Type: iterable = map_instance_to_supertype( instance, self.lookup_typeinfo('typing.Iterable')) item_type = iterable.args[0] if not isinstance(item_type, AnyType): # This relies on 'map_instance_to_supertype' returning 'Iterable[Any]' # in case there is no explicit base class. return item_type # Try also structural typing. iter_type = find_member('__iter__', instance, instance) if (iter_type and isinstance(iter_type, CallableType) and isinstance(iter_type.ret_type, Instance)): iterator = map_instance_to_supertype(iter_type.ret_type, self.lookup_typeinfo('typing.Iterator')) item_type = iterator.args[0] return item_type def function_type(self, func: FuncBase) -> FunctionLike: return function_type(func, self.named_type('builtins.function')) def find_isinstance_check(self, n: Expression) -> 'Tuple[TypeMap, TypeMap]': return find_isinstance_check(n, self.type_map) def push_type_map(self, type_map: 'TypeMap') -> None: if type_map is None: self.binder.unreachable() else: for expr, type in type_map.items(): self.binder.put(expr, type) # Data structure returned by find_isinstance_check representing # information learned from the truth or falsehood of a condition. The # dict maps nodes representing expressions like 'a[0].x' to their # refined types under the assumption that the condition has a # particular truth value. A value of None means that the condition can # never have that truth value. # NB: The keys of this dict are nodes in the original source program, # which are compared by reference equality--effectively, being *the # same* expression of the program, not just two identical expressions # (such as two references to the same variable). TODO: it would # probably be better to have the dict keyed by the nodes' literal_hash # field instead. TypeMap = Optional[Dict[Expression, Type]] # An object that represents either a precise type or a type with an upper bound; # it is important for correct type inference with isinstance. 
TypeRange = NamedTuple( 'TypeRange', [ ('item', Type), ('is_upper_bound', bool), # False => precise type ]) def conditional_type_map(expr: Expression, current_type: Optional[Type], proposed_type_ranges: Optional[List[TypeRange]], ) -> Tuple[TypeMap, TypeMap]: """Takes in an expression, the current type of the expression, and a proposed type of that expression. Returns a 2-tuple: The first element is a map from the expression to the proposed type, if the expression can be the proposed type. The second element is a map from the expression to the type it would hold if it was not the proposed type, if any. None means bot, {} means top""" if proposed_type_ranges: if len(proposed_type_ranges) == 1: proposed_type = proposed_type_ranges[0].item # Union with a single type breaks tests else: proposed_type = UnionType([type_range.item for type_range in proposed_type_ranges]) if current_type: if (not any(type_range.is_upper_bound for type_range in proposed_type_ranges) and is_proper_subtype(current_type, proposed_type)): # Expression is always of one of the types in proposed_type_ranges return {}, None elif not is_overlapping_types(current_type, proposed_type): # Expression is never of any type in proposed_type_ranges return None, {} else: # we can only restrict when the type is precise, not bounded proposed_precise_type = UnionType([type_range.item for type_range in proposed_type_ranges if not type_range.is_upper_bound]) remaining_type = restrict_subtype_away(current_type, proposed_precise_type) return {expr: proposed_type}, {expr: remaining_type} else: return {expr: proposed_type}, {} else: # An isinstance check, but we don't understand the type return {}, {} def partition_by_callable(type: Type) -> Tuple[List[Type], List[Type]]: """Takes in a type and partitions that type into callable subtypes and uncallable subtypes. Thus, given: `callables, uncallables = partition_by_callable(type)` If we assert `callable(type)` then `type` has type Union[*callables], and If we assert `not callable(type)` then `type` has type Union[*uncallables] Guaranteed to not return [], []""" if isinstance(type, FunctionLike) or isinstance(type, TypeType): return [type], [] if isinstance(type, AnyType): return [type], [type] if isinstance(type, UnionType): callables = [] uncallables = [] for subtype in type.relevant_items(): subcallables, subuncallables = partition_by_callable(subtype) callables.extend(subcallables) uncallables.extend(subuncallables) return callables, uncallables if isinstance(type, TypeVarType): return partition_by_callable(type.erase_to_union_or_bound()) if isinstance(type, Instance): method = type.type.get_method('__call__') if method and method.type: callables, uncallables = partition_by_callable(method.type) if len(callables) and not len(uncallables): # Only consider the type callable if its __call__ method is # definitely callable. return [type], [] return [], [type] return [], [type] def conditional_callable_type_map(expr: Expression, current_type: Optional[Type], ) -> Tuple[TypeMap, TypeMap]: """Takes in an expression and the current type of the expression. Returns a 2-tuple: The first element is a map from the expression to the restricted type if it were callable. 
The second element is a map from the expression to the type it would hold if it weren't callable.""" if not current_type: return {}, {} if isinstance(current_type, AnyType): return {}, {} callables, uncallables = partition_by_callable(current_type) if len(callables) and len(uncallables): callable_map = {expr: UnionType.make_union(callables)} if len(callables) else None uncallable_map = {expr: UnionType.make_union(uncallables)} if len(uncallables) else None return callable_map, uncallable_map elif len(callables): return {}, None return None, {} def is_true_literal(n: Expression) -> bool: return (refers_to_fullname(n, 'builtins.True') or isinstance(n, IntExpr) and n.value == 1) def is_false_literal(n: Expression) -> bool: return (refers_to_fullname(n, 'builtins.False') or isinstance(n, IntExpr) and n.value == 0) def is_literal_none(n: Expression) -> bool: return isinstance(n, NameExpr) and n.fullname == 'builtins.None' def is_optional(t: Type) -> bool: return isinstance(t, UnionType) and any(isinstance(e, NoneTyp) for e in t.items) def remove_optional(typ: Type) -> Type: if isinstance(typ, UnionType): return UnionType.make_union([t for t in typ.items if not isinstance(t, NoneTyp)]) else: return typ def builtin_item_type(tp: Type) -> Optional[Type]: """Get the item type of a builtin container. If 'tp' is not one of the built containers (these includes NamedTuple and TypedDict) or if the container is not parameterized (like List or List[Any]) return None. This function is used to narrow optional types in situations like this: x: Optional[int] if x in (1, 2, 3): x + 42 # OK Note: this is only OK for built-in containers, where we know the behavior of __contains__. """ if isinstance(tp, Instance): if tp.type.fullname() in ['builtins.list', 'builtins.tuple', 'builtins.dict', 'builtins.set', 'builtins.frozenset']: if not tp.args: # TODO: fix tuple in lib-stub/builtins.pyi (it should be generic). return None if not isinstance(tp.args[0], AnyType): return tp.args[0] elif isinstance(tp, TupleType) and all(not isinstance(it, AnyType) for it in tp.items): return UnionType.make_simplified_union(tp.items) # this type is not externally visible elif isinstance(tp, TypedDictType): # TypedDict always has non-optional string keys. if tp.fallback.type.fullname() == 'typing.Mapping': return tp.fallback.args[0] elif tp.fallback.type.bases[0].type.fullname() == 'typing.Mapping': return tp.fallback.type.bases[0].args[0] return None def and_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap: """Calculate what information we can learn from the truth of (e1 and e2) in terms of the information that we can learn from the truth of e1 and the truth of e2. """ if m1 is None or m2 is None: # One of the conditions can never be true. return None # Both conditions can be true; combine the information. Anything # we learn from either conditions's truth is valid. If the same # expression's type is refined by both conditions, we somewhat # arbitrarily give precedence to m2. (In the future, we could use # an intersection type.) result = m2.copy() m2_keys = set(literal_hash(n2) for n2 in m2) for n1 in m1: if literal_hash(n1) not in m2_keys: result[n1] = m1[n1] return result def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap: """Calculate what information we can learn from the truth of (e1 or e2) in terms of the information that we can learn from the truth of e1 and the truth of e2. """ if m1 is None: return m2 if m2 is None: return m1 # Both conditions can be true. 
Combine information about # expressions whose type is refined by both conditions. (We do not # learn anything about expressions whose type is refined by only # one condition.) result = {} for n1 in m1: for n2 in m2: if literal_hash(n1) == literal_hash(n2): result[n1] = UnionType.make_simplified_union([m1[n1], m2[n2]]) return result def convert_to_typetype(type_map: TypeMap) -> TypeMap: converted_type_map = {} # type: Dict[Expression, Type] if type_map is None: return None for expr, typ in type_map.items(): if not isinstance(typ, (UnionType, Instance)): # unknown type; error was likely reported earlier return {} converted_type_map[expr] = TypeType.make_normalized(typ) return converted_type_map def find_isinstance_check(node: Expression, type_map: Dict[Expression, Type], ) -> Tuple[TypeMap, TypeMap]: """Find any isinstance checks (within a chain of ands). Includes implicit and explicit checks for None and calls to callable. Return value is a map of variables to their types if the condition is true and a map of variables to their types if the condition is false. If either of the values in the tuple is None, then that particular branch can never occur. Guaranteed to not return None, None. (But may return {}, {}) """ if is_true_literal(node): return {}, None elif is_false_literal(node): return None, {} elif isinstance(node, CallExpr): if refers_to_fullname(node.callee, 'builtins.isinstance'): if len(node.args) != 2: # the error will be reported later return {}, {} expr = node.args[0] if literal(expr) == LITERAL_TYPE: vartype = type_map[expr] type = get_isinstance_type(node.args[1], type_map) return conditional_type_map(expr, vartype, type) elif refers_to_fullname(node.callee, 'builtins.issubclass'): expr = node.args[0] if literal(expr) == LITERAL_TYPE: vartype = type_map[expr] type = get_isinstance_type(node.args[1], type_map) if isinstance(vartype, UnionType): union_list = [] for t in vartype.items: if isinstance(t, TypeType): union_list.append(t.item) else: # this is an error that should be reported earlier # if we reach here, we refuse to do any type inference return {}, {} vartype = UnionType(union_list) elif isinstance(vartype, TypeType): vartype = vartype.item else: # any other object whose type we don't know precisely # for example, Any or Instance of type type return {}, {} # unknown type yes_map, no_map = conditional_type_map(expr, vartype, type) yes_map, no_map = map(convert_to_typetype, (yes_map, no_map)) return yes_map, no_map elif refers_to_fullname(node.callee, 'builtins.callable'): expr = node.args[0] if literal(expr) == LITERAL_TYPE: vartype = type_map[expr] return conditional_callable_type_map(expr, vartype) elif isinstance(node, ComparisonExpr) and experiments.STRICT_OPTIONAL: # Check for `x is None` and `x is not None`. is_not = node.operators == ['is not'] if any(is_literal_none(n) for n in node.operands) and (is_not or node.operators == ['is']): if_vars = {} # type: TypeMap else_vars = {} # type: TypeMap for expr in node.operands: if (literal(expr) == LITERAL_TYPE and not is_literal_none(expr) and expr in type_map): # This should only be true at most once: there should be # two elements in node.operands, and at least one of them # should represent a None. 
vartype = type_map[expr] none_typ = [TypeRange(NoneTyp(), is_upper_bound=False)] if_vars, else_vars = conditional_type_map(expr, vartype, none_typ) break if is_not: if_vars, else_vars = else_vars, if_vars return if_vars, else_vars # Check for `x == y` where x is of type Optional[T] and y is of type T # or a type that overlaps with T (or vice versa). elif node.operators == ['==']: first_type = type_map[node.operands[0]] second_type = type_map[node.operands[1]] if is_optional(first_type) != is_optional(second_type): if is_optional(first_type): optional_type, comp_type = first_type, second_type optional_expr = node.operands[0] else: optional_type, comp_type = second_type, first_type optional_expr = node.operands[1] if is_overlapping_types(optional_type, comp_type): return {optional_expr: remove_optional(optional_type)}, {} elif node.operators in [['in'], ['not in']]: expr = node.operands[0] left_type = type_map[expr] right_type = builtin_item_type(type_map[node.operands[1]]) right_ok = right_type and (not is_optional(right_type) and (not isinstance(right_type, Instance) or right_type.type.fullname() != 'builtins.object')) if (right_type and right_ok and is_optional(left_type) and literal(expr) == LITERAL_TYPE and not is_literal_none(expr) and is_overlapping_types(left_type, right_type)): if node.operators == ['in']: return {expr: remove_optional(left_type)}, {} if node.operators == ['not in']: return {}, {expr: remove_optional(left_type)} elif isinstance(node, RefExpr): # Restrict the type of the variable to True-ish/False-ish in the if and else branches # respectively vartype = type_map[node] if_type = true_only(vartype) else_type = false_only(vartype) ref = node # type: Expression if_map = {ref: if_type} if not isinstance(if_type, UninhabitedType) else None else_map = {ref: else_type} if not isinstance(else_type, UninhabitedType) else None return if_map, else_map elif isinstance(node, OpExpr) and node.op == 'and': left_if_vars, left_else_vars = find_isinstance_check(node.left, type_map) right_if_vars, right_else_vars = find_isinstance_check(node.right, type_map) # (e1 and e2) is true if both e1 and e2 are true, # and false if at least one of e1 and e2 is false. return (and_conditional_maps(left_if_vars, right_if_vars), or_conditional_maps(left_else_vars, right_else_vars)) elif isinstance(node, OpExpr) and node.op == 'or': left_if_vars, left_else_vars = find_isinstance_check(node.left, type_map) right_if_vars, right_else_vars = find_isinstance_check(node.right, type_map) # (e1 or e2) is true if at least one of e1 or e2 is true, # and false if both e1 and e2 are false. 
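        # Editor's illustrative sketch (not in the original source), assuming
        # strict Optional checking:
        #
        #     x: Optional[int]
        #     y: Optional[int]
        #     if x is None or y is None:
        #         ...        # nothing is narrowed here; only facts shared by
        #                    # both operands would survive or_conditional_maps
        #     else:
        #         ...        # both x and y are narrowed to int here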
return (or_conditional_maps(left_if_vars, right_if_vars), and_conditional_maps(left_else_vars, right_else_vars)) elif isinstance(node, UnaryExpr) and node.op == 'not': left, right = find_isinstance_check(node.expr, type_map) return right, left # Not a supported isinstance check return {}, {} def flatten(t: Expression) -> List[Expression]: """Flatten a nested sequence of tuples/lists into one list of nodes.""" if isinstance(t, TupleExpr) or isinstance(t, ListExpr): return [b for a in t.items for b in flatten(a)] else: return [t] def flatten_types(t: Type) -> List[Type]: """Flatten a nested sequence of tuples into one list of nodes.""" if isinstance(t, TupleType): return [b for a in t.items for b in flatten_types(a)] else: return [t] def get_isinstance_type(expr: Expression, type_map: Dict[Expression, Type]) -> Optional[List[TypeRange]]: all_types = flatten_types(type_map[expr]) types = [] # type: List[TypeRange] for typ in all_types: if isinstance(typ, FunctionLike) and typ.is_type_obj(): # Type variables may be present -- erase them, which is the best # we can do (outside disallowing them here). typ = erase_typevars(typ.items()[0].ret_type) types.append(TypeRange(typ, is_upper_bound=False)) elif isinstance(typ, TypeType): # Type[A] means "any type that is a subtype of A" rather than "precisely type A" # we indicate this by setting is_upper_bound flag types.append(TypeRange(typ.item, is_upper_bound=True)) elif isinstance(typ, Instance) and typ.type.fullname() == 'builtins.type': object_type = Instance(typ.type.mro[-1], []) types.append(TypeRange(object_type, is_upper_bound=True)) elif isinstance(typ, AnyType): types.append(TypeRange(typ, is_upper_bound=False)) else: # we didn't see an actual type, but rather a variable whose value is unknown to us return None if not types: # this can happen if someone has empty tuple as 2nd argument to isinstance # strictly speaking, we should return UninhabitedType but for simplicity we will simply # refuse to do any type inference for now return None return types def expand_func(defn: FuncItem, map: Dict[TypeVarId, Type]) -> FuncItem: visitor = TypeTransformVisitor(map) ret = defn.accept(visitor) assert isinstance(ret, FuncItem) return ret class TypeTransformVisitor(TransformVisitor): def __init__(self, map: Dict[TypeVarId, Type]) -> None: super().__init__() self.map = map def type(self, type: Type) -> Type: return expand_type(type, self.map) def is_unsafe_overlapping_signatures(signature: Type, other: Type) -> bool: """Check if two signatures may be unsafely overlapping. Two signatures s and t are overlapping if both can be valid for the same statically typed values and the return types are incompatible. Assume calls are first checked against 'signature', then against 'other'. Thus if 'signature' is more general than 'other', there is no unsafe overlapping. TODO If argument types vary covariantly, the return type may vary covariantly as well. """ if isinstance(signature, CallableType): if isinstance(other, CallableType): # TODO varargs # TODO keyword args # TODO erasure # TODO allow to vary covariantly # Check if the argument counts are overlapping. min_args = max(signature.min_args, other.min_args) max_args = min(len(signature.arg_types), len(other.arg_types)) if min_args > max_args: # Argument counts are not overlapping. return False # Signatures are overlapping iff if they are overlapping for the # smallest common argument count. 
for i in range(min_args): t1 = signature.arg_types[i] t2 = other.arg_types[i] if not is_overlapping_types(t1, t2): return False # All arguments types for the smallest common argument count are # overlapping => the signature is overlapping. The overlapping is # safe if the return types are identical. if is_same_type(signature.ret_type, other.ret_type): return False # If the first signature has more general argument types, the # latter will never be called if is_more_general_arg_prefix(signature, other): return False # Special case: all args are subtypes, and returns are subtypes if (all(is_proper_subtype(s, o) for (s, o) in zip(signature.arg_types, other.arg_types)) and is_proper_subtype(signature.ret_type, other.ret_type)): return False return not is_more_precise_signature(signature, other) return True def is_more_general_arg_prefix(t: FunctionLike, s: FunctionLike) -> bool: """Does t have wider arguments than s?""" # TODO should an overload with additional items be allowed to be more # general than one with fewer items (or just one item)? # TODO check argument kinds and otherwise make more general if isinstance(t, CallableType): if isinstance(s, CallableType): t, s = unify_generic_callables(t, s) return all(is_proper_subtype(args, argt) for argt, args in zip(t.arg_types, s.arg_types)) elif isinstance(t, FunctionLike): if isinstance(s, FunctionLike): if len(t.items()) == len(s.items()): return all(is_same_arg_prefix(items, itemt) for items, itemt in zip(t.items(), s.items())) return False def unify_generic_callables(t: CallableType, s: CallableType) -> Tuple[CallableType, CallableType]: """Make type variables in generic callables the same if possible. Return updated callables. If we can't unify the type variables, return the unmodified arguments. """ # TODO: Use this elsewhere when comparing generic callables. if t.is_generic() and s.is_generic(): t_substitutions = {} s_substitutions = {} for tv1, tv2 in zip(t.variables, s.variables): # Are these something we can unify? if tv1.id != tv2.id and is_equivalent_type_var_def(tv1, tv2): newdef = TypeVarDef.new_unification_variable(tv2) t_substitutions[tv1.id] = TypeVarType(newdef) s_substitutions[tv2.id] = TypeVarType(newdef) return (cast(CallableType, expand_type(t, t_substitutions)), cast(CallableType, expand_type(s, s_substitutions))) return t, s def is_equivalent_type_var_def(tv1: TypeVarDef, tv2: TypeVarDef) -> bool: """Are type variable definitions equivalent? Ignore ids, locations in source file and names. """ return ( tv1.variance == tv2.variance and is_same_types(tv1.values, tv2.values) and ((tv1.upper_bound is None and tv2.upper_bound is None) or (tv1.upper_bound is not None and tv2.upper_bound is not None and is_same_type(tv1.upper_bound, tv2.upper_bound)))) def is_same_arg_prefix(t: CallableType, s: CallableType) -> bool: # TODO check argument kinds return all(is_same_type(argt, args) for argt, args in zip(t.arg_types, s.arg_types)) def is_more_precise_signature(t: CallableType, s: CallableType) -> bool: """Is t more precise than s? A signature t is more precise than s if all argument types and the return type of t are more precise than the corresponding types in s. Assume that the argument kinds and names are compatible, and that the argument counts are overlapping. """ # TODO generic function types # Only consider the common prefix of argument types. 
    for argt, args in zip(t.arg_types, s.arg_types):
        if not is_more_precise(argt, args):
            return False
    return is_more_precise(t.ret_type, s.ret_type)


def infer_operator_assignment_method(typ: Type, operator: str) -> Tuple[bool, str]:
    """Determine if operator assignment on given value type is in-place, and the method name.

    For example, if operator is '+', return (True, '__iadd__') or (False, '__add__')
    depending on which method is supported by the type.
    """
    method = nodes.op_methods[operator]
    if isinstance(typ, Instance):
        if operator in nodes.ops_with_inplace_method:
            inplace_method = '__i' + method[2:]
            if typ.type.has_readable_member(inplace_method):
                return True, inplace_method
    return False, method


def is_valid_inferred_type(typ: Type) -> bool:
    """Is an inferred type valid?

    Examples of invalid types include the None type or List[<uninhabited>].

    When not doing strict Optional checking, all types containing None are
    invalid. When doing strict Optional checking, only None and types that are
    incompletely defined (i.e. contain UninhabitedType) are invalid.
    """
    if isinstance(typ, (NoneTyp, UninhabitedType)):
        # With strict Optional checking, we *may* eventually infer NoneTyp when
        # the initializer is None, but we only do that if we can't infer a
        # specific Optional type. This resolution happens in
        # leave_partial_types when we pop a partial types scope.
        return False
    return is_valid_inferred_type_component(typ)


def is_valid_inferred_type_component(typ: Type) -> bool:
    """Is this part of a type a valid inferred type?

    In strict Optional mode this excludes bare None types, as otherwise
    every type containing None would be invalid.
    """
    if is_same_type(typ, UninhabitedType()):
        return False
    elif isinstance(typ, Instance):
        for arg in typ.args:
            if not is_valid_inferred_type_component(arg):
                return False
    elif isinstance(typ, TupleType):
        for item in typ.items:
            if not is_valid_inferred_type_component(item):
                return False
    return True


def is_node_static(node: Optional[Node]) -> Optional[bool]:
    """Find out if a node describes a static function or method."""
    if isinstance(node, FuncDef):
        return node.is_static
    if isinstance(node, Var):
        return node.is_staticmethod
    return None


class Scope:
    # We keep two stacks combined, to maintain the relative order
    stack = None  # type: List[Union[TypeInfo, FuncItem, MypyFile]]

    def __init__(self, module: MypyFile) -> None:
        self.stack = [module]

    def top_function(self) -> Optional[FuncItem]:
        for e in reversed(self.stack):
            if isinstance(e, FuncItem):
                return e
        return None

    def top_non_lambda_function(self) -> Optional[FuncItem]:
        for e in reversed(self.stack):
            if isinstance(e, FuncItem) and not isinstance(e, LambdaExpr):
                return e
        return None

    def active_class(self) -> Optional[TypeInfo]:
        if isinstance(self.stack[-1], TypeInfo):
            return self.stack[-1]
        return None

    def enclosing_class(self) -> Optional[TypeInfo]:
        top = self.top_function()
        assert top, "This method must be called from inside a function"
        index = self.stack.index(top)
        assert index, "Scope stack must always start with a module"
        enclosing = self.stack[index - 1]
        if isinstance(enclosing, TypeInfo):
            return enclosing
        return None

    def active_self_type(self) -> Optional[Union[Instance, TupleType]]:
        info = self.active_class()
        if info:
            return fill_typevars(info)
        return None

    @contextmanager
    def push_function(self, item: FuncItem) -> Iterator[None]:
        self.stack.append(item)
        yield
        self.stack.pop()

    @contextmanager
    def push_class(self, info: TypeInfo) -> Iterator[None]:
        self.stack.append(info)
        yield
        self.stack.pop()


@contextmanager
def nothing() -> Iterator[None]:
    yield
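# ---------------------------------------------------------------------------
# Editor's illustrative sketch (not part of the original module): how
# infer_operator_assignment_method() and is_valid_inferred_type() above behave
# from a user's point of view.  The class and variable names are hypothetical.
#
# infer_operator_assignment_method():
#
#     class Counter:
#         def __iadd__(self, n: int) -> 'Counter': ...
#
#     c = Counter()
#     c += 1       # resolved as (True, '__iadd__'); checked like
#                  # c = c.__iadd__(1), and the result must be assignable
#                  # back to the declared type of 'c'
#     s = 'ab'
#     s += 'cd'    # str defines no __iadd__, so this resolves to
#                  # (False, '__add__') and is checked like s = s + 'cd'
#
# is_valid_inferred_type():
#
#     x = None     # a bare None initializer is not a valid inferred type;
#                  # a partial type is used instead (see enter_partial_types)
#     y = []       # the item type is still unknown, so the inferred list
#                  # type is incomplete; y.append(1) later completes it to
#                  # List[int], otherwise an annotation is required
# ---------------------------------------------------------------------------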
def is_typed_callable(c: Optional[Type]) -> bool: if not c or not isinstance(c, CallableType): return False return not all(isinstance(t, AnyType) and t.type_of_any == TypeOfAny.unannotated for t in c.arg_types + [c.ret_type]) def is_untyped_decorator(typ: Optional[Type]) -> bool: if not typ or not isinstance(typ, CallableType): return True return typ.implicit def is_static(func: Union[FuncBase, Decorator]) -> bool: if isinstance(func, Decorator): return is_static(func.func) elif isinstance(func, OverloadedFuncDef): return any(is_static(item) for item in func.items) elif isinstance(func, FuncItem): return func.is_static return False mypy-0.560/mypy/checkexpr.py0000644€tŠÔÚ€2›s®0000043467613215007205022237 0ustar jukkaDROPBOX\Domain Users00000000000000"""Expression type checker. This file is conceptually part of TypeChecker.""" from collections import OrderedDict from typing import cast, Dict, Set, List, Tuple, Callable, Union, Optional, Iterable, Sequence, Any from mypy.errors import report_internal_error from mypy.typeanal import has_any_from_unimported_type, check_for_explicit_any, set_any_tvars from mypy.types import ( Type, AnyType, CallableType, Overloaded, NoneTyp, TypeVarDef, TupleType, TypedDictType, Instance, TypeVarType, ErasedType, UnionType, PartialType, DeletedType, UnboundType, UninhabitedType, TypeType, TypeOfAny, true_only, false_only, is_named_instance, function_type, callable_type, FunctionLike, get_typ_args, set_typ_args, StarType) from mypy.nodes import ( NameExpr, RefExpr, Var, FuncDef, OverloadedFuncDef, TypeInfo, CallExpr, MemberExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr, OpExpr, UnaryExpr, IndexExpr, CastExpr, RevealTypeExpr, TypeApplication, ListExpr, TupleExpr, DictExpr, LambdaExpr, SuperExpr, SliceExpr, Context, Expression, ListComprehension, GeneratorExpr, SetExpr, MypyFile, Decorator, ConditionalExpr, ComparisonExpr, TempNode, SetComprehension, DictionaryComprehension, ComplexExpr, EllipsisExpr, StarExpr, AwaitExpr, YieldExpr, YieldFromExpr, TypedDictExpr, PromoteExpr, NewTypeExpr, NamedTupleExpr, TypeVarExpr, TypeAliasExpr, BackquoteExpr, EnumCallExpr, ARG_POS, ARG_NAMED, ARG_STAR, ARG_STAR2, MODULE_REF, TVAR, LITERAL_TYPE, ) from mypy.literals import literal from mypy import nodes import mypy.checker from mypy import types from mypy.sametypes import is_same_type from mypy.erasetype import replace_meta_vars from mypy.messages import MessageBuilder from mypy import messages from mypy.infer import infer_type_arguments, infer_function_type_arguments from mypy import join from mypy.meet import narrow_declared_type from mypy.maptype import map_instance_to_supertype from mypy.subtypes import is_subtype, is_equivalent, find_member, non_method_protocol_members from mypy import applytype from mypy import erasetype from mypy.checkmember import analyze_member_access, type_object_type, bind_self from mypy.constraints import get_actual_type from mypy.checkstrformat import StringFormatterChecker from mypy.expandtype import expand_type_by_instance, freshen_function_type_vars from mypy.util import split_module_names from mypy.typevars import fill_typevars from mypy.visitor import ExpressionVisitor from mypy.plugin import Plugin, MethodContext, MethodSigContext, FunctionContext from mypy.typeanal import make_optional_type from mypy import experiments # Type of callback user for checking individual function arguments. See # check_args() below for details. 
ArgChecker = Callable[[Type, Type, int, Type, int, int, CallableType, Context, MessageBuilder], None] def extract_refexpr_names(expr: RefExpr) -> Set[str]: """Recursively extracts all module references from a reference expression. Note that currently, the only two subclasses of RefExpr are NameExpr and MemberExpr.""" output = set() # type: Set[str] while expr.kind == MODULE_REF or expr.fullname is not None: if expr.kind == MODULE_REF and expr.fullname is not None: # If it's None, something's wrong (perhaps due to an # import cycle or a suppressed error). For now we just # skip it. output.add(expr.fullname) if isinstance(expr, NameExpr): is_suppressed_import = isinstance(expr.node, Var) and expr.node.is_suppressed_import if isinstance(expr.node, TypeInfo): # Reference to a class or a nested class output.update(split_module_names(expr.node.module_name)) elif expr.fullname is not None and '.' in expr.fullname and not is_suppressed_import: # Everything else (that is not a silenced import within a class) output.add(expr.fullname.rsplit('.', 1)[0]) break elif isinstance(expr, MemberExpr): if isinstance(expr.expr, RefExpr): expr = expr.expr else: break else: raise AssertionError("Unknown RefExpr subclass: {}".format(type(expr))) return output class Finished(Exception): """Raised if we can terminate overload argument check early (no match).""" class ExpressionChecker(ExpressionVisitor[Type]): """Expression type checker. This class works closely together with checker.TypeChecker. """ # Some services are provided by a TypeChecker instance. chk = None # type: mypy.checker.TypeChecker # This is shared with TypeChecker, but stored also here for convenience. msg = None # type: MessageBuilder # Type context for type inference type_context = None # type: List[Optional[Type]] strfrm_checker = None # type: StringFormatterChecker plugin = None # type: Plugin def __init__(self, chk: 'mypy.checker.TypeChecker', msg: MessageBuilder, plugin: Plugin) -> None: """Construct an expression type checker.""" self.chk = chk self.msg = msg self.plugin = plugin self.type_context = [None] self.strfrm_checker = StringFormatterChecker(self, self.chk, self.msg) def visit_name_expr(self, e: NameExpr) -> Type: """Type check a name expression. It can be of any kind: local, member or global. """ self.chk.module_refs.update(extract_refexpr_names(e)) result = self.analyze_ref_expr(e) return self.narrow_type_from_binder(e, result) def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type: result = None # type: Optional[Type] node = e.node if isinstance(node, Var): # Variable reference. result = self.analyze_var_ref(node, e) if isinstance(result, PartialType): if result.type is None: # 'None' partial type. It has a well-defined type. In an lvalue context # we want to preserve the knowledge of it being a partial type. if not lvalue: result = NoneTyp() else: partial_types = self.chk.find_partial_types(node) if partial_types is not None and not self.chk.current_node_deferred: context = partial_types[node] self.msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context) result = AnyType(TypeOfAny.special_form) elif isinstance(node, FuncDef): # Reference to a global function. result = function_type(node, self.named_type('builtins.function')) elif isinstance(node, OverloadedFuncDef) and node.type is not None: # node.type is None when there are multiple definitions of a function # and it's decorated by somthing that is not typing.overload result = node.type elif isinstance(node, TypeInfo): # Reference to a type object. 
result = type_object_type(node, self.named_type) if isinstance(self.type_context[-1], TypeType): # This is the type in a Type[] expression, so substitute type # variables with Any. result = erasetype.erase_typevars(result) elif isinstance(node, MypyFile): # Reference to a module object. try: result = self.named_type('types.ModuleType') except KeyError: # In test cases might 'types' may not be available. # Fall back to a dummy 'object' type instead to # avoid a crash. result = self.named_type('builtins.object') elif isinstance(node, Decorator): result = self.analyze_var_ref(node.var, e) else: # Unknown reference; use any type implicitly to avoid # generating extra type errors. result = AnyType(TypeOfAny.from_error) assert result is not None return result def analyze_var_ref(self, var: Var, context: Context) -> Type: if var.type: return var.type else: if not var.is_ready and self.chk.in_checked_function(): self.chk.handle_cannot_determine_type(var.name(), context) # Implicit 'Any' type. return AnyType(TypeOfAny.special_form) def visit_call_expr(self, e: CallExpr, allow_none_return: bool = False) -> Type: """Type check a call expression.""" if e.analyzed: # It's really a special form that only looks like a call. return self.accept(e.analyzed, self.type_context[-1]) if isinstance(e.callee, NameExpr) and isinstance(e.callee.node, TypeInfo) and \ e.callee.node.typeddict_type is not None: # Use named fallback for better error messages. typeddict_type = e.callee.node.typeddict_type.copy_modified( fallback=Instance(e.callee.node, [])) return self.check_typeddict_call(typeddict_type, e.arg_kinds, e.arg_names, e.args, e) if (isinstance(e.callee, NameExpr) and e.callee.name in ('isinstance', 'issubclass') and len(e.args) == 2): for typ in mypy.checker.flatten(e.args[1]): if isinstance(typ, NameExpr): node = None try: node = self.chk.lookup_qualified(typ.name) except KeyError: # Undefined names should already be reported in semantic analysis. pass if ((isinstance(typ, IndexExpr) and isinstance(typ.analyzed, (TypeApplication, TypeAliasExpr))) # node.kind == TYPE_ALIAS only for aliases like It = Iterable[int]. or (isinstance(typ, NameExpr) and node and node.kind == nodes.TYPE_ALIAS)): self.msg.type_arguments_not_allowed(e) if isinstance(typ, RefExpr) and isinstance(typ.node, TypeInfo): if typ.node.typeddict_type: self.msg.fail(messages.CANNOT_ISINSTANCE_TYPEDDICT, e) elif typ.node.is_newtype: self.msg.fail(messages.CANNOT_ISINSTANCE_NEWTYPE, e) self.try_infer_partial_type(e) type_context = None if isinstance(e.callee, LambdaExpr): formal_to_actual = map_actuals_to_formals( e.arg_kinds, e.arg_names, e.callee.arg_kinds, e.callee.arg_names, lambda i: self.accept(e.args[i])) arg_types = [join.join_type_list([self.accept(e.args[j]) for j in formal_to_actual[i]]) for i in range(len(e.callee.arg_kinds))] type_context = CallableType(arg_types, e.callee.arg_kinds, e.callee.arg_names, ret_type=self.object_type(), fallback=self.named_type('builtins.function')) callee_type = self.accept(e.callee, type_context, always_allow_any=True) if (self.chk.options.disallow_untyped_calls and self.chk.in_checked_function() and isinstance(callee_type, CallableType) and callee_type.implicit): return self.msg.untyped_function_call(callee_type, e) # Figure out the full name of the callee for plugin lookup. 
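        # A minimal sketch of a user plugin that the hooks consulted here would pick
        # up (the plugin entry point and the FunctionContext usage follow how this
        # module calls them; 'mylib.make_widget' and ExamplePlugin are hypothetical
        # names):
        #
        #     from mypy.plugin import Plugin, FunctionContext
        #     from mypy.types import Type
        #
        #     class ExamplePlugin(Plugin):
        #         def get_function_hook(self, fullname: str):
        #             if fullname == 'mylib.make_widget':  # hypothetical fullname
        #                 return widget_hook
        #             return None
        #
        #     def widget_hook(ctx: FunctionContext) -> Type:
        #         # Keep the declared return type; a real hook could compute a more
        #         # precise type from ctx.arg_types / ctx.args.
        #         return ctx.default_return_type
        #
        #     def plugin(version: str):
        #         return ExamplePlugin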
object_type = None if not isinstance(e.callee, RefExpr): fullname = None else: fullname = e.callee.fullname if (fullname is None and isinstance(e.callee, MemberExpr) and isinstance(callee_type, FunctionLike)): # For method calls we include the defining class for the method # in the full name (example: 'typing.Mapping.get'). callee_expr_type = self.chk.type_map.get(e.callee.expr) info = None # TODO: Support fallbacks of other kinds of types as well? if isinstance(callee_expr_type, Instance): info = callee_expr_type.type elif isinstance(callee_expr_type, TypedDictType): info = callee_expr_type.fallback.type.get_containing_type_info(e.callee.name) if info: fullname = '{}.{}'.format(info.fullname(), e.callee.name) object_type = callee_expr_type # Apply plugin signature hook that may generate a better signature. signature_hook = self.plugin.get_method_signature_hook(fullname) if signature_hook: assert object_type is not None callee_type = self.apply_method_signature_hook( e, callee_type, object_type, signature_hook) ret_type = self.check_call_expr_with_callee_type(callee_type, e, fullname, object_type) if isinstance(e.callee, RefExpr) and len(e.args) == 2: if e.callee.fullname in ('builtins.isinstance', 'builtins.issubclass'): self.check_runtime_protocol_test(e) if e.callee.fullname == 'builtins.issubclass': self.check_protocol_issubclass(e) if isinstance(ret_type, UninhabitedType) and not ret_type.ambiguous: self.chk.binder.unreachable() if not allow_none_return and isinstance(ret_type, NoneTyp): self.chk.msg.does_not_return_value(callee_type, e) return AnyType(TypeOfAny.from_error) return ret_type def check_runtime_protocol_test(self, e: CallExpr) -> None: for expr in mypy.checker.flatten(e.args[1]): tp = self.chk.type_map[expr] if (isinstance(tp, CallableType) and tp.is_type_obj() and tp.type_object().is_protocol and not tp.type_object().runtime_protocol): self.chk.fail('Only @runtime protocols can be used with' ' instance and class checks', e) def check_protocol_issubclass(self, e: CallExpr) -> None: for expr in mypy.checker.flatten(e.args[1]): tp = self.chk.type_map[expr] if (isinstance(tp, CallableType) and tp.is_type_obj() and tp.type_object().is_protocol): attr_members = non_method_protocol_members(tp.type_object()) if attr_members: self.chk.msg.report_non_method_protocol(tp.type_object(), attr_members, e) def check_typeddict_call(self, callee: TypedDictType, arg_kinds: List[int], arg_names: Sequence[Optional[str]], args: List[Expression], context: Context) -> Type: if len(args) >= 1 and all([ak == ARG_NAMED for ak in arg_kinds]): # ex: Point(x=42, y=1337) assert all(arg_name is not None for arg_name in arg_names) item_names = cast(List[str], arg_names) item_args = args return self.check_typeddict_call_with_kwargs( callee, OrderedDict(zip(item_names, item_args)), context) if len(args) == 1 and arg_kinds[0] == ARG_POS: unique_arg = args[0] if isinstance(unique_arg, DictExpr): # ex: Point({'x': 42, 'y': 1337}) return self.check_typeddict_call_with_dict(callee, unique_arg, context) if isinstance(unique_arg, CallExpr) and isinstance(unique_arg.analyzed, DictExpr): # ex: Point(dict(x=42, y=1337)) return self.check_typeddict_call_with_dict(callee, unique_arg.analyzed, context) if len(args) == 0: # ex: EmptyDict() return self.check_typeddict_call_with_kwargs( callee, OrderedDict(), context) self.chk.fail(messages.INVALID_TYPEDDICT_ARGS, context) return AnyType(TypeOfAny.from_error) def check_typeddict_call_with_dict(self, callee: TypedDictType, kwargs: DictExpr, context: Context) -> Type: 
item_name_exprs = [item[0] for item in kwargs.items] item_args = [item[1] for item in kwargs.items] item_names = [] # List[str] for item_name_expr in item_name_exprs: if not isinstance(item_name_expr, StrExpr): self.chk.fail(messages.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, item_name_expr) return AnyType(TypeOfAny.from_error) item_names.append(item_name_expr.value) return self.check_typeddict_call_with_kwargs( callee, OrderedDict(zip(item_names, item_args)), context) def check_typeddict_call_with_kwargs(self, callee: TypedDictType, kwargs: 'OrderedDict[str, Expression]', context: Context) -> Type: if not (callee.required_keys <= set(kwargs.keys()) <= set(callee.items.keys())): expected_keys = [key for key in callee.items.keys() if key in callee.required_keys or key in kwargs.keys()] actual_keys = kwargs.keys() self.msg.unexpected_typeddict_keys( callee, expected_keys=expected_keys, actual_keys=list(actual_keys), context=context) return AnyType(TypeOfAny.from_error) for (item_name, item_expected_type) in callee.items.items(): if item_name in kwargs: item_value = kwargs[item_name] self.chk.check_simple_assignment( lvalue_type=item_expected_type, rvalue=item_value, context=item_value, msg=messages.INCOMPATIBLE_TYPES, lvalue_name='TypedDict item "{}"'.format(item_name), rvalue_name='expression') return callee # Types and methods that can be used to infer partial types. item_args = {'builtins.list': ['append'], 'builtins.set': ['add', 'discard'], } container_args = {'builtins.list': {'extend': ['builtins.list']}, 'builtins.dict': {'update': ['builtins.dict']}, 'builtins.set': {'update': ['builtins.set', 'builtins.list']}, } def try_infer_partial_type(self, e: CallExpr) -> None: if isinstance(e.callee, MemberExpr) and isinstance(e.callee.expr, RefExpr): var = cast(Var, e.callee.expr.node) partial_types = self.chk.find_partial_types(var) if partial_types is not None and not self.chk.current_node_deferred: partial_type = var.type if (partial_type is None or not isinstance(partial_type, PartialType) or partial_type.type is None): # A partial None type -> can't infer anything. return typename = partial_type.type.fullname() methodname = e.callee.name # Sometimes we can infer a full type for a partial List, Dict or Set type. # TODO: Don't infer argument expression twice. 
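                # For illustration, the kind of inference this enables (using the
                # item_args/container_args tables above), assuming default options:
                #
                #     x = []                 # x has the partial type List[?]
                #     x.append(1)            # 'append' is in item_args -> List[int]
                #
                #     d = {}                 # partial Dict[?, ?]
                #     d.update({'k': 1.0})   # 'update' is in container_args -> Dict[str, float]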
if (typename in self.item_args and methodname in self.item_args[typename] and e.arg_kinds == [ARG_POS]): item_type = self.accept(e.args[0]) full_item_type = UnionType.make_simplified_union( [item_type, partial_type.inner_types[0]]) if mypy.checker.is_valid_inferred_type(full_item_type): var.type = self.chk.named_generic_type(typename, [full_item_type]) del partial_types[var] elif (typename in self.container_args and methodname in self.container_args[typename] and e.arg_kinds == [ARG_POS]): arg_type = self.accept(e.args[0]) if isinstance(arg_type, Instance): arg_typename = arg_type.type.fullname() if arg_typename in self.container_args[typename][methodname]: full_item_types = [ UnionType.make_simplified_union([item_type, prev_type]) for item_type, prev_type in zip(arg_type.args, partial_type.inner_types) ] if all(mypy.checker.is_valid_inferred_type(item_type) for item_type in full_item_types): var.type = self.chk.named_generic_type(typename, list(full_item_types)) del partial_types[var] def apply_function_plugin(self, arg_types: List[Type], inferred_ret_type: Type, arg_kinds: List[int], formal_to_actual: List[List[int]], args: List[Expression], num_formals: int, fullname: str, object_type: Optional[Type], context: Context) -> Type: """Use special case logic to infer the return type of a specific named function/method. Caller must ensure that a plugin hook exists. There are two different cases: - If object_type is None, the caller must ensure that a function hook exists for fullname. - If object_type is not None, the caller must ensure that a method hook exists for fullname. Return the inferred return type. """ formal_arg_types = [[] for _ in range(num_formals)] # type: List[List[Type]] formal_arg_exprs = [[] for _ in range(num_formals)] # type: List[List[Expression]] for formal, actuals in enumerate(formal_to_actual): for actual in actuals: formal_arg_types[formal].append(arg_types[actual]) formal_arg_exprs[formal].append(args[actual]) if object_type is None: # Apply function plugin callback = self.plugin.get_function_hook(fullname) assert callback is not None # Assume that caller ensures this return callback( FunctionContext(formal_arg_types, inferred_ret_type, formal_arg_exprs, context, self.chk)) else: # Apply method plugin method_callback = self.plugin.get_method_hook(fullname) assert method_callback is not None # Assume that caller ensures this return method_callback( MethodContext(object_type, formal_arg_types, inferred_ret_type, formal_arg_exprs, context, self.chk)) def apply_method_signature_hook( self, e: CallExpr, callee: FunctionLike, object_type: Type, signature_hook: Callable[[MethodSigContext], CallableType]) -> FunctionLike: """Apply a plugin hook that may infer a more precise signature for a method.""" if isinstance(callee, CallableType): arg_kinds = e.arg_kinds arg_names = e.arg_names args = e.args num_formals = len(callee.arg_kinds) formal_to_actual = map_actuals_to_formals( arg_kinds, arg_names, callee.arg_kinds, callee.arg_names, lambda i: self.accept(args[i])) formal_arg_exprs = [[] for _ in range(num_formals)] # type: List[List[Expression]] for formal, actuals in enumerate(formal_to_actual): for actual in actuals: formal_arg_exprs[formal].append(args[actual]) return signature_hook( MethodSigContext(object_type, formal_arg_exprs, callee, e, self.chk)) else: assert isinstance(callee, Overloaded) items = [] for item in callee.items(): adjusted = self.apply_method_signature_hook(e, item, object_type, signature_hook) assert isinstance(adjusted, CallableType) 
items.append(adjusted) return Overloaded(items) def check_call_expr_with_callee_type(self, callee_type: Type, e: CallExpr, callable_name: Optional[str], object_type: Optional[Type]) -> Type: """Type check call expression. The given callee type overrides the type of the callee expression. """ return self.check_call(callee_type, e.args, e.arg_kinds, e, e.arg_names, callable_node=e.callee, callable_name=callable_name, object_type=object_type)[0] def check_call(self, callee: Type, args: List[Expression], arg_kinds: List[int], context: Context, arg_names: Optional[Sequence[Optional[str]]] = None, callable_node: Optional[Expression] = None, arg_messages: Optional[MessageBuilder] = None, callable_name: Optional[str] = None, object_type: Optional[Type] = None) -> Tuple[Type, Type]: """Type check a call. Also infer type arguments if the callee is a generic function. Return (result type, inferred callee type). Arguments: callee: type of the called value args: actual argument expressions arg_kinds: contains nodes.ARG_* constant for each argument in args describing whether the argument is positional, *arg, etc. arg_names: names of arguments (optional) callable_node: associate the inferred callable type to this node, if specified arg_messages: TODO callable_name: Fully-qualified name of the function/method to call, or None if unavaiable (examples: 'builtins.open', 'typing.Mapping.get') object_type: If callable_name refers to a method, the type of the object on which the method is being called """ arg_messages = arg_messages or self.msg if isinstance(callee, CallableType): if callable_name is None and callee.name: callable_name = callee.name if (isinstance(callable_node, RefExpr) and callable_node.fullname in ('enum.Enum', 'enum.IntEnum', 'enum.Flag', 'enum.IntFlag')): # An Enum() call that failed SemanticAnalyzerPass2.check_enum_call(). return callee.ret_type, callee if (callee.is_type_obj() and callee.type_object().is_abstract # Exceptions for Type[...] and classmethod first argument and not callee.from_type_type and not callee.is_classmethod_class and not callee.type_object().fallback_to_any): type = callee.type_object() self.msg.cannot_instantiate_abstract_class( callee.type_object().name(), type.abstract_attributes, context) elif (callee.is_type_obj() and callee.type_object().is_protocol # Exceptions for Type[...] and classmethod first argument and not callee.from_type_type and not callee.is_classmethod_class): self.chk.fail('Cannot instantiate protocol class "{}"' .format(callee.type_object().name()), context) formal_to_actual = map_actuals_to_formals( arg_kinds, arg_names, callee.arg_kinds, callee.arg_names, lambda i: self.accept(args[i])) if callee.is_generic(): callee = freshen_function_type_vars(callee) callee = self.infer_function_type_arguments_using_context( callee, context) callee = self.infer_function_type_arguments( callee, args, arg_kinds, formal_to_actual, context) arg_types = self.infer_arg_types_in_context2( callee, args, arg_kinds, formal_to_actual) self.check_argument_count(callee, arg_types, arg_kinds, arg_names, formal_to_actual, context, self.msg) self.check_argument_types(arg_types, arg_kinds, callee, formal_to_actual, context, messages=arg_messages) if (callee.is_type_obj() and (len(arg_types) == 1) and is_equivalent(callee.ret_type, self.named_type('builtins.type'))): callee = callee.copy_modified(ret_type=TypeType.make_normalized(arg_types[0])) if callable_node: # Store the inferred callable type. 
self.chk.store_type(callable_node, callee) if (callable_name and ((object_type is None and self.plugin.get_function_hook(callable_name)) or (object_type is not None and self.plugin.get_method_hook(callable_name)))): ret_type = self.apply_function_plugin( arg_types, callee.ret_type, arg_kinds, formal_to_actual, args, len(callee.arg_types), callable_name, object_type, context) callee = callee.copy_modified(ret_type=ret_type) return callee.ret_type, callee elif isinstance(callee, Overloaded): # Type check arguments in empty context. They will be checked again # later in a context derived from the signature; these types are # only used to pick a signature variant. self.msg.disable_errors() arg_types = self.infer_arg_types_in_context(None, args) self.msg.enable_errors() target = self.overload_call_target(arg_types, arg_kinds, arg_names, callee, context, messages=arg_messages) return self.check_call(target, args, arg_kinds, context, arg_names, arg_messages=arg_messages, callable_name=callable_name, object_type=object_type) elif isinstance(callee, AnyType) or not self.chk.in_checked_function(): self.infer_arg_types_in_context(None, args) if isinstance(callee, AnyType): return (AnyType(TypeOfAny.from_another_any, source_any=callee), AnyType(TypeOfAny.from_another_any, source_any=callee)) else: return AnyType(TypeOfAny.special_form), AnyType(TypeOfAny.special_form) elif isinstance(callee, UnionType): self.msg.disable_type_names += 1 results = [self.check_call(subtype, args, arg_kinds, context, arg_names, arg_messages=arg_messages) for subtype in callee.relevant_items()] self.msg.disable_type_names -= 1 return (UnionType.make_simplified_union([res[0] for res in results]), callee) elif isinstance(callee, Instance): call_function = analyze_member_access('__call__', callee, context, False, False, False, self.named_type, self.not_ready_callback, self.msg, original_type=callee, chk=self.chk) return self.check_call(call_function, args, arg_kinds, context, arg_names, callable_node, arg_messages) elif isinstance(callee, TypeVarType): return self.check_call(callee.upper_bound, args, arg_kinds, context, arg_names, callable_node, arg_messages) elif isinstance(callee, TypeType): # Pass the original Type[] as context since that's where errors should go. item = self.analyze_type_type_callee(callee.item, callee) return self.check_call(item, args, arg_kinds, context, arg_names, callable_node, arg_messages) else: return self.msg.not_callable(callee, context), AnyType(TypeOfAny.from_error) def analyze_type_type_callee(self, item: Type, context: Context) -> Type: """Analyze the callee X in X(...) where X is Type[item]. Return a Y that we can pass to check_call(Y, ...). """ if isinstance(item, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=item) if isinstance(item, Instance): res = type_object_type(item.type, self.named_type) if isinstance(res, CallableType): res = res.copy_modified(from_type_type=True) return expand_type_by_instance(res, item) if isinstance(item, UnionType): return UnionType([self.analyze_type_type_callee(item, context) for item in item.relevant_items()], item.line) if isinstance(item, TypeVarType): # Pretend we're calling the typevar's upper bound, # i.e. its constructor (a poor approximation for reality, # but better than AnyType...), but replace the return type # with typevar. 
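            # For illustration, this is the case exercised by code such as
            #
            #     T = TypeVar('T', bound=Widget)   # 'Widget' is a stand-in class
            #
            #     def make(cls: Type[T]) -> T:
            #         return cls()   # checked against Widget's constructor, typed as T
            #
            # where the call is checked against the upper bound's constructor but the
            # result keeps the type variable itself.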
callee = self.analyze_type_type_callee(item.upper_bound, context) # type: Optional[Type] if isinstance(callee, CallableType): if callee.is_generic(): callee = None else: callee = callee.copy_modified(ret_type=item) elif isinstance(callee, Overloaded): if callee.items()[0].is_generic(): callee = None else: callee = Overloaded([c.copy_modified(ret_type=item) for c in callee.items()]) if callee: return callee self.msg.unsupported_type_type(item, context) return AnyType(TypeOfAny.from_error) def infer_arg_types_in_context(self, callee: Optional[CallableType], args: List[Expression]) -> List[Type]: """Infer argument expression types using a callable type as context. For example, if callee argument 2 has type List[int], infer the argument expression with List[int] type context. """ # TODO Always called with callee as None, i.e. empty context. res = [] # type: List[Type] fixed = len(args) if callee: fixed = min(fixed, callee.max_fixed_args()) ctx = None for i, arg in enumerate(args): if i < fixed: if callee and i < len(callee.arg_types): ctx = callee.arg_types[i] arg_type = self.accept(arg, ctx) else: if callee and callee.is_var_arg: arg_type = self.accept(arg, callee.arg_types[-1]) else: arg_type = self.accept(arg) if has_erased_component(arg_type): res.append(NoneTyp()) else: res.append(arg_type) return res def infer_arg_types_in_context2( self, callee: CallableType, args: List[Expression], arg_kinds: List[int], formal_to_actual: List[List[int]]) -> List[Type]: """Infer argument expression types using a callable type as context. For example, if callee argument 2 has type List[int], infer the argument expression with List[int] type context. Returns the inferred types of *actual arguments*. """ res = [None] * len(args) # type: List[Optional[Type]] for i, actuals in enumerate(formal_to_actual): for ai in actuals: if arg_kinds[ai] not in (nodes.ARG_STAR, nodes.ARG_STAR2): res[ai] = self.accept(args[ai], callee.arg_types[i]) # Fill in the rest of the argument types. for i, t in enumerate(res): if not t: res[i] = self.accept(args[i]) assert all(tp is not None for tp in res) return cast(List[Type], res) def infer_function_type_arguments_using_context( self, callable: CallableType, error_context: Context) -> CallableType: """Unify callable return type to type context to infer type vars. For example, if the return type is set[t] where 't' is a type variable of callable, and if the context is set[int], return callable modified by substituting 't' with 'int'. """ ctx = self.type_context[-1] if not ctx: return callable # The return type may have references to type metavariables that # we are inferring right now. We must consider them as indeterminate # and they are not potential results; thus we replace them with the # special ErasedType type. On the other hand, class type variables are # valid results. erased_ctx = replace_meta_vars(ctx, ErasedType()) ret_type = callable.ret_type if isinstance(ret_type, TypeVarType): if ret_type.values or (not isinstance(ctx, Instance) or not ctx.args): # The return type is a type variable. If it has values, we can't easily restrict # type inference to conform to the valid values. If it's unrestricted, we could # infer a too general type for the type variable if we use context, and this could # result in confusing and spurious type errors elsewhere. # # Give up and just use function arguments for type inference. As an exception, # if the context is a generic instance type, actually use it as context, as # this *seems* to usually be the reasonable thing to do. 
# # See also github issues #462 and #360. ret_type = NoneTyp() args = infer_type_arguments(callable.type_var_ids(), ret_type, erased_ctx) # Only substitute non-Uninhabited and non-erased types. new_args = [] # type: List[Optional[Type]] for arg in args: if has_uninhabited_component(arg) or has_erased_component(arg): new_args.append(None) else: new_args.append(arg) return self.apply_generic_arguments(callable, new_args, error_context) def infer_function_type_arguments(self, callee_type: CallableType, args: List[Expression], arg_kinds: List[int], formal_to_actual: List[List[int]], context: Context) -> CallableType: """Infer the type arguments for a generic callee type. Infer based on the types of arguments. Return a derived callable type that has the arguments applied. """ if self.chk.in_checked_function(): # Disable type errors during type inference. There may be errors # due to partial available context information at this time, but # these errors can be safely ignored as the arguments will be # inferred again later. self.msg.disable_errors() arg_types = self.infer_arg_types_in_context2( callee_type, args, arg_kinds, formal_to_actual) self.msg.enable_errors() arg_pass_nums = self.get_arg_infer_passes( callee_type.arg_types, formal_to_actual, len(args)) pass1_args = [] # type: List[Optional[Type]] for i, arg in enumerate(arg_types): if arg_pass_nums[i] > 1: pass1_args.append(None) else: pass1_args.append(arg) inferred_args = infer_function_type_arguments( callee_type, pass1_args, arg_kinds, formal_to_actual, strict=self.chk.in_checked_function()) if 2 in arg_pass_nums: # Second pass of type inference. (callee_type, inferred_args) = self.infer_function_type_arguments_pass2( callee_type, args, arg_kinds, formal_to_actual, inferred_args, context) if callee_type.special_sig == 'dict' and len(inferred_args) == 2 and ( ARG_NAMED in arg_kinds or ARG_STAR2 in arg_kinds): # HACK: Infer str key type for dict(...) with keyword args. The type system # can't represent this so we special case it, as this is a pretty common # thing. This doesn't quite work with all possible subclasses of dict # if they shuffle type variables around, as we assume that there is a 1-1 # correspondence with dict type variables. This is a marginal issue and # a little tricky to fix so it's left unfixed for now. first_arg = inferred_args[0] if isinstance(first_arg, (NoneTyp, UninhabitedType)): inferred_args[0] = self.named_type('builtins.str') elif not first_arg or not is_subtype(self.named_type('builtins.str'), first_arg): self.msg.fail(messages.KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE, context) else: # In dynamically typed functions use implicit 'Any' types for # type variables. inferred_args = [AnyType(TypeOfAny.unannotated)] * len(callee_type.variables) return self.apply_inferred_arguments(callee_type, inferred_args, context) def infer_function_type_arguments_pass2( self, callee_type: CallableType, args: List[Expression], arg_kinds: List[int], formal_to_actual: List[List[int]], old_inferred_args: Sequence[Optional[Type]], context: Context) -> Tuple[CallableType, List[Optional[Type]]]: """Perform second pass of generic function type argument inference. The second pass is needed for arguments with types such as Callable[[T], S], where both T and S are type variables, when the actual argument is a lambda with inferred types. The idea is to infer the type variable T in the first pass (based on the types of other arguments). 
This lets us infer the argument and return type of the lambda expression and thus also the type variable S in this second pass. Return (the callee with type vars applied, inferred actual arg types). """ # None or erased types in inferred types mean that there was not enough # information to infer the argument. Replace them with None values so # that they are not applied yet below. inferred_args = list(old_inferred_args) for i, arg in enumerate(inferred_args): if isinstance(arg, (NoneTyp, UninhabitedType)) or has_erased_component(arg): inferred_args[i] = None callee_type = self.apply_generic_arguments(callee_type, inferred_args, context) arg_types = self.infer_arg_types_in_context2( callee_type, args, arg_kinds, formal_to_actual) inferred_args = infer_function_type_arguments( callee_type, arg_types, arg_kinds, formal_to_actual) return callee_type, inferred_args def get_arg_infer_passes(self, arg_types: List[Type], formal_to_actual: List[List[int]], num_actuals: int) -> List[int]: """Return pass numbers for args for two-pass argument type inference. For each actual, the pass number is either 1 (first pass) or 2 (second pass). Two-pass argument type inference primarily lets us infer types of lambdas more effectively. """ res = [1] * num_actuals for i, arg in enumerate(arg_types): if arg.accept(ArgInferSecondPassQuery()): for j in formal_to_actual[i]: res[j] = 2 return res def apply_inferred_arguments(self, callee_type: CallableType, inferred_args: Sequence[Optional[Type]], context: Context) -> CallableType: """Apply inferred values of type arguments to a generic function. Inferred_args contains the values of function type arguments. """ # Report error if some of the variables could not be solved. In that # case assume that all variables have type Any to avoid extra # bogus error messages. for i, inferred_type in enumerate(inferred_args): if not inferred_type or has_erased_component(inferred_type): # Could not infer a non-trivial type for a type variable. self.msg.could_not_infer_type_arguments( callee_type, i + 1, context) inferred_args = [AnyType(TypeOfAny.from_error)] * len(inferred_args) # Apply the inferred types to the function type. In this case the # return type must be CallableType, since we give the right number of type # arguments. return self.apply_generic_arguments(callee_type, inferred_args, context) def check_argument_count(self, callee: CallableType, actual_types: List[Type], actual_kinds: List[int], actual_names: Optional[Sequence[Optional[str]]], formal_to_actual: List[List[int]], context: Optional[Context], messages: Optional[MessageBuilder]) -> bool: """Check that there is a value for all required arguments to a function. Also check that there are no duplicate values for arguments. Report found errors using 'messages' if it's not None. If 'messages' is given, 'context' must also be given. Return False if there were any errors. Otherwise return True """ # TODO(jukka): We could return as soon as we find an error if messages is None. formal_kinds = callee.arg_kinds # Collect list of all actual arguments matched to formal arguments. all_actuals = [] # type: List[int] for actuals in formal_to_actual: all_actuals.extend(actuals) is_unexpected_arg_error = False # Keep track of errors to avoid duplicate errors. ok = True # False if we've found any error. for i, kind in enumerate(actual_kinds): if i not in all_actuals and ( kind != nodes.ARG_STAR or not is_empty_tuple(actual_types[i])): # Extra actual: not matched by a formal argument. 
ok = False if kind != nodes.ARG_NAMED: if messages: assert context, "Internal error: messages given without context" messages.too_many_arguments(callee, context) else: if messages: assert context, "Internal error: messages given without context" assert actual_names, "Internal error: named kinds without names given" act_name = actual_names[i] assert act_name is not None messages.unexpected_keyword_argument( callee, act_name, context) is_unexpected_arg_error = True elif kind == nodes.ARG_STAR and ( nodes.ARG_STAR not in formal_kinds): actual_type = actual_types[i] if isinstance(actual_type, TupleType): if all_actuals.count(i) < len(actual_type.items): # Too many tuple items as some did not match. if messages: assert context, "Internal error: messages given without context" messages.too_many_arguments(callee, context) ok = False # *args can be applied even if the function takes a fixed # number of positional arguments. This may succeed at runtime. for i, kind in enumerate(formal_kinds): if kind == nodes.ARG_POS and (not formal_to_actual[i] and not is_unexpected_arg_error): # No actual for a mandatory positional formal. if messages: assert context, "Internal error: messages given without context" messages.too_few_arguments(callee, context, actual_names) ok = False elif kind == nodes.ARG_NAMED and (not formal_to_actual[i] and not is_unexpected_arg_error): # No actual for a mandatory named formal if messages: argname = callee.arg_names[i] assert argname is not None assert context, "Internal error: messages given without context" messages.missing_named_argument(callee, context, argname) ok = False elif kind in [nodes.ARG_POS, nodes.ARG_OPT, nodes.ARG_NAMED, nodes.ARG_NAMED_OPT] and is_duplicate_mapping( formal_to_actual[i], actual_kinds): if (self.chk.in_checked_function() or isinstance(actual_types[formal_to_actual[i][0]], TupleType)): if messages: assert context, "Internal error: messages given without context" messages.duplicate_argument_value(callee, i, context) ok = False elif (kind in (nodes.ARG_NAMED, nodes.ARG_NAMED_OPT) and formal_to_actual[i] and actual_kinds[formal_to_actual[i][0]] not in [nodes.ARG_NAMED, nodes.ARG_STAR2]): # Positional argument when expecting a keyword argument. if messages: assert context, "Internal error: messages given without context" messages.too_many_positional_arguments(callee, context) ok = False return ok def check_argument_types(self, arg_types: List[Type], arg_kinds: List[int], callee: CallableType, formal_to_actual: List[List[int]], context: Context, messages: Optional[MessageBuilder] = None, check_arg: Optional[ArgChecker] = None) -> None: """Check argument types against a callable type. Report errors if the argument types are not compatible. """ messages = messages or self.msg check_arg = check_arg or self.check_arg # Keep track of consumed tuple *arg items. tuple_counter = [0] for i, actuals in enumerate(formal_to_actual): for actual in actuals: arg_type = arg_types[actual] if arg_type is None: continue # Some kind of error was already reported. # Check that a *arg is valid as varargs. if (arg_kinds[actual] == nodes.ARG_STAR and not self.is_valid_var_arg(arg_type)): messages.invalid_var_arg(arg_type, context) if (arg_kinds[actual] == nodes.ARG_STAR2 and not self.is_valid_keyword_var_arg(arg_type)): is_mapping = is_subtype(arg_type, self.chk.named_type('typing.Mapping')) messages.invalid_keyword_var_arg(arg_type, is_mapping, context) # Get the type of an individual actual argument (for *args # and **args this is the item type, not the collection type). 
if (isinstance(arg_type, TupleType) and tuple_counter[0] >= len(arg_type.items) and arg_kinds[actual] == nodes.ARG_STAR): # The tuple is exhausted. Continue with further arguments. continue actual_type = get_actual_type(arg_type, arg_kinds[actual], tuple_counter) check_arg(actual_type, arg_type, arg_kinds[actual], callee.arg_types[i], actual + 1, i + 1, callee, context, messages) # There may be some remaining tuple varargs items that haven't # been checked yet. Handle them. tuplet = arg_types[actual] if (callee.arg_kinds[i] == nodes.ARG_STAR and arg_kinds[actual] == nodes.ARG_STAR and isinstance(tuplet, TupleType)): while tuple_counter[0] < len(tuplet.items): actual_type = get_actual_type(arg_type, arg_kinds[actual], tuple_counter) check_arg(actual_type, arg_type, arg_kinds[actual], callee.arg_types[i], actual + 1, i + 1, callee, context, messages) def check_arg(self, caller_type: Type, original_caller_type: Type, caller_kind: int, callee_type: Type, n: int, m: int, callee: CallableType, context: Context, messages: MessageBuilder) -> None: """Check the type of a single argument in a call.""" if isinstance(caller_type, DeletedType): messages.deleted_as_rvalue(caller_type, context) # Only non-abstract non-protocol class can be given where Type[...] is expected... elif (isinstance(caller_type, CallableType) and isinstance(callee_type, TypeType) and caller_type.is_type_obj() and (caller_type.type_object().is_abstract or caller_type.type_object().is_protocol) and isinstance(callee_type.item, Instance) and (callee_type.item.type.is_abstract or callee_type.item.type.is_protocol) and # ...except for classmethod first argument not caller_type.is_classmethod_class): self.msg.concrete_only_call(callee_type, context) elif not is_subtype(caller_type, callee_type): if self.chk.should_suppress_optional_error([caller_type, callee_type]): return messages.incompatible_argument(n, m, callee, original_caller_type, caller_kind, context) if (isinstance(original_caller_type, (Instance, TupleType, TypedDictType)) and isinstance(callee_type, Instance) and callee_type.type.is_protocol): self.msg.report_protocol_problems(original_caller_type, callee_type, context) if (isinstance(callee_type, CallableType) and isinstance(original_caller_type, Instance)): call = find_member('__call__', original_caller_type, original_caller_type) if call: self.msg.note_call(original_caller_type, call, context) def overload_call_target(self, arg_types: List[Type], arg_kinds: List[int], arg_names: Optional[Sequence[Optional[str]]], overload: Overloaded, context: Context, messages: Optional[MessageBuilder] = None) -> Type: """Infer the correct overload item to call with given argument types. The return value may be CallableType or AnyType (if an unique item could not be determined). """ messages = messages or self.msg # TODO: For overlapping signatures we should try to get a more precise # result than 'Any'. match = [] # type: List[CallableType] best_match = 0 for typ in overload.items(): similarity = self.erased_signature_similarity(arg_types, arg_kinds, arg_names, typ, context=context) if similarity > 0 and similarity >= best_match: if (match and not is_same_type(match[-1].ret_type, typ.ret_type) and (not mypy.checker.is_more_precise_signature(match[-1], typ) or (any(isinstance(arg, AnyType) for arg in arg_types) and any_arg_causes_overload_ambiguity( match + [typ], arg_types, arg_kinds, arg_names)))): # Ambiguous return type. 
Either the function overload is # overlapping (which we don't handle very well here) or the # caller has provided some Any argument types; in either # case we'll fall back to Any. It's okay to use Any types # in calls. # # Overlapping overload items are generally fine if the # overlapping is only possible when there is multiple # inheritance, as this is rare. See docstring of # mypy.meet.is_overlapping_types for more about this. # # Note that there is no ambiguity if the items are # covariant in both argument types and return types with # respect to type precision. We'll pick the best/closest # match. # # TODO: Consider returning a union type instead if the # overlapping is NOT due to Any types? return AnyType(TypeOfAny.special_form) else: match.append(typ) best_match = max(best_match, similarity) if not match: if not self.chk.should_suppress_optional_error(arg_types): messages.no_variant_matches_arguments(overload, arg_types, context) return AnyType(TypeOfAny.from_error) else: if len(match) == 1: return match[0] else: # More than one signature matches. Pick the first *non-erased* # matching signature, or default to the first one if none # match. for m in match: if self.match_signature_types(arg_types, arg_kinds, arg_names, m, context=context): return m return match[0] def erased_signature_similarity(self, arg_types: List[Type], arg_kinds: List[int], arg_names: Optional[Sequence[Optional[str]]], callee: CallableType, context: Context) -> int: """Determine whether arguments could match the signature at runtime. Return similarity level (0 = no match, 1 = can match, 2 = non-promotion match). See overload_arg_similarity for a discussion of similarity levels. """ formal_to_actual = map_actuals_to_formals(arg_kinds, arg_names, callee.arg_kinds, callee.arg_names, lambda i: arg_types[i]) if not self.check_argument_count(callee, arg_types, arg_kinds, arg_names, formal_to_actual, None, None): # Too few or many arguments -> no match. return 0 similarity = 2 def check_arg(caller_type: Type, original_caller_type: Type, caller_kind: int, callee_type: Type, n: int, m: int, callee: CallableType, context: Context, messages: MessageBuilder) -> None: nonlocal similarity similarity = min(similarity, overload_arg_similarity(caller_type, callee_type)) if similarity == 0: # No match -- exit early since none of the remaining work can change # the result. raise Finished try: self.check_argument_types(arg_types, arg_kinds, callee, formal_to_actual, context=context, check_arg=check_arg) except Finished: pass return similarity def match_signature_types(self, arg_types: List[Type], arg_kinds: List[int], arg_names: Optional[Sequence[Optional[str]]], callee: CallableType, context: Context) -> bool: """Determine whether arguments types match the signature. Assume that argument counts are compatible. Return True if arguments match. 
""" formal_to_actual = map_actuals_to_formals(arg_kinds, arg_names, callee.arg_kinds, callee.arg_names, lambda i: arg_types[i]) ok = True def check_arg(caller_type: Type, original_caller_type: Type, caller_kind: int, callee_type: Type, n: int, m: int, callee: CallableType, context: Context, messages: MessageBuilder) -> None: nonlocal ok if not is_subtype(caller_type, callee_type): ok = False self.check_argument_types(arg_types, arg_kinds, callee, formal_to_actual, context=context, check_arg=check_arg) return ok def apply_generic_arguments(self, callable: CallableType, types: Sequence[Optional[Type]], context: Context) -> CallableType: """Simple wrapper around mypy.applytype.apply_generic_arguments.""" return applytype.apply_generic_arguments(callable, types, self.msg, context) def visit_member_expr(self, e: MemberExpr) -> Type: """Visit member expression (of form e.id).""" self.chk.module_refs.update(extract_refexpr_names(e)) result = self.analyze_ordinary_member_access(e, False) return self.narrow_type_from_binder(e, result) def analyze_ordinary_member_access(self, e: MemberExpr, is_lvalue: bool) -> Type: """Analyse member expression or member lvalue.""" if e.kind is not None: # This is a reference to a module attribute. return self.analyze_ref_expr(e) else: # This is a reference to a non-module attribute. original_type = self.accept(e.expr) member_type = analyze_member_access( e.name, original_type, e, is_lvalue, False, False, self.named_type, self.not_ready_callback, self.msg, original_type=original_type, chk=self.chk) if is_lvalue: return member_type else: return self.analyze_descriptor_access(original_type, member_type, e) def analyze_descriptor_access(self, instance_type: Type, descriptor_type: Type, context: Context) -> Type: """Type check descriptor access. Arguments: instance_type: The type of the instance on which the descriptor attribute is being accessed (the type of ``a`` in ``a.f`` when ``f`` is a descriptor). descriptor_type: The type of the descriptor attribute being accessed (the type of ``f`` in ``a.f`` when ``f`` is a descriptor). context: The node defining the context of this inference. Return: The return type of the appropriate ``__get__`` overload for the descriptor. 
""" if not isinstance(descriptor_type, Instance): return descriptor_type if not descriptor_type.type.has_readable_member('__get__'): return descriptor_type dunder_get = descriptor_type.type.get_method('__get__') if dunder_get is None: self.msg.fail("{}.__get__ is not callable".format(descriptor_type), context) return AnyType(TypeOfAny.from_error) function = function_type(dunder_get, self.named_type('builtins.function')) bound_method = bind_self(function, descriptor_type) typ = map_instance_to_supertype(descriptor_type, dunder_get.info) dunder_get_type = expand_type_by_instance(bound_method, typ) if isinstance(instance_type, FunctionLike) and instance_type.is_type_obj(): owner_type = instance_type.items()[0].ret_type instance_type = NoneTyp() elif isinstance(instance_type, TypeType): owner_type = instance_type.item instance_type = NoneTyp() else: owner_type = instance_type _, inferred_dunder_get_type = self.check_call( dunder_get_type, [TempNode(instance_type), TempNode(TypeType.make_normalized(owner_type))], [nodes.ARG_POS, nodes.ARG_POS], context) if isinstance(inferred_dunder_get_type, AnyType): # check_call failed, and will have reported an error return inferred_dunder_get_type if not isinstance(inferred_dunder_get_type, CallableType): self.msg.fail("{}.__get__ is not callable".format(descriptor_type), context) return AnyType(TypeOfAny.from_error) return inferred_dunder_get_type.ret_type def analyze_external_member_access(self, member: str, base_type: Type, context: Context) -> Type: """Analyse member access that is external, i.e. it cannot refer to private definitions. Return the result type. """ # TODO remove; no private definitions in mypy return analyze_member_access(member, base_type, context, False, False, False, self.named_type, self.not_ready_callback, self.msg, original_type=base_type, chk=self.chk) def visit_int_expr(self, e: IntExpr) -> Type: """Type check an integer literal (trivial).""" return self.named_type('builtins.int') def visit_str_expr(self, e: StrExpr) -> Type: """Type check a string literal (trivial).""" return self.named_type('builtins.str') def visit_bytes_expr(self, e: BytesExpr) -> Type: """Type check a bytes literal (trivial).""" return self.named_type('builtins.bytes') def visit_unicode_expr(self, e: UnicodeExpr) -> Type: """Type check a unicode literal (trivial).""" return self.named_type('builtins.unicode') def visit_float_expr(self, e: FloatExpr) -> Type: """Type check a float literal (trivial).""" return self.named_type('builtins.float') def visit_complex_expr(self, e: ComplexExpr) -> Type: """Type check a complex literal.""" return self.named_type('builtins.complex') def visit_ellipsis(self, e: EllipsisExpr) -> Type: """Type check '...'.""" if self.chk.options.python_version[0] >= 3: return self.named_type('builtins.ellipsis') else: # '...' is not valid in normal Python 2 code, but it can # be used in stubs. The parser makes sure that we only # get this far if we are in a stub, and we can safely # return 'object' as ellipsis is special cased elsewhere. # The builtins.ellipsis type does not exist in Python 2. return self.named_type('builtins.object') def visit_op_expr(self, e: OpExpr) -> Type: """Type check a binary operator expression.""" if e.op == 'and' or e.op == 'or': return self.check_boolean_op(e, e) if e.op == '*' and isinstance(e.left, ListExpr): # Expressions of form [...] * e get special type inference. 
return self.check_list_multiply(e) if e.op == '%': pyversion = self.chk.options.python_version if pyversion[0] == 3: if isinstance(e.left, BytesExpr) and pyversion[1] >= 5: return self.strfrm_checker.check_str_interpolation(e.left, e.right) if isinstance(e.left, StrExpr): return self.strfrm_checker.check_str_interpolation(e.left, e.right) elif pyversion[0] <= 2: if isinstance(e.left, (StrExpr, BytesExpr, UnicodeExpr)): return self.strfrm_checker.check_str_interpolation(e.left, e.right) left_type = self.accept(e.left) if e.op in nodes.op_methods: method = self.get_operator_method(e.op) result, method_type = self.check_op(method, left_type, e.right, e, allow_reverse=True) e.method_type = method_type return result else: raise RuntimeError('Unknown operator {}'.format(e.op)) def visit_comparison_expr(self, e: ComparisonExpr) -> Type: """Type check a comparison expression. Comparison expressions are type checked consecutive-pair-wise That is, 'a < b > c == d' is check as 'a < b and b > c and c == d' """ result = None # Check each consecutive operand pair and their operator for left, right, operator in zip(e.operands, e.operands[1:], e.operators): left_type = self.accept(left) method_type = None # type: Optional[mypy.types.Type] if operator == 'in' or operator == 'not in': right_type = self.accept(right) # always validate the right operand # Keep track of whether we get type check errors (these won't be reported, they # are just to verify whether something is valid typing wise). local_errors = self.msg.copy() local_errors.disable_count = 0 sub_result, method_type = self.check_op_local('__contains__', right_type, left, e, local_errors) if isinstance(right_type, PartialType): # We don't really know if this is an error or not, so just shut up. pass elif (local_errors.is_errors() and # is_valid_var_arg is True for any Iterable self.is_valid_var_arg(right_type)): itertype = self.chk.analyze_iterable_item_type(right) method_type = CallableType( [left_type], [nodes.ARG_POS], [None], self.bool_type(), self.named_type('builtins.function')) sub_result = self.bool_type() if not is_subtype(left_type, itertype): self.msg.unsupported_operand_types('in', left_type, right_type, e) else: self.msg.add_errors(local_errors) if operator == 'not in': sub_result = self.bool_type() elif operator in nodes.op_methods: method = self.get_operator_method(operator) sub_result, method_type = self.check_op(method, left_type, right, e, allow_reverse=True) elif operator == 'is' or operator == 'is not': self.accept(right) # validate the right operand sub_result = self.bool_type() method_type = None else: raise RuntimeError('Unknown comparison operator {}'.format(operator)) e.method_types.append(method_type) # Determine type of boolean-and of result and sub_result if result is None: result = sub_result else: result = join.join_types(result, sub_result) assert result is not None return result def get_operator_method(self, op: str) -> str: if op == '/' and self.chk.options.python_version[0] == 2: # TODO also check for "from __future__ import division" return '__div__' else: return nodes.op_methods[op] def _check_op_for_errors(self, method: str, base_type: Type, arg: Expression, context: Context ) -> Tuple[Tuple[Type, Type], MessageBuilder]: """Type check a binary operation which maps to a method call. Return ((result type, inferred operator method type), error message). 
""" local_errors = self.msg.copy() local_errors.disable_count = 0 result = self.check_op_local(method, base_type, arg, context, local_errors) return result, local_errors def check_op_local(self, method: str, base_type: Type, arg: Expression, context: Context, local_errors: MessageBuilder) -> Tuple[Type, Type]: """Type check a binary operation which maps to a method call. Return tuple (result type, inferred operator method type). """ method_type = analyze_member_access(method, base_type, context, False, False, True, self.named_type, self.not_ready_callback, local_errors, original_type=base_type, chk=self.chk) callable_name = None object_type = None if isinstance(base_type, Instance): # TODO: Find out in which class the method was defined originally? # TODO: Support non-Instance types. callable_name = '{}.{}'.format(base_type.type.fullname(), method) object_type = base_type return self.check_call(method_type, [arg], [nodes.ARG_POS], context, arg_messages=local_errors, callable_name=callable_name, object_type=object_type) def check_op(self, method: str, base_type: Type, arg: Expression, context: Context, allow_reverse: bool = False) -> Tuple[Type, Type]: """Type check a binary operation which maps to a method call. Return tuple (result type, inferred operator method type). """ # Use a local error storage for errors related to invalid argument # type (but NOT other errors). This error may need to be suppressed # for operators which support __rX methods. local_errors = self.msg.copy() local_errors.disable_count = 0 if not allow_reverse or self.has_member(base_type, method): result = self.check_op_local(method, base_type, arg, context, local_errors) if allow_reverse: arg_type = self.chk.type_map[arg] if isinstance(arg_type, AnyType): # If the right operand has type Any, we can't make any # conjectures about the type of the result, since the # operand could have a __r method that returns anything. any_type = AnyType(TypeOfAny.from_another_any, source_any=arg_type) result = any_type, result[1] success = not local_errors.is_errors() else: error_any = AnyType(TypeOfAny.from_error) result = error_any, error_any success = False if success or not allow_reverse or isinstance(base_type, AnyType): # We were able to call the normal variant of the operator method, # or there was some problem not related to argument type # validity, or the operator has no __rX method. In any case, we # don't need to consider the __rX method. self.msg.add_errors(local_errors) return result else: # Calling the operator method was unsuccessful. Try the __rX # method of the other operand instead. rmethod = self.get_reverse_op_method(method) arg_type = self.accept(arg) base_arg_node = TempNode(base_type) # In order to be consistent with showing an error about the lhs not matching if neither # the lhs nor the rhs have a compatible signature, we keep track of the first error # message generated when considering __rX methods and __cmp__ methods for Python 2. first_error = None # type: Optional[Tuple[Tuple[Type, Type], MessageBuilder]] if self.has_member(arg_type, rmethod): result, local_errors = self._check_op_for_errors(rmethod, arg_type, base_arg_node, context) if not local_errors.is_errors(): return result first_error = first_error or (result, local_errors) # If we've failed to find an __rX method and we're checking Python 2, check to see if # there is a __cmp__ method on the lhs or on the rhs. 
if (self.chk.options.python_version[0] == 2 and method in nodes.ops_falling_back_to_cmp): cmp_method = nodes.comparison_fallback_method if self.has_member(base_type, cmp_method): # First check the if the lhs has a __cmp__ method that works result, local_errors = self._check_op_for_errors(cmp_method, base_type, arg, context) if not local_errors.is_errors(): return result first_error = first_error or (result, local_errors) if self.has_member(arg_type, cmp_method): # Failed to find a __cmp__ method on the lhs, check if # the rhs as a __cmp__ method that can operate on lhs result, local_errors = self._check_op_for_errors(cmp_method, arg_type, base_arg_node, context) if not local_errors.is_errors(): return result first_error = first_error or (result, local_errors) if first_error: # We found either a __rX method, a __cmp__ method on the base_type, or a __cmp__ # method on the rhs and failed match. Return the error for the first of these to # fail. self.msg.add_errors(first_error[1]) return first_error[0] else: # No __rX method or __cmp__. Do deferred type checking to # produce error message that we may have missed previously. # TODO Fix type checking an expression more than once. return self.check_op_local(method, base_type, arg, context, self.msg) def get_reverse_op_method(self, method: str) -> str: if method == '__div__' and self.chk.options.python_version[0] == 2: return '__rdiv__' else: return nodes.reverse_op_methods[method] def check_boolean_op(self, e: OpExpr, context: Context) -> Type: """Type check a boolean operation ('and' or 'or').""" # A boolean operation can evaluate to either of the operands. # We use the current type context to guide the type inference of of # the left operand. We also use the left operand type to guide the type # inference of the right operand so that expressions such as # '[1] or []' are inferred correctly. ctx = self.type_context[-1] left_type = self.accept(e.left, ctx) assert e.op in ('and', 'or') # Checked by visit_op_expr if e.op == 'and': right_map, left_map = self.chk.find_isinstance_check(e.left) restricted_left_type = false_only(left_type) result_is_left = not left_type.can_be_true elif e.op == 'or': left_map, right_map = self.chk.find_isinstance_check(e.left) restricted_left_type = true_only(left_type) result_is_left = not left_type.can_be_false if e.right_unreachable: right_map = None elif e.right_always: left_map = None # If right_map is None then we know mypy considers the right branch # to be unreachable and therefore any errors found in the right branch # should be suppressed. if right_map is None: self.msg.disable_errors() try: right_type = self.analyze_cond_branch(right_map, e.right, left_type) finally: if right_map is None: self.msg.enable_errors() if right_map is None: # The boolean expression is statically known to be the left value assert left_map is not None # find_isinstance_check guarantees this return left_type if left_map is None: # The boolean expression is statically known to be the right value assert right_map is not None # find_isinstance_check guarantees this return right_type if isinstance(restricted_left_type, UninhabitedType): # The left operand can never be the result return right_type elif result_is_left: # The left operand is always the result return left_type else: return UnionType.make_simplified_union([restricted_left_type, right_type]) def check_list_multiply(self, e: OpExpr) -> Type: """Type check an expression of form '[...] * e'. Type inference is special-cased for this common construct. 
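
        For example (illustrative only), with

            x = [None] * 3  # type: List[Optional[int]]

        the declared type of ``x`` is used as the type context for ``[None]``, so the
        right-hand side checks as ``List[Optional[int]]`` rather than ``List[None]``.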
""" right_type = self.accept(e.right) if is_subtype(right_type, self.named_type('builtins.int')): # Special case: [...] * . Use the type context of the # OpExpr, since the multiplication does not affect the type. left_type = self.accept(e.left, type_context=self.type_context[-1]) else: left_type = self.accept(e.left) result, method_type = self.check_op('__mul__', left_type, e.right, e) e.method_type = method_type return result def visit_unary_expr(self, e: UnaryExpr) -> Type: """Type check an unary operation ('not', '-', '+' or '~').""" operand_type = self.accept(e.expr) op = e.op if op == 'not': result = self.bool_type() # type: Type else: method = nodes.unary_op_methods[op] method_type = self.analyze_external_member_access(method, operand_type, e) result, method_type = self.check_call(method_type, [], [], e) e.method_type = method_type return result def visit_index_expr(self, e: IndexExpr) -> Type: """Type check an index expression (base[index]). It may also represent type application. """ result = self.visit_index_expr_helper(e) return self.narrow_type_from_binder(e, result) def visit_index_expr_helper(self, e: IndexExpr) -> Type: if e.analyzed: # It's actually a type application. return self.accept(e.analyzed) left_type = self.accept(e.base) if isinstance(left_type, TupleType) and self.chk.in_checked_function(): # Special case for tuples. They return a more specific type when # indexed by an integer literal. index = e.index if isinstance(index, SliceExpr): return self.visit_tuple_slice_helper(left_type, index) n = self._get_value(index) if n is not None: if n < 0: n += len(left_type.items) if n >= 0 and n < len(left_type.items): return left_type.items[n] else: self.chk.fail(messages.TUPLE_INDEX_OUT_OF_RANGE, e) return AnyType(TypeOfAny.from_error) else: return self.nonliteral_tuple_index_helper(left_type, index) elif isinstance(left_type, TypedDictType): return self.visit_typeddict_index_expr(left_type, e.index) elif (isinstance(left_type, CallableType) and left_type.is_type_obj() and left_type.type_object().is_enum): return self.visit_enum_index_expr(left_type.type_object(), e.index, e) else: result, method_type = self.check_op('__getitem__', left_type, e.index, e) e.method_type = method_type return result def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Type: begin = None end = None stride = None if slic.begin_index: begin = self._get_value(slic.begin_index) if begin is None: return self.nonliteral_tuple_index_helper(left_type, slic) if slic.end_index: end = self._get_value(slic.end_index) if end is None: return self.nonliteral_tuple_index_helper(left_type, slic) if slic.stride: stride = self._get_value(slic.stride) if stride is None: return self.nonliteral_tuple_index_helper(left_type, slic) return left_type.slice(begin, stride, end) def nonliteral_tuple_index_helper(self, left_type: TupleType, index: Expression) -> Type: index_type = self.accept(index) expected_type = UnionType.make_union([self.named_type('builtins.int'), self.named_type('builtins.slice')]) if not self.chk.check_subtype(index_type, expected_type, index, messages.INVALID_TUPLE_INDEX_TYPE, 'actual type', 'expected type'): return AnyType(TypeOfAny.from_error) else: return UnionType.make_simplified_union(left_type.items) def _get_value(self, index: Expression) -> Optional[int]: if isinstance(index, IntExpr): return index.value elif isinstance(index, UnaryExpr): if index.op == '-': operand = index.expr if isinstance(operand, IntExpr): return -1 * operand.value return None def 
visit_typeddict_index_expr(self, td_type: TypedDictType, index: Expression) -> Type: if not isinstance(index, (StrExpr, UnicodeExpr)): self.msg.typeddict_key_must_be_string_literal(td_type, index) return AnyType(TypeOfAny.from_error) item_name = index.value item_type = td_type.items.get(item_name) if item_type is None: self.msg.typeddict_key_not_found(td_type, item_name, index) return AnyType(TypeOfAny.from_error) return item_type def visit_enum_index_expr(self, enum_type: TypeInfo, index: Expression, context: Context) -> Type: string_type = self.named_type('builtins.str') # type: Type if self.chk.options.python_version[0] < 3: string_type = UnionType.make_union([string_type, self.named_type('builtins.unicode')]) self.chk.check_subtype(self.accept(index), string_type, context, "Enum index should be a string", "actual index type") return Instance(enum_type, []) def visit_cast_expr(self, expr: CastExpr) -> Type: """Type check a cast expression.""" source_type = self.accept(expr.expr, type_context=AnyType(TypeOfAny.special_form), allow_none_return=True, always_allow_any=True) target_type = expr.type options = self.chk.options if options.warn_redundant_casts and is_same_type(source_type, target_type): self.msg.redundant_cast(target_type, expr) if options.disallow_any_unimported and has_any_from_unimported_type(target_type): self.msg.unimported_type_becomes_any("Target type of cast", target_type, expr) check_for_explicit_any(target_type, self.chk.options, self.chk.is_typeshed_stub, self.msg, context=expr) return target_type def visit_reveal_type_expr(self, expr: RevealTypeExpr) -> Type: """Type check a reveal_type expression.""" revealed_type = self.accept(expr.expr, type_context=self.type_context[-1]) if not self.chk.current_node_deferred: self.msg.reveal_type(revealed_type, expr) if not self.chk.in_checked_function(): self.msg.note("'reveal_type' always outputs 'Any' in unchecked functions", expr) return revealed_type def visit_type_application(self, tapp: TypeApplication) -> Type: """Type check a type application (expr[type, ...]).""" tp = self.accept(tapp.expr) if isinstance(tp, CallableType): if not tp.is_type_obj(): self.chk.fail(messages.ONLY_CLASS_APPLICATION, tapp) if len(tp.variables) != len(tapp.types): self.msg.incompatible_type_application(len(tp.variables), len(tapp.types), tapp) return AnyType(TypeOfAny.from_error) return self.apply_generic_arguments(tp, tapp.types, tapp) elif isinstance(tp, Overloaded): if not tp.is_type_obj(): self.chk.fail(messages.ONLY_CLASS_APPLICATION, tapp) for item in tp.items(): if len(item.variables) != len(tapp.types): self.msg.incompatible_type_application(len(item.variables), len(tapp.types), tapp) return AnyType(TypeOfAny.from_error) return Overloaded([self.apply_generic_arguments(item, tapp.types, tapp) for item in tp.items()]) if isinstance(tp, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=tp) return AnyType(TypeOfAny.special_form) def visit_type_alias_expr(self, alias: TypeAliasExpr) -> Type: """Get type of a type alias (could be generic) in a runtime expression.""" if isinstance(alias.type, Instance) and alias.type.invalid: # An invalid alias, error already has been reported return AnyType(TypeOfAny.from_error) item = alias.type if not alias.in_runtime: # We don't replace TypeVar's with Any for alias used as Alias[T](42). 
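        # --------------------------------------------------------------------
        # Illustrative sketch (not part of mypy itself; the alias and values
        # below are hypothetical user code): the kinds of expressions this
        # method sees.
        #
        #     from typing import List, TypeVar
        #     T = TypeVar('T')
        #     Vec = List[T]          # a generic type alias
        #
        #     Vec[int]([1, 2, 3])    # alias subscripted in a runtime expression
        #     Vec([1, 2, 3])         # bare alias used at runtime
        #
        # The subscripted form keeps its explicit type arguments, while the
        # bare form has its free type variables defaulted (see the comment
        # above about not replacing TypeVars for the Alias[T](42) case).
        # --------------------------------------------------------------------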
item = set_any_tvars(item, alias.tvars, alias.line, alias.column) if isinstance(item, Instance): # Normally we get a callable type (or overloaded) with .is_type_obj() true # representing the class's constructor tp = type_object_type(item.type, self.named_type) else: # This type is invalid in most runtime contexts # and corresponding an error will be reported. return alias.fallback if isinstance(tp, CallableType): if len(tp.variables) != len(item.args): self.msg.incompatible_type_application(len(tp.variables), len(item.args), item) return AnyType(TypeOfAny.from_error) return self.apply_generic_arguments(tp, item.args, item) elif isinstance(tp, Overloaded): for it in tp.items(): if len(it.variables) != len(item.args): self.msg.incompatible_type_application(len(it.variables), len(item.args), item) return AnyType(TypeOfAny.from_error) return Overloaded([self.apply_generic_arguments(it, item.args, item) for it in tp.items()]) return AnyType(TypeOfAny.special_form) def visit_list_expr(self, e: ListExpr) -> Type: """Type check a list expression [...].""" return self.check_lst_expr(e.items, 'builtins.list', '', e) def visit_set_expr(self, e: SetExpr) -> Type: return self.check_lst_expr(e.items, 'builtins.set', '', e) def check_lst_expr(self, items: List[Expression], fullname: str, tag: str, context: Context) -> Type: # Translate into type checking a generic function call. # Used for list and set expressions, as well as for tuples # containing star expressions that don't refer to a # Tuple. (Note: "lst" stands for list-set-tuple. :-) tvdef = TypeVarDef('T', 'T', -1, [], self.object_type()) tv = TypeVarType(tvdef) constructor = CallableType( [tv], [nodes.ARG_STAR], [None], self.chk.named_generic_type(fullname, [tv]), self.named_type('builtins.function'), name=tag, variables=[tvdef]) return self.check_call(constructor, [(i.expr if isinstance(i, StarExpr) else i) for i in items], [(nodes.ARG_STAR if isinstance(i, StarExpr) else nodes.ARG_POS) for i in items], context)[0] def visit_tuple_expr(self, e: TupleExpr) -> Type: """Type check a tuple expression.""" # Try to determine type context for type inference. type_context = self.type_context[-1] type_context_items = None if isinstance(type_context, UnionType): tuples_in_context = [t for t in type_context.items if (isinstance(t, TupleType) and len(t.items) == len(e.items)) or is_named_instance(t, 'builtins.tuple')] if len(tuples_in_context) == 1: type_context = tuples_in_context[0] else: # There are either no relevant tuples in the Union, or there is # more than one. Either way, we can't decide on a context. pass if isinstance(type_context, TupleType): type_context_items = type_context.items elif type_context and is_named_instance(type_context, 'builtins.tuple'): assert isinstance(type_context, Instance) if type_context.args: type_context_items = [type_context.args[0]] * len(e.items) # NOTE: it's possible for the context to have a different # number of items than e. In that case we use those context # items that match a position in e, and we'll worry about type # mismatches later. # Infer item types. Give up if there's a star expression # that's not a Tuple. items = [] # type: List[Type] j = 0 # Index into type_context_items; irrelevant if type_context_items is none for i in range(len(e.items)): item = e.items[i] if isinstance(item, StarExpr): # Special handling for star expressions. # TODO: If there's a context, and item.expr is a # TupleExpr, flatten it, so we can benefit from the # context? 
Counterargument: Why would anyone write # (1, *(2, 3)) instead of (1, 2, 3) except in a test? tt = self.accept(item.expr) if isinstance(tt, TupleType): items.extend(tt.items) j += len(tt.items) else: # A star expression that's not a Tuple. # Treat the whole thing as a variable-length tuple. return self.check_lst_expr(e.items, 'builtins.tuple', '', e) else: if not type_context_items or j >= len(type_context_items): tt = self.accept(item) else: tt = self.accept(item, type_context_items[j]) j += 1 items.append(tt) fallback_item = join.join_type_list(items) return TupleType(items, self.chk.named_generic_type('builtins.tuple', [fallback_item])) def visit_dict_expr(self, e: DictExpr) -> Type: """Type check a dict expression. Translate it into a call to dict(), with provisions for **expr. """ # if the dict literal doesn't match TypedDict, check_typeddict_call_with_dict reports # an error, but returns the TypedDict type that matches the literal it found # that would cause a second error when that TypedDict type is returned upstream # to avoid the second error, we always return TypedDict type that was requested typeddict_context = self.find_typeddict_context(self.type_context[-1]) if typeddict_context: self.check_typeddict_call_with_dict( callee=typeddict_context, kwargs=e, context=e ) return typeddict_context.copy_modified() # Collect function arguments, watching out for **expr. args = [] # type: List[Expression] # Regular "key: value" stargs = [] # type: List[Expression] # For "**expr" for key, value in e.items: if key is None: stargs.append(value) else: args.append(TupleExpr([key, value])) # Define type variables (used in constructors below). ktdef = TypeVarDef('KT', 'KT', -1, [], self.object_type()) vtdef = TypeVarDef('VT', 'VT', -2, [], self.object_type()) kt = TypeVarType(ktdef) vt = TypeVarType(vtdef) rv = None # Call dict(*args), unless it's empty and stargs is not. if args or not stargs: # The callable type represents a function like this: # # def (*v: Tuple[kt, vt]) -> Dict[kt, vt]: ... constructor = CallableType( [TupleType([kt, vt], self.named_type('builtins.tuple'))], [nodes.ARG_STAR], [None], self.chk.named_generic_type('builtins.dict', [kt, vt]), self.named_type('builtins.function'), name='', variables=[ktdef, vtdef]) rv = self.check_call(constructor, args, [nodes.ARG_POS] * len(args), e)[0] else: # dict(...) will be called below. pass # Call rv.update(arg) for each arg in **stargs, # except if rv isn't set yet, then set rv = dict(arg). if stargs: for arg in stargs: if rv is None: constructor = CallableType( [self.chk.named_generic_type('typing.Mapping', [kt, vt])], [nodes.ARG_POS], [None], self.chk.named_generic_type('builtins.dict', [kt, vt]), self.named_type('builtins.function'), name='', variables=[ktdef, vtdef]) rv = self.check_call(constructor, [arg], [nodes.ARG_POS], arg)[0] else: method = self.analyze_external_member_access('update', rv, arg) self.check_call(method, [arg], [nodes.ARG_POS], arg) assert rv is not None return rv def find_typeddict_context(self, context: Optional[Type]) -> Optional[TypedDictType]: if isinstance(context, TypedDictType): return context elif isinstance(context, UnionType): items = [] for item in context.items: item_context = self.find_typeddict_context(item) if item_context: items.append(item_context) if len(items) == 1: # Only one union item is TypedDict, so use the context as it's unambiguous. return items[0] # No TypedDict type in context. 
return None def visit_lambda_expr(self, e: LambdaExpr) -> Type: """Type check lambda expression.""" inferred_type, type_override = self.infer_lambda_type_using_context(e) if not inferred_type: self.chk.return_types.append(AnyType(TypeOfAny.special_form)) # No useful type context. ret_type = self.accept(e.expr(), allow_none_return=True) fallback = self.named_type('builtins.function') self.chk.return_types.pop() return callable_type(e, fallback, ret_type) else: # Type context available. self.chk.return_types.append(inferred_type.ret_type) self.chk.check_func_item(e, type_override=type_override) if e.expr() not in self.chk.type_map: self.accept(e.expr(), allow_none_return=True) ret_type = self.chk.type_map[e.expr()] if isinstance(ret_type, NoneTyp): # For "lambda ...: None", just use type from the context. # Important when the context is Callable[..., None] which # really means Void. See #1425. self.chk.return_types.pop() return inferred_type self.chk.return_types.pop() return replace_callable_return_type(inferred_type, ret_type) def infer_lambda_type_using_context(self, e: LambdaExpr) -> Tuple[Optional[CallableType], Optional[CallableType]]: """Try to infer lambda expression type using context. Return None if could not infer type. The second item in the return type is the type_override parameter for check_func_item. """ # TODO also accept 'Any' context ctx = self.type_context[-1] if isinstance(ctx, UnionType): callables = [t for t in ctx.relevant_items() if isinstance(t, CallableType)] if len(callables) == 1: ctx = callables[0] if not ctx or not isinstance(ctx, CallableType): return None, None # The context may have function type variables in it. We replace them # since these are the type variables we are ultimately trying to infer; # they must be considered as indeterminate. We use ErasedType since it # does not affect type inference results (it is for purposes like this # only). callable_ctx = replace_meta_vars(ctx, ErasedType()) assert isinstance(callable_ctx, CallableType) arg_kinds = [arg.kind for arg in e.arguments] if callable_ctx.is_ellipsis_args: # Fill in Any arguments to match the arguments of the lambda. callable_ctx = callable_ctx.copy_modified( is_ellipsis_args=False, arg_types=[AnyType(TypeOfAny.special_form)] * len(arg_kinds), arg_kinds=arg_kinds ) if ARG_STAR in arg_kinds or ARG_STAR2 in arg_kinds: # TODO treat this case appropriately return callable_ctx, None if callable_ctx.arg_kinds != arg_kinds: # Incompatible context; cannot use it to infer types. self.chk.fail(messages.CANNOT_INFER_LAMBDA_TYPE, e) return None, None return callable_ctx, callable_ctx def visit_super_expr(self, e: SuperExpr) -> Type: """Type check a super expression (non-lvalue).""" self.check_super_arguments(e) t = self.analyze_super(e, False) return t def check_super_arguments(self, e: SuperExpr) -> None: """Check arguments in a super(...) 
call.""" if ARG_STAR in e.call.arg_kinds: self.chk.fail('Varargs not supported with "super"', e) elif e.call.args and set(e.call.arg_kinds) != {ARG_POS}: self.chk.fail('"super" only accepts positional arguments', e) elif len(e.call.args) == 1: self.chk.fail('"super" with a single argument not supported', e) elif len(e.call.args) > 2: self.chk.fail('Too many arguments for "super"', e) elif self.chk.options.python_version[0] == 2 and len(e.call.args) == 0: self.chk.fail('Too few arguments for "super"', e) elif len(e.call.args) == 2: type_obj_type = self.accept(e.call.args[0]) instance_type = self.accept(e.call.args[1]) if isinstance(type_obj_type, FunctionLike) and type_obj_type.is_type_obj(): type_info = type_obj_type.type_object() elif isinstance(type_obj_type, TypeType): item = type_obj_type.item if isinstance(item, AnyType): # Could be anything. return if isinstance(item, TupleType): item = item.fallback # Handle named tuples and other Tuple[...] subclasses. if not isinstance(item, Instance): # A complicated type object type. Too tricky, give up. # TODO: Do something more clever here. self.chk.fail('Unsupported argument 1 for "super"', e) return type_info = item.type elif isinstance(type_obj_type, AnyType): return else: self.msg.first_argument_for_super_must_be_type(type_obj_type, e) return if isinstance(instance_type, (Instance, TupleType, TypeVarType)): if isinstance(instance_type, TypeVarType): # Needed for generic self. instance_type = instance_type.upper_bound if not isinstance(instance_type, (Instance, TupleType)): # Too tricky, give up. # TODO: Do something more clever here. self.chk.fail(messages.UNSUPPORTED_ARGUMENT_2_FOR_SUPER, e) return if isinstance(instance_type, TupleType): # Needed for named tuples and other Tuple[...] subclasses. instance_type = instance_type.fallback if type_info not in instance_type.type.mro: self.chk.fail('Argument 2 for "super" not an instance of argument 1', e) elif isinstance(instance_type, TypeType) or (isinstance(instance_type, FunctionLike) and instance_type.is_type_obj()): # TODO: Check whether this is a valid type object here. pass elif not isinstance(instance_type, AnyType): self.chk.fail(messages.UNSUPPORTED_ARGUMENT_2_FOR_SUPER, e) def analyze_super(self, e: SuperExpr, is_lvalue: bool) -> Type: """Type check a super expression.""" if e.info and e.info.bases: # TODO fix multiple inheritance etc if len(e.info.mro) < 2: self.chk.fail('Internal error: unexpected mro for {}: {}'.format( e.info.name(), e.info.mro), e) return AnyType(TypeOfAny.from_error) for base in e.info.mro[1:]: if e.name in base.names or base == e.info.mro[-1]: if e.info.fallback_to_any and base == e.info.mro[-1]: # There's an undefined base class, and we're # at the end of the chain. That's not an error. return AnyType(TypeOfAny.special_form) if not self.chk.in_checked_function(): return AnyType(TypeOfAny.unannotated) if self.chk.scope.active_class() is not None: self.chk.fail('super() outside of a method is not supported', e) return AnyType(TypeOfAny.from_error) method = self.chk.scope.top_function() assert method is not None args = method.arguments # super() in a function with empty args is an error; we # need something in declared_self. 
if not args: self.chk.fail( 'super() requires one or more positional arguments in ' 'enclosing function', e) return AnyType(TypeOfAny.from_error) declared_self = args[0].variable.type or fill_typevars(e.info) return analyze_member_access(name=e.name, typ=fill_typevars(e.info), node=e, is_lvalue=False, is_super=True, is_operator=False, builtin_type=self.named_type, not_ready_callback=self.not_ready_callback, msg=self.msg, override_info=base, original_type=declared_self, chk=self.chk) assert False, 'unreachable' else: # Invalid super. This has been reported by the semantic analyzer. return AnyType(TypeOfAny.from_error) def visit_slice_expr(self, e: SliceExpr) -> Type: expected = make_optional_type(self.named_type('builtins.int')) for index in [e.begin_index, e.end_index, e.stride]: if index: t = self.accept(index) self.chk.check_subtype(t, expected, index, messages.INVALID_SLICE_INDEX) return self.named_type('builtins.slice') def visit_list_comprehension(self, e: ListComprehension) -> Type: return self.check_generator_or_comprehension( e.generator, 'builtins.list', '') def visit_set_comprehension(self, e: SetComprehension) -> Type: return self.check_generator_or_comprehension( e.generator, 'builtins.set', '') def visit_generator_expr(self, e: GeneratorExpr) -> Type: # If any of the comprehensions use async for, the expression will return an async generator # object if any(e.is_async): typ = 'typing.AsyncIterator' else: typ = 'typing.Iterator' return self.check_generator_or_comprehension(e, typ, '') def check_generator_or_comprehension(self, gen: GeneratorExpr, type_name: str, id_for_messages: str) -> Type: """Type check a generator expression or a list comprehension.""" with self.chk.binder.frame_context(can_skip=True, fall_through=0): self.check_for_comp(gen) # Infer the type of the list comprehension by using a synthetic generic # callable type. tvdef = TypeVarDef('T', 'T', -1, [], self.object_type()) tv = TypeVarType(tvdef) constructor = CallableType( [tv], [nodes.ARG_POS], [None], self.chk.named_generic_type(type_name, [tv]), self.chk.named_type('builtins.function'), name=id_for_messages, variables=[tvdef]) return self.check_call(constructor, [gen.left_expr], [nodes.ARG_POS], gen)[0] def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> Type: """Type check a dictionary comprehension.""" with self.chk.binder.frame_context(can_skip=True, fall_through=0): self.check_for_comp(e) # Infer the type of the list comprehension by using a synthetic generic # callable type. ktdef = TypeVarDef('KT', 'KT', -1, [], self.object_type()) vtdef = TypeVarDef('VT', 'VT', -2, [], self.object_type()) kt = TypeVarType(ktdef) vt = TypeVarType(vtdef) constructor = CallableType( [kt, vt], [nodes.ARG_POS, nodes.ARG_POS], [None, None], self.chk.named_generic_type('builtins.dict', [kt, vt]), self.chk.named_type('builtins.function'), name='', variables=[ktdef, vtdef]) return self.check_call(constructor, [e.key, e.value], [nodes.ARG_POS, nodes.ARG_POS], e)[0] def check_for_comp(self, e: Union[GeneratorExpr, DictionaryComprehension]) -> None: """Check the for_comp part of comprehensions. That is the part from 'for': ... for x in y if z Note: This adds the type information derived from the condlists to the current binder. 
""" for index, sequence, conditions, is_async in zip(e.indices, e.sequences, e.condlists, e.is_async): if is_async: sequence_type = self.chk.analyze_async_iterable_item_type(sequence) else: sequence_type = self.chk.analyze_iterable_item_type(sequence) self.chk.analyze_index_variables(index, sequence_type, True, e) for condition in conditions: self.accept(condition) # values are only part of the comprehension when all conditions are true true_map, _ = mypy.checker.find_isinstance_check(condition, self.chk.type_map) if true_map: for var, type in true_map.items(): self.chk.binder.put(var, type) def visit_conditional_expr(self, e: ConditionalExpr) -> Type: cond_type = self.accept(e.cond) if self.chk.options.strict_boolean: is_bool = (isinstance(cond_type, Instance) and cond_type.type.fullname() == 'builtins.bool') if not (is_bool or isinstance(cond_type, AnyType)): self.chk.fail(messages.NON_BOOLEAN_IN_CONDITIONAL, e) ctx = self.type_context[-1] # Gain type information from isinstance if it is there # but only for the current expression if_map, else_map = self.chk.find_isinstance_check(e.cond) if_type = self.analyze_cond_branch(if_map, e.if_expr, context=ctx) if not mypy.checker.is_valid_inferred_type(if_type): # Analyze the right branch disregarding the left branch. else_type = self.analyze_cond_branch(else_map, e.else_expr, context=ctx) # If it would make a difference, re-analyze the left # branch using the right branch's type as context. if ctx is None or not is_equivalent(else_type, ctx): # TODO: If it's possible that the previous analysis of # the left branch produced errors that are avoided # using this context, suppress those errors. if_type = self.analyze_cond_branch(if_map, e.if_expr, context=else_type) else: # Analyze the right branch in the context of the left # branch's type. else_type = self.analyze_cond_branch(else_map, e.else_expr, context=if_type) res = join.join_types(if_type, else_type) return res def analyze_cond_branch(self, map: Optional[Dict[Expression, Type]], node: Expression, context: Optional[Type]) -> Type: with self.chk.binder.frame_context(can_skip=True, fall_through=0): if map is None: # We still need to type check node, in case we want to # process it for isinstance checks later self.accept(node, type_context=context) return UninhabitedType() self.chk.push_type_map(map) return self.accept(node, type_context=context) def visit_backquote_expr(self, e: BackquoteExpr) -> Type: self.accept(e.expr) return self.named_type('builtins.str') # # Helpers # def accept(self, node: Expression, type_context: Optional[Type] = None, allow_none_return: bool = False, always_allow_any: bool = False, ) -> Type: """Type check a node in the given type context. If allow_none_return is True and this expression is a call, allow it to return None. This applies only to this expression and not any subexpressions. 
""" self.type_context.append(type_context) try: if allow_none_return and isinstance(node, CallExpr): typ = self.visit_call_expr(node, allow_none_return=True) elif allow_none_return and isinstance(node, YieldFromExpr): typ = self.visit_yield_from_expr(node, allow_none_return=True) else: typ = node.accept(self) except Exception as err: report_internal_error(err, self.chk.errors.file, node.line, self.chk.errors, self.chk.options) self.type_context.pop() assert typ is not None self.chk.store_type(node, typ) if (self.chk.options.disallow_any_expr and not always_allow_any and not self.chk.is_stub and self.chk.in_checked_function() and has_any_type(typ)): self.msg.disallowed_any_type(typ, node) if not self.chk.in_checked_function(): return AnyType(TypeOfAny.unannotated) else: return typ def named_type(self, name: str) -> Instance: """Return an instance type with type given by the name and no type arguments. Alias for TypeChecker.named_type. """ return self.chk.named_type(name) def is_valid_var_arg(self, typ: Type) -> bool: """Is a type valid as a *args argument?""" return (isinstance(typ, TupleType) or is_subtype(typ, self.chk.named_generic_type('typing.Iterable', [AnyType(TypeOfAny.special_form)])) or isinstance(typ, AnyType)) def is_valid_keyword_var_arg(self, typ: Type) -> bool: """Is a type valid as a **kwargs argument?""" if self.chk.options.python_version[0] >= 3: return is_subtype(typ, self.chk.named_generic_type( 'typing.Mapping', [self.named_type('builtins.str'), AnyType(TypeOfAny.special_form)])) else: return ( is_subtype(typ, self.chk.named_generic_type( 'typing.Mapping', [self.named_type('builtins.str'), AnyType(TypeOfAny.special_form)])) or is_subtype(typ, self.chk.named_generic_type( 'typing.Mapping', [self.named_type('builtins.unicode'), AnyType(TypeOfAny.special_form)]))) def has_member(self, typ: Type, member: str) -> bool: """Does type have member with the given name?""" # TODO TupleType => also consider tuple attributes if isinstance(typ, Instance): return typ.type.has_readable_member(member) if isinstance(typ, CallableType) and typ.is_type_obj(): return typ.fallback.type.has_readable_member(member) elif isinstance(typ, AnyType): return True elif isinstance(typ, UnionType): result = all(self.has_member(x, member) for x in typ.relevant_items()) return result elif isinstance(typ, TupleType): return self.has_member(typ.fallback, member) else: return False def not_ready_callback(self, name: str, context: Context) -> None: """Called when we can't infer the type of a variable because it's not ready yet. Either defer type checking of the enclosing function to the next pass or report an error. 
""" self.chk.handle_cannot_determine_type(name, context) def visit_yield_expr(self, e: YieldExpr) -> Type: return_type = self.chk.return_types[-1] expected_item_type = self.chk.get_generator_yield_type(return_type, False) if e.expr is None: if (not isinstance(expected_item_type, (NoneTyp, AnyType)) and self.chk.in_checked_function()): self.chk.fail(messages.YIELD_VALUE_EXPECTED, e) else: actual_item_type = self.accept(e.expr, expected_item_type) self.chk.check_subtype(actual_item_type, expected_item_type, e, messages.INCOMPATIBLE_TYPES_IN_YIELD, 'actual type', 'expected type') return self.chk.get_generator_receive_type(return_type, False) def visit_await_expr(self, e: AwaitExpr) -> Type: expected_type = self.type_context[-1] if expected_type is not None: expected_type = self.chk.named_generic_type('typing.Awaitable', [expected_type]) actual_type = self.accept(e.expr, expected_type) if isinstance(actual_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=actual_type) return self.check_awaitable_expr(actual_type, e, messages.INCOMPATIBLE_TYPES_IN_AWAIT) def check_awaitable_expr(self, t: Type, ctx: Context, msg: str) -> Type: """Check the argument to `await` and extract the type of value. Also used by `async for` and `async with`. """ if not self.chk.check_subtype(t, self.named_type('typing.Awaitable'), ctx, msg, 'actual type', 'expected type'): return AnyType(TypeOfAny.special_form) else: method = self.analyze_external_member_access('__await__', t, ctx) generator = self.check_call(method, [], [], ctx)[0] return self.chk.get_generator_return_type(generator, False) def visit_yield_from_expr(self, e: YieldFromExpr, allow_none_return: bool = False) -> Type: # NOTE: Whether `yield from` accepts an `async def` decorated # with `@types.coroutine` (or `@asyncio.coroutine`) depends on # whether the generator containing the `yield from` is itself # thus decorated. But it accepts a generator regardless of # how it's decorated. return_type = self.chk.return_types[-1] # TODO: What should the context for the sub-expression be? # If the containing function has type Generator[X, Y, ...], # the context should be Generator[X, Y, T], where T is the # context of the 'yield from' itself (but it isn't known). subexpr_type = self.accept(e.expr) # Check that the expr is an instance of Iterable and get the type of the iterator produced # by __iter__. if isinstance(subexpr_type, AnyType): iter_type = AnyType(TypeOfAny.from_another_any, source_any=subexpr_type) # type: Type elif self.chk.type_is_iterable(subexpr_type): if is_async_def(subexpr_type) and not has_coroutine_decorator(return_type): self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e) iter_method_type = self.analyze_external_member_access( '__iter__', subexpr_type, AnyType(TypeOfAny.special_form)) any_type = AnyType(TypeOfAny.special_form) generic_generator_type = self.chk.named_generic_type('typing.Generator', [any_type, any_type, any_type]) iter_type, _ = self.check_call(iter_method_type, [], [], context=generic_generator_type) else: if not (is_async_def(subexpr_type) and has_coroutine_decorator(return_type)): self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e) iter_type = AnyType(TypeOfAny.from_error) else: iter_type = self.check_awaitable_expr(subexpr_type, e, messages.INCOMPATIBLE_TYPES_IN_YIELD_FROM) # Check that the iterator's item type matches the type yielded by the Generator function # containing this `yield from` expression. 
expected_item_type = self.chk.get_generator_yield_type(return_type, False) actual_item_type = self.chk.get_generator_yield_type(iter_type, False) self.chk.check_subtype(actual_item_type, expected_item_type, e, messages.INCOMPATIBLE_TYPES_IN_YIELD_FROM, 'actual type', 'expected type') # Determine the type of the entire yield from expression. if (isinstance(iter_type, Instance) and iter_type.type.fullname() == 'typing.Generator'): expr_type = self.chk.get_generator_return_type(iter_type, False) else: # Non-Generators don't return anything from `yield from` expressions. # However special-case Any (which might be produced by an error). if isinstance(actual_item_type, AnyType): expr_type = AnyType(TypeOfAny.from_another_any, source_any=actual_item_type) else: expr_type = NoneTyp() if not allow_none_return and isinstance(expr_type, NoneTyp): self.chk.msg.does_not_return_value(None, e) return expr_type def visit_temp_node(self, e: TempNode) -> Type: return e.type def visit_type_var_expr(self, e: TypeVarExpr) -> Type: return AnyType(TypeOfAny.special_form) def visit_newtype_expr(self, e: NewTypeExpr) -> Type: return AnyType(TypeOfAny.special_form) def visit_namedtuple_expr(self, e: NamedTupleExpr) -> Type: tuple_type = e.info.tuple_type if tuple_type: if (self.chk.options.disallow_any_unimported and has_any_from_unimported_type(tuple_type)): self.msg.unimported_type_becomes_any("NamedTuple type", tuple_type, e) check_for_explicit_any(tuple_type, self.chk.options, self.chk.is_typeshed_stub, self.msg, context=e) return AnyType(TypeOfAny.special_form) def visit_enum_call_expr(self, e: EnumCallExpr) -> Type: for name, value in zip(e.items, e.values): if value is not None: typ = self.accept(value) if not isinstance(typ, AnyType): var = e.info.names[name].node if isinstance(var, Var): # Inline TypeCheker.set_inferred_type(), # without the lvalue. (This doesn't really do # much, since the value attribute is defined # to have type Any in the typeshed stub.) 
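                        # ----------------------------------------------------
                        # Illustrative sketch (not part of mypy; names are
                        # hypothetical user code): the functional Enum form
                        # handled here.
                        #
                        #     from enum import Enum
                        #     Color = Enum('Color', [('RED', 1), ('BLUE', 2)])
                        #
                        # Each member value (1, 2) is type checked, and its
                        # inferred type is recorded on the member's Var below.
                        # ----------------------------------------------------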
var.type = typ var.is_inferred = True return AnyType(TypeOfAny.special_form) def visit_typeddict_expr(self, e: TypedDictExpr) -> Type: return AnyType(TypeOfAny.special_form) def visit__promote_expr(self, e: PromoteExpr) -> Type: return e.type def visit_star_expr(self, e: StarExpr) -> StarType: return StarType(self.accept(e.expr)) def object_type(self) -> Instance: """Return instance type 'object'.""" return self.named_type('builtins.object') def bool_type(self) -> Instance: """Return instance type 'bool'.""" return self.named_type('builtins.bool') def narrow_type_from_binder(self, expr: Expression, known_type: Type) -> Type: if literal(expr) >= LITERAL_TYPE: restriction = self.chk.binder.get(expr) if restriction: ans = narrow_declared_type(known_type, restriction) return ans return known_type def has_any_type(t: Type) -> bool: """Whether t contains an Any type""" return t.accept(HasAnyType()) class HasAnyType(types.TypeQuery[bool]): def __init__(self) -> None: super().__init__(any) def visit_any(self, t: AnyType) -> bool: return t.type_of_any != TypeOfAny.special_form # special forms are not real Any types def has_coroutine_decorator(t: Type) -> bool: """Whether t came from a function decorated with `@coroutine`.""" return isinstance(t, Instance) and t.type.fullname() == 'typing.AwaitableGenerator' def is_async_def(t: Type) -> bool: """Whether t came from a function defined using `async def`.""" # In check_func_def(), when we see a function decorated with # `@typing.coroutine` or `@async.coroutine`, we change the # return type to typing.AwaitableGenerator[...], so that its # type is compatible with either Generator or Awaitable. # But for the check here we need to know whether the original # function (before decoration) was an `async def`. The # AwaitableGenerator type conveniently preserves the original # type as its 4th parameter (3rd when using 0-origin indexing # :-), so that we can recover that information here. # (We really need to see whether the original, undecorated # function was an `async def`, which is orthogonal to its # decorations.) if (isinstance(t, Instance) and t.type.fullname() == 'typing.AwaitableGenerator' and len(t.args) >= 4): t = t.args[3] return isinstance(t, Instance) and t.type.fullname() == 'typing.Awaitable' def map_actuals_to_formals(caller_kinds: List[int], caller_names: Optional[Sequence[Optional[str]]], callee_kinds: List[int], callee_names: Sequence[Optional[str]], caller_arg_type: Callable[[int], Type]) -> List[List[int]]: """Calculate mapping between actual (caller) args and formals. The result contains a list of caller argument indexes mapping to each callee argument index, indexed by callee index. The caller_arg_type argument should evaluate to the type of the actual argument type with the given index. """ ncallee = len(callee_kinds) map = [[] for i in range(ncallee)] # type: List[List[int]] j = 0 for i, kind in enumerate(caller_kinds): if kind == nodes.ARG_POS: if j < ncallee: if callee_kinds[j] in [nodes.ARG_POS, nodes.ARG_OPT, nodes.ARG_NAMED, nodes.ARG_NAMED_OPT]: map[j].append(i) j += 1 elif callee_kinds[j] == nodes.ARG_STAR: map[j].append(i) elif kind == nodes.ARG_STAR: # We need to know the actual type to map varargs. argt = caller_arg_type(i) if isinstance(argt, TupleType): # A tuple actual maps to a fixed number of formals. 
for _ in range(len(argt.items)): if j < ncallee: if callee_kinds[j] != nodes.ARG_STAR2: map[j].append(i) else: break if callee_kinds[j] != nodes.ARG_STAR: j += 1 else: # Assume that it is an iterable (if it isn't, there will be # an error later). while j < ncallee: if callee_kinds[j] in (nodes.ARG_NAMED, nodes.ARG_NAMED_OPT, nodes.ARG_STAR2): break else: map[j].append(i) if callee_kinds[j] == nodes.ARG_STAR: break j += 1 elif kind in (nodes.ARG_NAMED, nodes.ARG_NAMED_OPT): assert caller_names is not None, "Internal error: named kinds without names given" name = caller_names[i] if name in callee_names: map[callee_names.index(name)].append(i) elif nodes.ARG_STAR2 in callee_kinds: map[callee_kinds.index(nodes.ARG_STAR2)].append(i) else: assert kind == nodes.ARG_STAR2 for j in range(ncallee): # TODO tuple varargs complicate this no_certain_match = ( not map[j] or caller_kinds[map[j][0]] == nodes.ARG_STAR) if ((callee_names[j] and no_certain_match) or callee_kinds[j] == nodes.ARG_STAR2): map[j].append(i) return map def is_empty_tuple(t: Type) -> bool: return isinstance(t, TupleType) and not t.items def is_duplicate_mapping(mapping: List[int], actual_kinds: List[int]) -> bool: # Multiple actuals can map to the same formal only if they both come from # varargs (*args and **kwargs); in this case at runtime it is possible that # there are no duplicates. We need to allow this, as the convention # f(..., *args, **kwargs) is common enough. return len(mapping) > 1 and not ( len(mapping) == 2 and actual_kinds[mapping[0]] == nodes.ARG_STAR and actual_kinds[mapping[1]] == nodes.ARG_STAR2) def replace_callable_return_type(c: CallableType, new_ret_type: Type) -> CallableType: """Return a copy of a callable type with a different return type.""" return c.copy_modified(ret_type=new_ret_type) class ArgInferSecondPassQuery(types.TypeQuery[bool]): """Query whether an argument type should be inferred in the second pass. The result is True if the type has a type variable in a callable return type anywhere. For example, the result for Callable[[], T] is True if t is a type variable. """ def __init__(self) -> None: super().__init__(any) def visit_callable_type(self, t: CallableType) -> bool: return self.query_types(t.arg_types) or t.accept(HasTypeVarQuery()) class HasTypeVarQuery(types.TypeQuery[bool]): """Visitor for querying whether a type has a type variable component.""" def __init__(self) -> None: super().__init__(any) def visit_type_var(self, t: TypeVarType) -> bool: return True def has_erased_component(t: Optional[Type]) -> bool: return t is not None and t.accept(HasErasedComponentsQuery()) class HasErasedComponentsQuery(types.TypeQuery[bool]): """Visitor for querying whether a type has an erased component.""" def __init__(self) -> None: super().__init__(any) def visit_erased_type(self, t: ErasedType) -> bool: return True def has_uninhabited_component(t: Optional[Type]) -> bool: return t is not None and t.accept(HasUninhabitedComponentsQuery()) class HasUninhabitedComponentsQuery(types.TypeQuery[bool]): """Visitor for querying whether a type has an UninhabitedType component.""" def __init__(self) -> None: super().__init__(any) def visit_uninhabited_type(self, t: UninhabitedType) -> bool: return True def overload_arg_similarity(actual: Type, formal: Type) -> int: """Return if caller argument (actual) is compatible with overloaded signature arg (formal). Return a similarity level: 0: no match 1: actual is compatible, but only using type promotions (e.g. 
int vs float) 2: actual is compatible without type promotions (e.g. int vs object) The distinction is important in cases where multiple overload items match. We want give priority to higher similarity matches. """ # Replace type variables with their upper bounds. Overloading # resolution is based on runtime behavior which erases type # parameters, so no need to handle type variables occurring within # a type. if isinstance(actual, TypeVarType): actual = actual.erase_to_union_or_bound() if isinstance(formal, TypeVarType): formal = formal.erase_to_union_or_bound() if (isinstance(actual, UninhabitedType) or isinstance(actual, AnyType) or isinstance(formal, AnyType) or (isinstance(actual, Instance) and actual.type.fallback_to_any)): # These could match anything at runtime. return 2 if isinstance(formal, CallableType): if isinstance(actual, (CallableType, Overloaded)): # TODO: do more sophisticated callable matching return 2 if isinstance(actual, TypeType): return 2 if is_subtype(actual, formal) else 0 if isinstance(actual, NoneTyp): if not experiments.STRICT_OPTIONAL: # NoneTyp matches anything if we're not doing strict Optional checking return 2 else: # NoneType is a subtype of object if isinstance(formal, Instance) and formal.type.fullname() == "builtins.object": return 2 if isinstance(actual, UnionType): return max(overload_arg_similarity(item, formal) for item in actual.relevant_items()) if isinstance(formal, UnionType): return max(overload_arg_similarity(actual, item) for item in formal.relevant_items()) if isinstance(formal, TypeType): if isinstance(actual, TypeType): # Since Type[T] is covariant, check if actual = Type[A] is # a subtype of formal = Type[F]. return overload_arg_similarity(actual.item, formal.item) elif isinstance(actual, FunctionLike) and actual.is_type_obj(): # Check if the actual is a constructor of some sort. # Note that this is this unsound, since we don't check the __init__ signature. return overload_arg_similarity(actual.items()[0].ret_type, formal.item) else: return 0 if isinstance(actual, TypedDictType): if isinstance(formal, TypedDictType): # Don't support overloading based on the keys or value types of a TypedDict since # that would be complicated and probably only marginally useful. return 2 return overload_arg_similarity(actual.fallback, formal) if isinstance(formal, Instance): if isinstance(actual, CallableType): actual = actual.fallback if isinstance(actual, Overloaded): actual = actual.items()[0].fallback if isinstance(actual, TupleType): actual = actual.fallback if isinstance(actual, Instance): # First perform a quick check (as an optimization) and fall back to generic # subtyping algorithm if type promotions are possible (e.g., int vs. float). if formal.type in actual.type.mro: return 2 elif formal.type.is_protocol and is_subtype(actual, erasetype.erase_type(formal)): return 2 elif actual.type._promote and is_subtype(actual, formal): return 1 else: return 0 elif isinstance(actual, TypeType): item = actual.item if formal.type.fullname() in {"builtins.object", "builtins.type"}: return 2 elif isinstance(item, Instance) and item.type.metaclass_type: # FIX: this does not handle e.g. Union of instances return overload_arg_similarity(item.type.metaclass_type, formal) else: return 0 else: return 0 if isinstance(actual, UnboundType) or isinstance(formal, UnboundType): # Either actual or formal is the result of an error; shut up. return 2 # Fall back to a conservative equality check for the remaining kinds of type. 
return 2 if is_same_type(erasetype.erase_type(actual), erasetype.erase_type(formal)) else 0 def any_arg_causes_overload_ambiguity(items: List[CallableType], arg_types: List[Type], arg_kinds: List[int], arg_names: Optional[Sequence[Optional[str]]]) -> bool: """May an Any actual argument cause ambiguous result type on call to overloaded function? Note that this sometimes returns True even if there is no ambiguity, since a correct implementation would be complex (and the call would be imprecisely typed due to Any types anyway). Args: items: Overload items matching the actual arguments arg_types: Actual argument types arg_kinds: Actual argument kinds arg_names: Actual argument names """ actual_to_formal = [ map_formals_to_actuals( arg_kinds, arg_names, item.arg_kinds, item.arg_names, lambda i: arg_types[i]) for item in items ] for arg_idx, arg_type in enumerate(arg_types): if isinstance(arg_type, AnyType): matching_formals_unfiltered = [(item_idx, lookup[arg_idx]) for item_idx, lookup in enumerate(actual_to_formal) if lookup[arg_idx]] matching_formals = [] for item_idx, formals in matching_formals_unfiltered: if len(formals) > 1: # An actual maps to multiple formals -- give up as too # complex, just assume it overlaps. return True matching_formals.append((item_idx, items[item_idx].arg_types[formals[0]])) if (not all_same_types(t for _, t in matching_formals) and not all_same_types(items[idx].ret_type for idx, _ in matching_formals)): # Any maps to multiple different types, and the return types of these items differ. return True return False def all_same_types(types: Iterable[Type]) -> bool: types = list(types) if len(types) == 0: return True return all(is_same_type(t, types[0]) for t in types[1:]) def map_formals_to_actuals(caller_kinds: List[int], caller_names: Optional[Sequence[Optional[str]]], callee_kinds: List[int], callee_names: List[Optional[str]], caller_arg_type: Callable[[int], Type]) -> List[List[int]]: """Calculate the reverse mapping of map_actuals_to_formals.""" formal_to_actual = map_actuals_to_formals(caller_kinds, caller_names, callee_kinds, callee_names, caller_arg_type) # Now reverse the mapping. 
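    # --------------------------------------------------------------------
    # Illustrative sketch (not part of mypy; `f` is hypothetical user code):
    #
    #     def f(a, b, *rest): ...
    #     f(1, 2, 3, 4)
    #
    # map_actuals_to_formals returns formal_to_actual == [[0], [1], [2, 3]]
    # (formal index -> actual indexes). Reversing it below yields
    # actual_to_formal == [[0], [1], [2], [2]] (actual index -> formal index).
    # --------------------------------------------------------------------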
actual_to_formal = [[] for _ in caller_kinds] # type: List[List[int]] for formal, actuals in enumerate(formal_to_actual): for actual in actuals: actual_to_formal[actual].append(formal) return actual_to_formal mypy-0.560/mypy/checkmember.py0000644€tŠÔÚ€2›s®0000010051713215007205022510 0ustar jukkaDROPBOX\Domain Users00000000000000"""Type checking of attribute access""" from typing import cast, Callable, List, Optional, TypeVar from mypy.types import ( Type, Instance, AnyType, TupleType, TypedDictType, CallableType, FunctionLike, TypeVarDef, Overloaded, TypeVarType, UnionType, PartialType, UninhabitedType, TypeOfAny, DeletedType, NoneTyp, TypeType, function_type, get_type_vars, ) from mypy.nodes import ( TypeInfo, FuncBase, Var, FuncDef, SymbolNode, Context, MypyFile, TypeVarExpr, ARG_POS, ARG_STAR, ARG_STAR2, Decorator, OverloadedFuncDef, ) from mypy.messages import MessageBuilder from mypy.maptype import map_instance_to_supertype from mypy.expandtype import expand_type_by_instance, expand_type, freshen_function_type_vars from mypy.infer import infer_type_arguments from mypy.typevars import fill_typevars from mypy.plugin import Plugin, AttributeContext from mypy import messages from mypy import subtypes from mypy import meet MYPY = False if MYPY: # import for forward declaration only import mypy.checker from mypy import experiments def analyze_member_access(name: str, typ: Type, node: Context, is_lvalue: bool, is_super: bool, is_operator: bool, builtin_type: Callable[[str], Instance], not_ready_callback: Callable[[str, Context], None], msg: MessageBuilder, *, original_type: Type, chk: 'mypy.checker.TypeChecker', override_info: Optional[TypeInfo] = None) -> Type: """Return the type of attribute `name` of typ. This is a general operation that supports various different variations: 1. lvalue or non-lvalue access (i.e. setter or getter access) 2. supertype access (when using super(); is_super == True and override_info should refer to the supertype) original_type is the most precise inferred or declared type of the base object that we have available. typ is generally a supertype of original_type. When looking for an attribute of typ, we may perform recursive calls targeting the fallback type, for example. original_type is always the type used in the initial call. """ # TODO: this and following functions share some logic with subtypes.find_member, # consider refactoring. if isinstance(typ, Instance): if name == '__init__' and not is_super: # Accessing __init__ in statically typed code would compromise # type safety unless used via super(). msg.fail(messages.CANNOT_ACCESS_INIT, node) return AnyType(TypeOfAny.from_error) # The base object has an instance type. info = typ.type if override_info: info = override_info if (experiments.find_occurrences and info.name() == experiments.find_occurrences[0] and name == experiments.find_occurrences[1]): msg.note("Occurrence of '{}.{}'".format(*experiments.find_occurrences), node) # Look up the member. First look up the method dictionary. 
method = info.get_method(name) if method: if method.is_property: assert isinstance(method, OverloadedFuncDef) first_item = cast(Decorator, method.items[0]) return analyze_var(name, first_item.var, typ, info, node, is_lvalue, msg, original_type, not_ready_callback, chk=chk) if is_lvalue: msg.cant_assign_to_method(node) signature = function_type(method, builtin_type('builtins.function')) signature = freshen_function_type_vars(signature) if name == '__new__': # __new__ is special and behaves like a static method -- don't strip # the first argument. pass else: signature = bind_self(signature, original_type) typ = map_instance_to_supertype(typ, method.info) member_type = expand_type_by_instance(signature, typ) freeze_type_vars(member_type) return member_type else: # Not a method. return analyze_member_var_access(name, typ, info, node, is_lvalue, is_super, builtin_type, not_ready_callback, msg, original_type=original_type, chk=chk) elif isinstance(typ, AnyType): # The base object has dynamic type. return AnyType(TypeOfAny.from_another_any, source_any=typ) elif isinstance(typ, NoneTyp): if chk.should_suppress_optional_error([typ]): return AnyType(TypeOfAny.from_error) # The only attribute NoneType has are those it inherits from object return analyze_member_access(name, builtin_type('builtins.object'), node, is_lvalue, is_super, is_operator, builtin_type, not_ready_callback, msg, original_type=original_type, chk=chk) elif isinstance(typ, UnionType): # The base object has dynamic type. msg.disable_type_names += 1 results = [analyze_member_access(name, subtype, node, is_lvalue, is_super, is_operator, builtin_type, not_ready_callback, msg, original_type=original_type, chk=chk) for subtype in typ.relevant_items()] msg.disable_type_names -= 1 return UnionType.make_simplified_union(results) elif isinstance(typ, TupleType): # Actually look up from the fallback instance type. return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super, is_operator, builtin_type, not_ready_callback, msg, original_type=original_type, chk=chk) elif isinstance(typ, TypedDictType): # Actually look up from the fallback instance type. return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super, is_operator, builtin_type, not_ready_callback, msg, original_type=original_type, chk=chk) elif isinstance(typ, FunctionLike) and typ.is_type_obj(): # Class attribute. # TODO super? ret_type = typ.items()[0].ret_type if isinstance(ret_type, TupleType): ret_type = ret_type.fallback if isinstance(ret_type, Instance): if not is_operator: # When Python sees an operator (eg `3 == 4`), it automatically translates that # into something like `int.__eq__(3, 4)` instead of `(3).__eq__(4)` as an # optimization. # # While it normally it doesn't matter which of the two versions are used, it # does cause inconsistencies when working with classes. For example, translating # `int == int` to `int.__eq__(int)` would not work since `int.__eq__` is meant to # compare two int _instances_. What we really want is `type(int).__eq__`, which # is meant to compare two types or classes. # # This check makes sure that when we encounter an operator, we skip looking up # the corresponding method in the current instance to avoid this edge case. # See https://github.com/python/mypy/pull/1787 for more info. result = analyze_class_attribute_access(ret_type, name, node, is_lvalue, builtin_type, not_ready_callback, msg, original_type=original_type) if result: return result # Look up from the 'type' type. 
return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super, is_operator, builtin_type, not_ready_callback, msg, original_type=original_type, chk=chk) else: assert False, 'Unexpected type {}'.format(repr(ret_type)) elif isinstance(typ, FunctionLike): # Look up from the 'function' type. return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super, is_operator, builtin_type, not_ready_callback, msg, original_type=original_type, chk=chk) elif isinstance(typ, TypeVarType): return analyze_member_access(name, typ.upper_bound, node, is_lvalue, is_super, is_operator, builtin_type, not_ready_callback, msg, original_type=original_type, chk=chk) elif isinstance(typ, DeletedType): msg.deleted_as_rvalue(typ, node) return AnyType(TypeOfAny.from_error) elif isinstance(typ, TypeType): # Similar to FunctionLike + is_type_obj() above. item = None fallback = builtin_type('builtins.type') ignore_messages = msg.copy() ignore_messages.disable_errors() if isinstance(typ.item, Instance): item = typ.item elif isinstance(typ.item, AnyType): return analyze_member_access(name, fallback, node, is_lvalue, is_super, is_operator, builtin_type, not_ready_callback, ignore_messages, original_type=original_type, chk=chk) elif isinstance(typ.item, TypeVarType): if isinstance(typ.item.upper_bound, Instance): item = typ.item.upper_bound elif isinstance(typ.item, TupleType): item = typ.item.fallback elif isinstance(typ.item, FunctionLike) and typ.item.is_type_obj(): item = typ.item.fallback elif isinstance(typ.item, TypeType): # Access member on metaclass object via Type[Type[C]] if isinstance(typ.item.item, Instance): item = typ.item.item.type.metaclass_type if item and not is_operator: # See comment above for why operators are skipped result = analyze_class_attribute_access(item, name, node, is_lvalue, builtin_type, not_ready_callback, msg, original_type=original_type) if result: if not (isinstance(result, AnyType) and item.type.fallback_to_any): return result else: # We don't want errors on metaclass lookup for classes with Any fallback msg = ignore_messages if item is not None: fallback = item.type.metaclass_type or fallback return analyze_member_access(name, fallback, node, is_lvalue, is_super, is_operator, builtin_type, not_ready_callback, msg, original_type=original_type, chk=chk) if chk.should_suppress_optional_error([typ]): return AnyType(TypeOfAny.from_error) return msg.has_no_attr(original_type, typ, name, node) def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo, node: Context, is_lvalue: bool, is_super: bool, builtin_type: Callable[[str], Instance], not_ready_callback: Callable[[str, Context], None], msg: MessageBuilder, original_type: Type, chk: 'mypy.checker.TypeChecker') -> Type: """Analyse attribute access that does not target a method. This is logically part of analyze_member_access and the arguments are similar. original_type is the type of E in the expression E.var """ # It was not a method. Try looking up a variable. v = lookup_member_var_or_accessor(info, name, is_lvalue) vv = v if isinstance(vv, Decorator): # The associated Var node of a decorator contains the type. v = vv.var if isinstance(vv, TypeInfo): # If the associated variable is a TypeInfo synthesize a Var node for # the purposes of type checking. This enables us to type check things # like accessing class attributes on an inner class. 
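        # --------------------------------------------------------------------
        # Illustrative sketch (not part of mypy; class names are hypothetical
        # user code): the pattern this synthesized Var makes checkable.
        #
        #     class Outer:
        #         class Inner:
        #             x = 0
        #
        #     Outer.Inner       # the inner class accessed as an attribute
        #     Outer.Inner().x   # ...and used as a constructor
        # --------------------------------------------------------------------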
v = Var(name, type=type_object_type(vv, builtin_type)) v.info = info if isinstance(v, Var): return analyze_var(name, v, itype, info, node, is_lvalue, msg, original_type, not_ready_callback, chk=chk) elif isinstance(v, FuncDef): assert False, "Did not expect a function" elif not v and name not in ['__getattr__', '__setattr__', '__getattribute__']: if not is_lvalue: for method_name in ('__getattribute__', '__getattr__'): method = info.get_method(method_name) # __getattribute__ is defined on builtins.object and returns Any, so without # the guard this search will always find object.__getattribute__ and conclude # that the attribute exists if method and method.info.fullname() != 'builtins.object': function = function_type(method, builtin_type('builtins.function')) bound_method = bind_self(function, original_type) typ = map_instance_to_supertype(itype, method.info) getattr_type = expand_type_by_instance(bound_method, typ) if isinstance(getattr_type, CallableType): return getattr_type.ret_type else: setattr_meth = info.get_method('__setattr__') if setattr_meth and setattr_meth.info.fullname() != 'builtins.object': setattr_func = function_type(setattr_meth, builtin_type('builtins.function')) bound_type = bind_self(setattr_func, original_type) typ = map_instance_to_supertype(itype, setattr_meth.info) setattr_type = expand_type_by_instance(bound_type, typ) if isinstance(setattr_type, CallableType) and len(setattr_type.arg_types) > 0: return setattr_type.arg_types[-1] if itype.type.fallback_to_any: return AnyType(TypeOfAny.special_form) # Could not find the member. if is_super: msg.undefined_in_superclass(name, node) return AnyType(TypeOfAny.from_error) else: if chk and chk.should_suppress_optional_error([itype]): return AnyType(TypeOfAny.from_error) return msg.has_no_attr(original_type, itype, name, node) def analyze_var(name: str, var: Var, itype: Instance, info: TypeInfo, node: Context, is_lvalue: bool, msg: MessageBuilder, original_type: Type, not_ready_callback: Callable[[str, Context], None], *, chk: 'mypy.checker.TypeChecker') -> Type: """Analyze access to an attribute via a Var node. This is conceptually part of analyze_member_access and the arguments are similar. itype is the class object in which var is dedined original_type is the type of E in the expression E.var """ # Found a member variable. itype = map_instance_to_supertype(itype, var.info) typ = var.type if typ: if isinstance(typ, PartialType): return handle_partial_attribute_type(typ, is_lvalue, msg, var) t = expand_type_by_instance(typ, itype) if is_lvalue and var.is_property and not var.is_settable_property: # TODO allow setting attributes in subclass (although it is probably an error) msg.read_only_property(name, info, node) if is_lvalue and var.is_classvar: msg.cant_assign_to_classvar(name, node) result = t if var.is_initialized_in_class and isinstance(t, FunctionLike) and not t.is_type_obj(): if is_lvalue: if var.is_property: if not var.is_settable_property: msg.read_only_property(name, info, node) else: msg.cant_assign_to_method(node) if not var.is_staticmethod: # Class-level function objects and classmethods become bound methods: # the former to the instance, the latter to the class. functype = t # Use meet to narrow original_type to the dispatched type. 
# For example, assume # * A.f: Callable[[A1], None] where A1 <: A (maybe A1 == A) # * B.f: Callable[[B1], None] where B1 <: B (maybe B1 == B) # * x: Union[A1, B1] # In `x.f`, when checking `x` against A1 we assume x is compatible with A # and similarly for B1 when checking agains B dispatched_type = meet.meet_types(original_type, itype) check_self_arg(functype, dispatched_type, var.is_classmethod, node, name, msg) signature = bind_self(functype, original_type, var.is_classmethod) if var.is_property: # A property cannot have an overloaded type => the cast is fine. assert isinstance(signature, CallableType) result = signature.ret_type else: result = signature else: if not var.is_ready: not_ready_callback(var.name(), node) # Implicit 'Any' type. result = AnyType(TypeOfAny.special_form) fullname = '{}.{}'.format(var.info.fullname(), name) hook = chk.plugin.get_attribute_hook(fullname) if hook: result = hook(AttributeContext(original_type, result, node, chk)) return result def freeze_type_vars(member_type: Type) -> None: if isinstance(member_type, CallableType): for v in member_type.variables: v.id.meta_level = 0 if isinstance(member_type, Overloaded): for it in member_type.items(): for v in it.variables: v.id.meta_level = 0 def handle_partial_attribute_type(typ: PartialType, is_lvalue: bool, msg: MessageBuilder, context: Context) -> Type: if typ.type is None: # 'None' partial type. It has a well-defined type -- 'None'. # In an lvalue context we want to preserver the knowledge of # it being a partial type. if not is_lvalue: return NoneTyp() return typ else: msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context) return AnyType(TypeOfAny.from_error) def lookup_member_var_or_accessor(info: TypeInfo, name: str, is_lvalue: bool) -> Optional[SymbolNode]: """Find the attribute/accessor node that refers to a member of a type.""" # TODO handle lvalues node = info.get(name) if node: return node.node else: return None def check_self_arg(functype: FunctionLike, dispatched_arg_type: Type, is_classmethod: bool, context: Context, name: str, msg: MessageBuilder) -> None: """For x.f where A.f: A1 -> T, check that meet(type(x), A) <: A1 for each overload. dispatched_arg_type is meet(B, A) in the following example def g(x: B): x.f class A: f: Callable[[A1], None] """ # TODO: this is too strict. We can return filtered overloads for matching definitions for item in functype.items(): if not item.arg_types or item.arg_kinds[0] not in (ARG_POS, ARG_STAR): # No positional first (self) argument (*args is okay). 
msg.no_formal_self(name, item, context) else: selfarg = item.arg_types[0] if is_classmethod: dispatched_arg_type = TypeType.make_normalized(dispatched_arg_type) if not subtypes.is_subtype(dispatched_arg_type, erase_to_bound(selfarg)): msg.incompatible_self_argument(name, dispatched_arg_type, item, is_classmethod, context) def analyze_class_attribute_access(itype: Instance, name: str, context: Context, is_lvalue: bool, builtin_type: Callable[[str], Instance], not_ready_callback: Callable[[str, Context], None], msg: MessageBuilder, original_type: Type) -> Optional[Type]: """original_type is the type of E in the expression E.var""" node = itype.type.get(name) if not node: if itype.type.fallback_to_any: return AnyType(TypeOfAny.special_form) return None is_decorated = isinstance(node.node, Decorator) is_method = is_decorated or isinstance(node.node, FuncDef) if is_lvalue: if is_method: msg.cant_assign_to_method(context) if isinstance(node.node, TypeInfo): msg.fail(messages.CANNOT_ASSIGN_TO_TYPE, context) if itype.type.is_enum and not (is_lvalue or is_decorated or is_method): return itype t = node.type if t: if isinstance(t, PartialType): symnode = node.node assert symnode is not None return handle_partial_attribute_type(t, is_lvalue, msg, symnode) if not is_method and (isinstance(t, TypeVarType) or get_type_vars(t)): msg.fail(messages.GENERIC_INSTANCE_VAR_CLASS_ACCESS, context) is_classmethod = is_decorated and cast(Decorator, node.node).func.is_class return add_class_tvars(t, itype, is_classmethod, builtin_type, original_type) elif isinstance(node.node, Var): not_ready_callback(name, context) return AnyType(TypeOfAny.special_form) if isinstance(node.node, TypeVarExpr): msg.fail('Type variable "{}.{}" cannot be used as an expression'.format( itype.type.name(), name), context) return AnyType(TypeOfAny.from_error) if isinstance(node.node, TypeInfo): return type_object_type(node.node, builtin_type) if isinstance(node.node, MypyFile): # Reference to a module object. return builtin_type('types.ModuleType') if is_decorated: # TODO: Return type of decorated function. This is quick hack to work around #998. return AnyType(TypeOfAny.special_form) else: return function_type(cast(FuncBase, node.node), builtin_type('builtins.function')) def add_class_tvars(t: Type, itype: Instance, is_classmethod: bool, builtin_type: Callable[[str], Instance], original_type: Type) -> Type: """Instantiate type variables during analyze_class_attribute_access, e.g T and Q in the following: def A(Generic(T)): @classmethod def foo(cls: Type[Q]) -> Tuple[T, Q]: ... class B(A): pass B.foo() original_type is the value of the type B in the expression B.foo() """ # TODO: verify consistency between Q and T info = itype.type # type: TypeInfo if isinstance(t, CallableType): # TODO: Should we propagate type variable values? tvars = [TypeVarDef(n, n, i + 1, [], builtin_type('builtins.object'), tv.variance) for (i, n), tv in zip(enumerate(info.type_vars), info.defn.type_vars)] if is_classmethod: t = bind_self(t, original_type, is_classmethod=True) return t.copy_modified(variables=tvars + t.variables) elif isinstance(t, Overloaded): return Overloaded([cast(CallableType, add_class_tvars(item, itype, is_classmethod, builtin_type, original_type)) for item in t.items()]) return t def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) -> Type: """Return the type of a type object. For a generic type G with type variables T and S the type is generally of form Callable[..., G[T, S]] where ... 
are argument types for the __init__/__new__ method (without the self argument). Also, the fallback type will be 'type' instead of 'function'. """ init_method = info.get_method('__init__') if not init_method: # Must be an invalid class definition. return AnyType(TypeOfAny.from_error) else: fallback = info.metaclass_type or builtin_type('builtins.type') if init_method.info.fullname() == 'builtins.object': # No non-default __init__ -> look at __new__ instead. new_method = info.get_method('__new__') if new_method and new_method.info.fullname() != 'builtins.object': # Found one! Get signature from __new__. return type_object_type_from_function(new_method, info, fallback) # Both are defined by object. But if we've got a bogus # base class, we can't know for sure, so check for that. if info.fallback_to_any: # Construct a universal callable as the prototype. any_type = AnyType(TypeOfAny.special_form) sig = CallableType(arg_types=[any_type, any_type], arg_kinds=[ARG_STAR, ARG_STAR2], arg_names=["_args", "_kwds"], ret_type=any_type, fallback=builtin_type('builtins.function')) return class_callable(sig, info, fallback, None) # Construct callable type based on signature of __init__. Adjust # return type and insert type arguments. return type_object_type_from_function(init_method, info, fallback) def type_object_type_from_function(init_or_new: FuncBase, info: TypeInfo, fallback: Instance) -> FunctionLike: signature = bind_self(function_type(init_or_new, fallback)) # The __init__ method might come from a generic superclass # (init_or_new.info) with type variables that do not map # identically to the type variables of the class being constructed # (info). For example # # class A(Generic[T]): def __init__(self, x: T) -> None: pass # class B(A[List[T]], Generic[T]): pass # # We need to first map B's __init__ to the type (List[T]) -> None. signature = cast(FunctionLike, map_type_from_supertype(signature, info, init_or_new.info)) special_sig = None # type: Optional[str] if init_or_new.info.fullname() == 'builtins.dict': # Special signature! special_sig = 'dict' if isinstance(signature, CallableType): return class_callable(signature, info, fallback, special_sig) else: # Overloaded __init__/__new__. assert isinstance(signature, Overloaded) items = [] # type: List[CallableType] for item in signature.items(): items.append(class_callable(item, info, fallback, special_sig)) return Overloaded(items) def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance, special_sig: Optional[str]) -> CallableType: """Create a type object type based on the signature of __init__.""" variables = [] # type: List[TypeVarDef] variables.extend(info.defn.type_vars) variables.extend(init_type.variables) callable_type = init_type.copy_modified( ret_type=fill_typevars(info), fallback=type_type, name=None, variables=variables, special_sig=special_sig) c = callable_type.with_name(info.name()) return c def map_type_from_supertype(typ: Type, sub_info: TypeInfo, super_info: TypeInfo) -> Type: """Map type variables in a type defined in a supertype context to be valid in the subtype context. Assume that the result is unique; if more than one type is possible, return one of the alternatives. For example, assume . class D(Generic[S]) ... . class C(D[E[T]], Generic[T]) ... Now S in the context of D would be mapped to E[T] in the context of C. """ # Create the type of self in subtype, of form t[a1, ...]. 
inst_type = fill_typevars(sub_info) if isinstance(inst_type, TupleType): inst_type = inst_type.fallback # Map the type of self to supertype. This gets us a description of the # supertype type variables in terms of subtype variables, i.e. t[t1, ...] # so that any type variables in tN are to be interpreted in subtype # context. inst_type = map_instance_to_supertype(inst_type, super_info) # Finally expand the type variables in type with those in the previously # constructed type. Note that both type and inst_type may have type # variables, but in type they are interpreted in supertype context while # in inst_type they are interpreted in subtype context. This works even if # the names of type variables in supertype and subtype overlap. return expand_type_by_instance(typ, inst_type) F = TypeVar('F', bound=FunctionLike) def bind_self(method: F, original_type: Optional[Type] = None, is_classmethod: bool = False) -> F: """Return a copy of `method`, with the type of its first parameter (usually self or cls) bound to original_type. If the type of `self` is a generic type (T, or Type[T] for classmethods), instantiate every occurrence of type with original_type in the rest of the signature and in the return type. original_type is the type of E in the expression E.copy(). It is None in compatibility checks. In this case we treat it as the erasure of the declared type of self. This way we can express "the type of self". For example: T = TypeVar('T', bound='A') class A: def copy(self: T) -> T: ... class B(A): pass b = B().copy() # type: B """ if isinstance(method, Overloaded): return cast(F, Overloaded([bind_self(c, original_type) for c in method.items()])) assert isinstance(method, CallableType) func = method if not func.arg_types: # invalid method. return something return cast(F, func) if func.arg_kinds[0] == ARG_STAR: # The signature is of the form 'def foo(*args, ...)'. # In this case we shouldn't drop the first arg, # since func will be absorbed by the *args. # TODO: infer bounds on the type of *args? return cast(F, func) self_param_type = func.arg_types[0] if func.variables and (isinstance(self_param_type, TypeVarType) or (isinstance(self_param_type, TypeType) and isinstance(self_param_type.item, TypeVarType))): if original_type is None: # Type check method override # XXX value restriction as union? 
original_type = erase_to_bound(self_param_type) ids = [x.id for x in func.variables] typearg = infer_type_arguments(ids, self_param_type, original_type)[0] if (is_classmethod and isinstance(typearg, UninhabitedType) and isinstance(original_type, (Instance, TypeVarType, TupleType))): # In case we call a classmethod through an instance x, fallback to type(x) # TODO: handle Union typearg = infer_type_arguments(ids, self_param_type, TypeType(original_type))[0] def expand(target: Type) -> Type: assert typearg is not None return expand_type(target, {func.variables[0].id: typearg}) arg_types = [expand(x) for x in func.arg_types[1:]] ret_type = expand(func.ret_type) variables = func.variables[1:] else: arg_types = func.arg_types[1:] ret_type = func.ret_type variables = func.variables if isinstance(original_type, CallableType) and original_type.is_type_obj(): original_type = TypeType.make_normalized(original_type.ret_type) res = func.copy_modified(arg_types=arg_types, arg_kinds=func.arg_kinds[1:], arg_names=func.arg_names[1:], variables=variables, ret_type=ret_type, bound_args=[original_type]) return cast(F, res) def erase_to_bound(t: Type) -> Type: if isinstance(t, TypeVarType): return t.upper_bound if isinstance(t, TypeType): if isinstance(t.item, TypeVarType): return TypeType.make_normalized(t.item.upper_bound) return t mypy-0.560/mypy/checkstrformat.py0000644€tŠÔÚ€2›s®0000003546413215007205023272 0ustar jukkaDROPBOX\Domain Users00000000000000"""Expression type checker. This file is conceptually part of ExpressionChecker and TypeChecker.""" import re from typing import cast, List, Tuple, Dict, Callable, Union, Optional from mypy.types import ( Type, AnyType, TupleType, Instance, UnionType, TypeOfAny ) from mypy.nodes import ( StrExpr, BytesExpr, UnicodeExpr, TupleExpr, DictExpr, Context, Expression, StarExpr ) if False: # break import cycle only needed for mypy import mypy.checker import mypy.checkexpr from mypy import messages from mypy.messages import MessageBuilder FormatStringExpr = Union[StrExpr, BytesExpr, UnicodeExpr] Checkers = Tuple[Callable[[Expression], None], Callable[[Type], None]] class ConversionSpecifier: def __init__(self, key: str, flags: str, width: str, precision: str, type: str) -> None: self.key = key self.flags = flags self.width = width self.precision = precision self.type = type def has_key(self) -> bool: return self.key is not None def has_star(self) -> bool: return self.width == '*' or self.precision == '*' class StringFormatterChecker: """String interpolation/formatter type checker. This class works closely together with checker.ExpressionChecker. """ # Some services are provided by a TypeChecker instance. chk = None # type: mypy.checker.TypeChecker # This is shared with TypeChecker, but stored also here for convenience. msg = None # type: MessageBuilder # Some services are provided by a ExpressionChecker instance. exprchk = None # type: mypy.checkexpr.ExpressionChecker def __init__(self, exprchk: 'mypy.checkexpr.ExpressionChecker', chk: 'mypy.checker.TypeChecker', msg: MessageBuilder) -> None: """Construct an expression type checker.""" self.chk = chk self.exprchk = exprchk self.msg = msg # TODO: In Python 3, the bytes formatting has a more restricted set of options # compared to string formatting. 
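    # Illustrative sketch (an assumption based on the regex grammar implemented by
    # parse_conversion_specifiers() below, not an excerpt from elsewhere in mypy):
    # for a format expression such as
    #
    #     '%(name)-8.2f' % {'name': 3.14}
    #
    # the parser would yield a single ConversionSpecifier with key='name',
    # flags='-', width='8', precision='2' and type='f'. Since has_key() is True
    # and has_star() is False, check_str_interpolation() would dispatch to the
    # mapping-based checker (check_mapping_str_interpolation) rather than the
    # positional one.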
def check_str_interpolation(self, expr: FormatStringExpr, replacements: Expression) -> Type: """Check the types of the 'replacements' in a string interpolation expression: str % replacements """ specifiers = self.parse_conversion_specifiers(expr.value) has_mapping_keys = self.analyze_conversion_specifiers(specifiers, expr) if isinstance(expr, BytesExpr) and (3, 0) <= self.chk.options.python_version < (3, 5): self.msg.fail('Bytes formatting is only supported in Python 3.5 and later', replacements) return AnyType(TypeOfAny.from_error) if has_mapping_keys is None: pass # Error was reported elif has_mapping_keys: self.check_mapping_str_interpolation(specifiers, replacements, expr) else: self.check_simple_str_interpolation(specifiers, replacements, expr) if isinstance(expr, BytesExpr): return self.named_type('builtins.bytes') elif isinstance(expr, UnicodeExpr): return self.named_type('builtins.unicode') elif isinstance(expr, StrExpr): return self.named_type('builtins.str') else: assert False def parse_conversion_specifiers(self, format: str) -> List[ConversionSpecifier]: key_regex = r'(\(([^()]*)\))?' # (optional) parenthesised sequence of characters flags_regex = r'([#0\-+ ]*)' # (optional) sequence of flags width_regex = r'(\*|[1-9][0-9]*)?' # (optional) minimum field width (* or numbers) precision_regex = r'(?:\.(\*|[0-9]+)?)?' # (optional) . followed by * of numbers length_mod_regex = r'[hlL]?' # (optional) length modifier (unused) type_regex = r'(.)?' # conversion type regex = ('%' + key_regex + flags_regex + width_regex + precision_regex + length_mod_regex + type_regex) specifiers = [] # type: List[ConversionSpecifier] for parens_key, key, flags, width, precision, type in re.findall(regex, format): if parens_key == '': key = None specifiers.append(ConversionSpecifier(key, flags, width, precision, type)) return specifiers def analyze_conversion_specifiers(self, specifiers: List[ConversionSpecifier], context: Context) -> Optional[bool]: has_star = any(specifier.has_star() for specifier in specifiers) has_key = any(specifier.has_key() for specifier in specifiers) all_have_keys = all( specifier.has_key() or specifier.type == '%' for specifier in specifiers ) if has_key and has_star: self.msg.string_interpolation_with_star_and_key(context) return None if has_key and not all_have_keys: self.msg.string_interpolation_mixing_key_and_non_keys(context) return None return has_key def check_simple_str_interpolation(self, specifiers: List[ConversionSpecifier], replacements: Expression, expr: FormatStringExpr) -> None: checkers = self.build_replacement_checkers(specifiers, replacements, expr) if checkers is None: return rhs_type = self.accept(replacements) rep_types = [] # type: List[Type] if isinstance(rhs_type, TupleType): rep_types = rhs_type.items elif isinstance(rhs_type, AnyType): return else: rep_types = [rhs_type] if len(checkers) > len(rep_types): self.msg.too_few_string_formatting_arguments(replacements) elif len(checkers) < len(rep_types): self.msg.too_many_string_formatting_arguments(replacements) else: if len(checkers) == 1: check_node, check_type = checkers[0] if isinstance(rhs_type, TupleType) and len(rhs_type.items) == 1: check_type(rhs_type.items[0]) else: check_node(replacements) elif (isinstance(replacements, TupleExpr) and not any(isinstance(item, StarExpr) for item in replacements.items)): for checks, rep_node in zip(checkers, replacements.items): check_node, check_type = checks check_node(rep_node) else: for checks, rep_type in zip(checkers, rep_types): check_node, check_type = 
checks check_type(rep_type) def check_mapping_str_interpolation(self, specifiers: List[ConversionSpecifier], replacements: Expression, expr: FormatStringExpr) -> None: if (isinstance(replacements, DictExpr) and all(isinstance(k, (StrExpr, BytesExpr)) for k, v in replacements.items)): mapping = {} # type: Dict[str, Type] for k, v in replacements.items: key_str = cast(StrExpr, k).value mapping[key_str] = self.accept(v) for specifier in specifiers: if specifier.type == '%': # %% is allowed in mappings, no checking is required continue if specifier.key not in mapping: self.msg.key_not_in_mapping(specifier.key, replacements) return rep_type = mapping[specifier.key] expected_type = self.conversion_type(specifier.type, replacements, expr) if expected_type is None: return self.chk.check_subtype(rep_type, expected_type, replacements, messages.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION, 'expression has type', 'placeholder with key \'%s\' has type' % specifier.key) else: rep_type = self.accept(replacements) any_type = AnyType(TypeOfAny.special_form) dict_type = self.chk.named_generic_type('builtins.dict', [any_type, any_type]) self.chk.check_subtype(rep_type, dict_type, replacements, messages.FORMAT_REQUIRES_MAPPING, 'expression has type', 'expected type for mapping is') def build_replacement_checkers(self, specifiers: List[ConversionSpecifier], context: Context, expr: FormatStringExpr ) -> Optional[List[Checkers]]: checkers = [] # type: List[Checkers] for specifier in specifiers: checker = self.replacement_checkers(specifier, context, expr) if checker is None: return None checkers.extend(checker) return checkers def replacement_checkers(self, specifier: ConversionSpecifier, context: Context, expr: FormatStringExpr) -> Optional[List[Checkers]]: """Returns a list of tuples of two functions that check whether a replacement is of the right type for the specifier. The first functions take a node and checks its type in the right type context. The second function just checks a type. """ checkers = [] # type: List[Checkers] if specifier.width == '*': checkers.append(self.checkers_for_star(context)) if specifier.precision == '*': checkers.append(self.checkers_for_star(context)) if specifier.type == 'c': c = self.checkers_for_c_type(specifier.type, context, expr) if c is None: return None checkers.append(c) elif specifier.type != '%': c = self.checkers_for_regular_type(specifier.type, context, expr) if c is None: return None checkers.append(c) return checkers def checkers_for_star(self, context: Context) -> Checkers: """Returns a tuple of check functions that check whether, respectively, a node or a type is compatible with a star in a conversion specifier """ expected = self.named_type('builtins.int') def check_type(type: Type) -> None: expected = self.named_type('builtins.int') self.chk.check_subtype(type, expected, context, '* wants int') def check_expr(expr: Expression) -> None: type = self.accept(expr, expected) check_type(type) return check_expr, check_type def checkers_for_regular_type(self, type: str, context: Context, expr: FormatStringExpr) -> Optional[Checkers]: """Returns a tuple of check functions that check whether, respectively, a node or a type is compatible with 'type'. 
Return None in case of an """ expected_type = self.conversion_type(type, context, expr) if expected_type is None: return None def check_type(type: Type) -> None: assert expected_type is not None self.chk.check_subtype(type, expected_type, context, messages.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION, 'expression has type', 'placeholder has type') def check_expr(expr: Expression) -> None: type = self.accept(expr, expected_type) check_type(type) return check_expr, check_type def checkers_for_c_type(self, type: str, context: Context, expr: FormatStringExpr) -> Optional[Checkers]: """Returns a tuple of check functions that check whether, respectively, a node or a type is compatible with 'type' that is a character type """ expected_type = self.conversion_type(type, context, expr) if expected_type is None: return None def check_type(type: Type) -> None: assert expected_type is not None self.chk.check_subtype(type, expected_type, context, messages.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION, 'expression has type', 'placeholder has type') def check_expr(expr: Expression) -> None: """int, or str with length 1""" type = self.accept(expr, expected_type) if isinstance(expr, (StrExpr, BytesExpr)) and len(cast(StrExpr, expr).value) != 1: self.msg.requires_int_or_char(context) check_type(type) return check_expr, check_type def conversion_type(self, p: str, context: Context, expr: FormatStringExpr) -> Optional[Type]: """Return the type that is accepted for a string interpolation conversion specifier type. Note that both Python's float (e.g. %f) and integer (e.g. %d) specifier types accept both float and integers. """ if p == 'b': if self.chk.options.python_version < (3, 5): self.msg.fail("Format character 'b' is only supported in Python 3.5 and later", context) return None if not isinstance(expr, BytesExpr): self.msg.fail("Format character 'b' is only supported on bytes patterns", context) return None return self.named_type('builtins.bytes') elif p == 'a': if self.chk.options.python_version < (3, 0): self.msg.fail("Format character 'a' is only supported in Python 3", context) return None # todo: return type object? return AnyType(TypeOfAny.special_form) elif p in ['s', 'r']: return AnyType(TypeOfAny.special_form) elif p in ['d', 'i', 'o', 'u', 'x', 'X', 'e', 'E', 'f', 'F', 'g', 'G']: return UnionType([self.named_type('builtins.int'), self.named_type('builtins.float')]) elif p in ['c']: return UnionType([self.named_type('builtins.int'), self.named_type('builtins.float'), self.named_type('builtins.str')]) else: self.msg.unsupported_placeholder(p, context) return None # # Helpers # def named_type(self, name: str) -> Instance: """Return an instance type with type given by the name and no type arguments. Alias for TypeChecker.named_type. """ return self.chk.named_type(name) def accept(self, expr: Expression, context: Optional[Type] = None) -> Type: """Type check a node. 
Alias for TypeChecker.accept.""" return self.chk.expr_checker.accept(expr, context) mypy-0.560/mypy/constraints.py0000644€tŠÔÚ€2›s®0000005635613215007205022625 0ustar jukkaDROPBOX\Domain Users00000000000000"""Type inference constraints.""" from typing import Iterable, List, Optional, Sequence from mypy import experiments from mypy.types import ( CallableType, Type, TypeVisitor, UnboundType, AnyType, NoneTyp, TypeVarType, Instance, TupleType, TypedDictType, UnionType, Overloaded, ErasedType, PartialType, DeletedType, UninhabitedType, TypeType, TypeVarId, TypeQuery, is_named_instance, TypeOfAny ) from mypy.maptype import map_instance_to_supertype from mypy import nodes import mypy.subtypes from mypy.sametypes import is_same_type from mypy.erasetype import erase_typevars SUBTYPE_OF = 0 # type: int SUPERTYPE_OF = 1 # type: int class Constraint: """A representation of a type constraint. It can be either T <: type or T :> type (T is a type variable). """ type_var = None # type: TypeVarId op = 0 # SUBTYPE_OF or SUPERTYPE_OF target = None # type: Type def __init__(self, type_var: TypeVarId, op: int, target: Type) -> None: self.type_var = type_var self.op = op self.target = target def __repr__(self) -> str: op_str = '<:' if self.op == SUPERTYPE_OF: op_str = ':>' return '{} {} {}'.format(self.type_var, op_str, self.target) def infer_constraints_for_callable( callee: CallableType, arg_types: Sequence[Optional[Type]], arg_kinds: List[int], formal_to_actual: List[List[int]]) -> List[Constraint]: """Infer type variable constraints for a callable and actual arguments. Return a list of constraints. """ constraints = [] # type: List[Constraint] tuple_counter = [0] for i, actuals in enumerate(formal_to_actual): for actual in actuals: actual_arg_type = arg_types[actual] if actual_arg_type is None: continue actual_type = get_actual_type(actual_arg_type, arg_kinds[actual], tuple_counter) c = infer_constraints(callee.arg_types[i], actual_type, SUPERTYPE_OF) constraints.extend(c) return constraints def get_actual_type(arg_type: Type, kind: int, tuple_counter: List[int]) -> Type: """Return the type of an actual argument with the given kind. If the argument is a *arg, return the individual argument item. """ if kind == nodes.ARG_STAR: if isinstance(arg_type, Instance): if arg_type.type.fullname() == 'builtins.list': # List *arg. return arg_type.args[0] elif arg_type.args: # TODO try to map type arguments to Iterable return arg_type.args[0] else: return AnyType(TypeOfAny.from_error) elif isinstance(arg_type, TupleType): # Get the next tuple item of a tuple *arg. tuple_counter[0] += 1 return arg_type.items[tuple_counter[0] - 1] else: return AnyType(TypeOfAny.from_error) elif kind == nodes.ARG_STAR2: if isinstance(arg_type, Instance) and (arg_type.type.fullname() == 'builtins.dict'): # Dict **arg. TODO more general (Mapping) return arg_type.args[1] else: return AnyType(TypeOfAny.from_error) else: # No translation for other kinds. return arg_type def infer_constraints(template: Type, actual: Type, direction: int) -> List[Constraint]: """Infer type constraints. Match a template type, which may contain type variable references, recursively against a type which does not contain (the same) type variable references. The result is a list of type constrains of form 'T is a supertype/subtype of x', where T is a type variable present in the template and x is a type without reference to type variables present in the template. Assume T and S are type variables. 
Now the following results can be calculated (read as '(template, actual) --> result'): (T, X) --> T :> X (X[T], X[Y]) --> T <: Y and T :> Y ((T, T), (X, Y)) --> T :> X and T :> Y ((T, S), (X, Y)) --> T :> X and S :> Y (X[T], Any) --> T <: Any and T :> Any The constraints are represented as Constraint objects. """ # If the template is simply a type variable, emit a Constraint directly. # We need to handle this case before handling Unions for two reasons: # 1. "T <: Union[U1, U2]" is not equivalent to "T <: U1 or T <: U2", # because T can itself be a union (notably, Union[U1, U2] itself). # 2. "T :> Union[U1, U2]" is logically equivalent to "T :> U1 and # T :> U2", but they are not equivalent to the constraint solver, # which never introduces new Union types (it uses join() instead). if isinstance(template, TypeVarType): return [Constraint(template.id, direction, actual)] # Now handle the case of either template or actual being a Union. # For a Union to be a subtype of another type, every item of the Union # must be a subtype of that type, so concatenate the constraints. if direction == SUBTYPE_OF and isinstance(template, UnionType): res = [] for t_item in template.items: res.extend(infer_constraints(t_item, actual, direction)) return res if direction == SUPERTYPE_OF and isinstance(actual, UnionType): res = [] for a_item in actual.items: res.extend(infer_constraints(template, a_item, direction)) return res # Now the potential subtype is known not to be a Union or a type # variable that we are solving for. In that case, for a Union to # be a supertype of the potential subtype, some item of the Union # must be a supertype of it. if direction == SUBTYPE_OF and isinstance(actual, UnionType): # If some of items is not a complete type, disregard that. items = simplify_away_incomplete_types(actual.items) # We infer constraints eagerly -- try to find constraints for a type # variable if possible. This seems to help with some real-world # use cases. return any_constraints( [infer_constraints_if_possible(template, a_item, direction) for a_item in items], eager=True) if direction == SUPERTYPE_OF and isinstance(template, UnionType): # When the template is a union, we are okay with leaving some # type variables indeterminate. This helps with some special # cases, though this isn't very principled. return any_constraints( [infer_constraints_if_possible(t_item, actual, direction) for t_item in template.items], eager=False) # Remaining cases are handled by ConstraintBuilderVisitor. return template.accept(ConstraintBuilderVisitor(actual, direction)) def infer_constraints_if_possible(template: Type, actual: Type, direction: int) -> Optional[List[Constraint]]: """Like infer_constraints, but return None if the input relation is known to be unsatisfiable, for example if template=List[T] and actual=int. (In this case infer_constraints would return [], just like it would for an automatically satisfied relation like template=List[T] and actual=object.) """ if (direction == SUBTYPE_OF and not mypy.subtypes.is_subtype(erase_typevars(template), actual)): return None if (direction == SUPERTYPE_OF and not mypy.subtypes.is_subtype(actual, erase_typevars(template))): return None return infer_constraints(template, actual, direction) def any_constraints(options: List[Optional[List[Constraint]]], eager: bool) -> List[Constraint]: """Deduce what we can from a collection of constraint lists. It's a given that at least one of the lists must be satisfied. 
A None element in the list of options represents an unsatisfiable constraint and is ignored. Ignore empty constraint lists if eager is true -- they are always trivially satisfiable. """ if eager: valid_options = [option for option in options if option] else: valid_options = [option for option in options if option is not None] if len(valid_options) == 1: return valid_options[0] elif (len(valid_options) > 1 and all(is_same_constraints(valid_options[0], c) for c in valid_options[1:])): # Multiple sets of constraints that are all the same. Just pick any one of them. # TODO: More generally, if a given (variable, direction) pair appears in # every option, combine the bounds with meet/join. return valid_options[0] # Otherwise, there are either no valid options or multiple, inconsistent valid # options. Give up and deduce nothing. return [] def is_same_constraints(x: List[Constraint], y: List[Constraint]) -> bool: for c1 in x: if not any(is_same_constraint(c1, c2) for c2 in y): return False for c1 in y: if not any(is_same_constraint(c1, c2) for c2 in x): return False return True def is_same_constraint(c1: Constraint, c2: Constraint) -> bool: return (c1.type_var == c2.type_var and c1.op == c2.op and is_same_type(c1.target, c2.target)) def simplify_away_incomplete_types(types: List[Type]) -> List[Type]: complete = [typ for typ in types if is_complete_type(typ)] if complete: return complete else: return types def is_complete_type(typ: Type) -> bool: """Is a type complete? A complete doesn't have uninhabited type components or (when not in strict optional mode) None components. """ return typ.accept(CompleteTypeVisitor()) class CompleteTypeVisitor(TypeQuery[bool]): def __init__(self) -> None: super().__init__(all) def visit_uninhabited_type(self, t: UninhabitedType) -> bool: return False class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]): """Visitor class for inferring type constraints.""" # The type that is compared against a template # TODO: The value may be None. Is that actually correct? actual = None # type: Type def __init__(self, actual: Type, direction: int) -> None: # Direction must be SUBTYPE_OF or SUPERTYPE_OF. self.actual = actual self.direction = direction # Trivial leaf types def visit_unbound_type(self, template: UnboundType) -> List[Constraint]: return [] def visit_any(self, template: AnyType) -> List[Constraint]: return [] def visit_none_type(self, template: NoneTyp) -> List[Constraint]: return [] def visit_uninhabited_type(self, template: UninhabitedType) -> List[Constraint]: return [] def visit_erased_type(self, template: ErasedType) -> List[Constraint]: return [] def visit_deleted_type(self, template: DeletedType) -> List[Constraint]: return [] # Errors def visit_partial_type(self, template: PartialType) -> List[Constraint]: # We can't do anything useful with a partial type here. 
assert False, "Internal error" # Non-trivial leaf type def visit_type_var(self, template: TypeVarType) -> List[Constraint]: assert False, ("Unexpected TypeVarType in ConstraintBuilderVisitor" " (should have been handled in infer_constraints)") # Non-leaf types def visit_instance(self, template: Instance) -> List[Constraint]: original_actual = actual = self.actual res = [] # type: List[Constraint] if isinstance(actual, CallableType) and actual.fallback is not None: actual = actual.fallback if isinstance(actual, TypedDictType): actual = actual.as_anonymous().fallback if isinstance(actual, Instance): instance = actual # We always try nominal inference if possible, # it is much faster than the structural one. if (self.direction == SUBTYPE_OF and template.type.has_base(instance.type.fullname())): mapped = map_instance_to_supertype(template, instance.type) for i in range(len(instance.args)): # The constraints for generic type parameters are # invariant. Include constraints from both directions # to achieve the effect. res.extend(infer_constraints( mapped.args[i], instance.args[i], self.direction)) res.extend(infer_constraints( mapped.args[i], instance.args[i], neg_op(self.direction))) return res elif (self.direction == SUPERTYPE_OF and instance.type.has_base(template.type.fullname())): mapped = map_instance_to_supertype(instance, template.type) for j in range(len(template.args)): # The constraints for generic type parameters are # invariant. res.extend(infer_constraints( template.args[j], mapped.args[j], self.direction)) res.extend(infer_constraints( template.args[j], mapped.args[j], neg_op(self.direction))) return res if (template.type.is_protocol and self.direction == SUPERTYPE_OF and # We avoid infinite recursion for structural subtypes by checking # whether this type already appeared in the inference chain. # This is a conservative way break the inference cycles. # It never produces any "false" constraints but gives up soon # on purely structural inference cycles, see #3829. not any(is_same_type(template, t) for t in template.type.inferring) and mypy.subtypes.is_subtype(instance, erase_typevars(template))): template.type.inferring.append(template) self.infer_constraints_from_protocol_members(res, instance, template, original_actual, template) template.type.inferring.pop() return res elif (instance.type.is_protocol and self.direction == SUBTYPE_OF and # We avoid infinite recursion for structural subtypes also here. not any(is_same_type(instance, i) for i in instance.type.inferring) and mypy.subtypes.is_subtype(erase_typevars(template), instance)): instance.type.inferring.append(instance) self.infer_constraints_from_protocol_members(res, instance, template, template, instance) instance.type.inferring.pop() return res if isinstance(actual, AnyType): # IDEA: Include both ways, i.e. add negation as well? 
return self.infer_against_any(template.args, actual) if (isinstance(actual, TupleType) and (is_named_instance(template, 'typing.Iterable') or is_named_instance(template, 'typing.Container') or is_named_instance(template, 'typing.Sequence') or is_named_instance(template, 'typing.Reversible')) and self.direction == SUPERTYPE_OF): for item in actual.items: cb = infer_constraints(template.args[0], item, SUPERTYPE_OF) res.extend(cb) return res elif isinstance(actual, TupleType) and self.direction == SUPERTYPE_OF: return infer_constraints(template, actual.fallback, self.direction) else: return [] def infer_constraints_from_protocol_members(self, res: List[Constraint], instance: Instance, template: Instance, subtype: Type, protocol: Instance) -> None: """Infer constraints for situations where either 'template' or 'instance' is a protocol. The 'protocol' is the one of two that is an instance of protocol type, 'subtype' is the type used to bind self during inference. Currently, we just infer constrains for every protocol member type (both ways for settable members). """ for member in protocol.type.protocol_members: inst = mypy.subtypes.find_member(member, instance, subtype) temp = mypy.subtypes.find_member(member, template, subtype) assert inst is not None and temp is not None # The above is safe since at this point we know that 'instance' is a subtype # of (erased) 'template', therefore it defines all protocol members res.extend(infer_constraints(temp, inst, self.direction)) if (mypy.subtypes.IS_SETTABLE in mypy.subtypes.get_member_flags(member, protocol.type)): # Settable members are invariant, add opposite constraints res.extend(infer_constraints(temp, inst, neg_op(self.direction))) def visit_callable_type(self, template: CallableType) -> List[Constraint]: if isinstance(self.actual, CallableType): cactual = self.actual # FIX verify argument counts # FIX what if one of the functions is generic res = [] # type: List[Constraint] # We can't infer constraints from arguments if the template is Callable[..., T] (with # literal '...'). if not template.is_ellipsis_args: # The lengths should match, but don't crash (it will error elsewhere). for t, a in zip(template.arg_types, cactual.arg_types): # Negate direction due to function argument type contravariance. res.extend(infer_constraints(t, a, neg_op(self.direction))) res.extend(infer_constraints(template.ret_type, cactual.ret_type, self.direction)) return res elif isinstance(self.actual, AnyType): # FIX what if generic res = self.infer_against_any(template.arg_types, self.actual) any_type = AnyType(TypeOfAny.from_another_any, source_any=self.actual) res.extend(infer_constraints(template.ret_type, any_type, self.direction)) return res elif isinstance(self.actual, Overloaded): return self.infer_against_overloaded(self.actual, template) elif isinstance(self.actual, TypeType): return infer_constraints(template.ret_type, self.actual.item, self.direction) elif isinstance(self.actual, Instance): # Instances with __call__ method defined are considered structural # subtypes of Callable with a compatible signature. call = mypy.subtypes.find_member('__call__', self.actual, self.actual) if call: return infer_constraints(template, call, self.direction) else: return [] else: return [] def infer_against_overloaded(self, overloaded: Overloaded, template: CallableType) -> List[Constraint]: # Create constraints by matching an overloaded type against a template. # This is tricky to do in general. 
We cheat by only matching against # the first overload item, and by only matching the return type. This # seems to work somewhat well, but we should really use a more # reliable technique. item = find_matching_overload_item(overloaded, template) return infer_constraints(template.ret_type, item.ret_type, self.direction) def visit_tuple_type(self, template: TupleType) -> List[Constraint]: actual = self.actual if isinstance(actual, TupleType) and len(actual.items) == len(template.items): res = [] # type: List[Constraint] for i in range(len(template.items)): res.extend(infer_constraints(template.items[i], actual.items[i], self.direction)) return res elif isinstance(actual, AnyType): return self.infer_against_any(template.items, actual) else: return [] def visit_typeddict_type(self, template: TypedDictType) -> List[Constraint]: actual = self.actual if isinstance(actual, TypedDictType): res = [] # type: List[Constraint] # NOTE: Non-matching keys are ignored. Compatibility is checked # elsewhere so this shouldn't be unsafe. for (item_name, template_item_type, actual_item_type) in template.zip(actual): res.extend(infer_constraints(template_item_type, actual_item_type, self.direction)) return res elif isinstance(actual, AnyType): return self.infer_against_any(template.items.values(), actual) else: return [] def visit_union_type(self, template: UnionType) -> List[Constraint]: assert False, ("Unexpected UnionType in ConstraintBuilderVisitor" " (should have been handled in infer_constraints)") def infer_against_any(self, types: Iterable[Type], any_type: AnyType) -> List[Constraint]: res = [] # type: List[Constraint] for t in types: res.extend(infer_constraints(t, any_type, self.direction)) return res def visit_overloaded(self, template: Overloaded) -> List[Constraint]: res = [] # type: List[Constraint] for t in template.items(): res.extend(infer_constraints(t, self.actual, self.direction)) return res def visit_type_type(self, template: TypeType) -> List[Constraint]: if isinstance(self.actual, CallableType): return infer_constraints(template.item, self.actual.ret_type, self.direction) elif isinstance(self.actual, Overloaded): return infer_constraints(template.item, self.actual.items()[0].ret_type, self.direction) elif isinstance(self.actual, TypeType): return infer_constraints(template.item, self.actual.item, self.direction) elif isinstance(self.actual, AnyType): return infer_constraints(template.item, self.actual, self.direction) else: return [] def neg_op(op: int) -> int: """Map SubtypeOf to SupertypeOf and vice versa.""" if op == SUBTYPE_OF: return SUPERTYPE_OF elif op == SUPERTYPE_OF: return SUBTYPE_OF else: raise ValueError('Invalid operator {}'.format(op)) def find_matching_overload_item(overloaded: Overloaded, template: CallableType) -> CallableType: """Disambiguate overload item against a template.""" items = overloaded.items() for item in items: # Return type may be indeterminate in the template, so ignore it when performing a # subtype check. if mypy.subtypes.is_callable_subtype(item, template, ignore_return=True): return item # Fall back to the first item if we can't find a match. This is totally arbitrary -- # maybe we should just bail out at this point. 
return items[0] mypy-0.560/mypy/defaults.py0000644€tŠÔÚ€2›s®0000000020213215007205022040 0ustar jukkaDROPBOX\Domain Users00000000000000PYTHON2_VERSION = (2, 7) PYTHON3_VERSION = (3, 6) PYTHON3_VERSION_MIN = (3, 3) CACHE_DIR = '.mypy_cache' CONFIG_FILE = 'mypy.ini' mypy-0.560/mypy/dmypy.py0000644€tŠÔÚ€2›s®0000002426513215007206021413 0ustar jukkaDROPBOX\Domain Users00000000000000"""Client for mypy daemon mode. Highly experimental! Only supports UNIX-like systems. This manages a daemon process which keeps useful state in memory rather than having to read it back from disk on each run. """ import argparse import json import os import signal import socket import sys import time from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple, TypeVar from mypy.dmypy_util import STATUS_FILE, receive # Argument parser. Subparsers are tied to action functions by the # @action(subparse) decorator. parser = argparse.ArgumentParser(description="Client for mypy daemon mode", fromfile_prefix_chars='@') parser.set_defaults(action=None) subparsers = parser.add_subparsers() start_parser = subparsers.add_parser('start', help="Start daemon") start_parser.add_argument('--log-file', metavar='FILE', type=str, help="Direct daemon stdout/stderr to FILE") start_parser.add_argument('flags', metavar='FLAG', nargs='*', type=str, help="Regular mypy flags (precede with --)") status_parser = subparsers.add_parser('status', help="Show daemon status") stop_parser = subparsers.add_parser('stop', help="Stop daemon (asks it politely to go away)") kill_parser = subparsers.add_parser('kill', help="Kill daemon (kills the process)") restart_parser = subparsers.add_parser('restart', help="Restart daemon (stop or kill followed by start)") restart_parser.add_argument('--log-file', metavar='FILE', type=str, help="Direct daemon stdout/stderr to FILE") restart_parser.add_argument('flags', metavar='FLAG', nargs='*', type=str, help="Regular mypy flags (precede with --)") check_parser = subparsers.add_parser('check', help="Check some files (requires running daemon)") check_parser.add_argument('-q', '--quiet', action='store_true', help="Suppress instrumentation stats") check_parser.add_argument('files', metavar='FILE', nargs='+', help="File (or directory) to check") recheck_parser = subparsers.add_parser('recheck', help="Check the same files as the most previous check run (requires running daemon)") recheck_parser.add_argument('-q', '--quiet', action='store_true', help="Suppress instrumentation stats") hang_parser = subparsers.add_parser('hang', help="Hang for 100 seconds") daemon_parser = subparsers.add_parser('daemon', help="Run daemon in foreground") daemon_parser.add_argument('flags', metavar='FLAG', nargs='*', type=str, help="Regular mypy flags (precede with --)") help_parser = subparsers.add_parser('help') def main() -> None: """The code is top-down.""" args = parser.parse_args() if not args.action: parser.print_usage() else: args.action(args) ActionFunction = Callable[[argparse.Namespace], None] def action(subparser: argparse.ArgumentParser) -> Callable[[ActionFunction], None]: """Decorator to tie an action function to a subparser.""" def register(func: ActionFunction) -> None: subparser.set_defaults(action=func) return register # Action functions (run in client from command line). # TODO: Use a separate exception instead of SystemExit to indicate failures. @action(start_parser) def do_start(args: argparse.Namespace) -> None: """Start daemon (it must not already be running). This is where mypy flags are set. 
Setting flags is a bit awkward; you have to use e.g.: dmypy start -- --strict since we don't want to duplicate mypy's huge list of flags. """ try: pid, sockname = get_status() except SystemExit as err: # Lazy import so this import doesn't slow down other commands. from mypy.dmypy_server import daemonize, Server if daemonize(Server(args.flags).serve, args.log_file): sys.exit(1) wait_for_server() else: sys.exit("Daemon is still alive") @action(status_parser) def do_status(args: argparse.Namespace) -> None: """Print daemon status. This verifies that it is responsive to requests. """ status = read_status() show_stats(status) check_status(status) try: response = request('status') except Exception as err: print("Daemon is stuck; consider %s kill" % sys.argv[0]) raise else: show_stats(response) @action(stop_parser) def do_stop(args: argparse.Namespace) -> None: """Stop daemon politely (via a request).""" try: response = request('stop') except Exception as err: sys.exit("Daemon is stuck; consider %s kill" % sys.argv[0]) else: if response: print("Stop response:", response) else: print("Daemon stopped") @action(kill_parser) def do_kill(args: argparse.Namespace) -> None: """Kill daemon rudely (by killing the process).""" pid, sockname = get_status() try: os.kill(pid, signal.SIGKILL) except os.error as err: sys.exit(str(err)) else: print("Daemon killed") @action(restart_parser) def do_restart(args: argparse.Namespace) -> None: """Restart daemon. We first try to stop it politely if it's running. This also sets mypy flags (and has the same issues as start). """ try: response = request('stop') except SystemExit: pass else: if response: sys.exit("Status: %s" % str(response)) else: print("Daemon stopped") # Lazy import so this import doesn't slow down other commands. from mypy.dmypy_server import daemonize, Server if daemonize(Server(args.flags).serve, args.log_file): sys.exit(1) wait_for_server() def wait_for_server(timeout: float = 5.0) -> None: """Wait until the server is up. Exit if it doesn't happen within the timeout. """ endtime = time.time() + timeout while time.time() < endtime: try: data = read_status() except SystemExit: # If the file isn't there yet, retry later. time.sleep(0.1) continue # If the file's content is bogus or the process is dead, fail. pid, sockname = check_status(data) print("Daemon started") return sys.exit("Timed out waiting for daemon to start") @action(check_parser) def do_check(args: argparse.Namespace) -> None: """Ask the daemon to check a list of files.""" t0 = time.time() response = request('check', files=args.files) t1 = time.time() response['roundtrip_time'] = t1 - t0 check_output(response, args.quiet) @action(recheck_parser) def do_recheck(args: argparse.Namespace) -> None: """Ask the daemon to check the same list of files it checked most recently. This doesn't work across daemon restarts. 
""" t0 = time.time() response = request('recheck') t1 = time.time() response['roundtrip_time'] = t1 - t0 check_output(response, args.quiet) def check_output(response: Dict[str, Any], quiet: bool) -> None: """Print the output from a check or recheck command.""" try: out, err, status = response['out'], response['err'], response['status'] except KeyError: sys.exit("Response: %s" % str(response)) sys.stdout.write(out) sys.stderr.write(err) if not quiet: show_stats(response) if status: sys.exit(status) def show_stats(response: Mapping[str, object]) -> None: for key, value in sorted(response.items()): if key not in ('out', 'err'): print("%-24s: %10s" % (key, "%.3f" % value if isinstance(value, float) else value)) @action(hang_parser) def do_hang(args: argparse.Namespace) -> None: """Hang for 100 seconds, as a debug hack.""" request('hang') @action(daemon_parser) def do_daemon(args: argparse.Namespace) -> None: """Serve requests in the foreground.""" # Lazy import so this import doesn't slow down other commands. from mypy.dmypy_server import Server Server(args.flags).serve() @action(help_parser) def do_help(args: argparse.Namespace) -> None: """Print full help (same as dmypy --help).""" parser.print_help() # Client-side infrastructure. def request(command: str, **kwds: object) -> Dict[str, Any]: """Send a request to the daemon. Return the JSON dict with the response. """ args = dict(kwds) if command: args.update(command=command) data = json.dumps(args) pid, sockname = get_status() sock = socket.socket(socket.AF_UNIX) sock.connect(sockname) sock.sendall(data.encode('utf8')) sock.shutdown(socket.SHUT_WR) try: response = receive(sock) except OSError as err: return {'error': str(err)} else: return response finally: sock.close() def get_status() -> Tuple[int, str]: """Read status file and check if the process is alive. Return (pid, sockname) on success. Raise SystemExit() if something's wrong. """ data = read_status() return check_status(data) def check_status(data: Dict[str, Any]) -> Tuple[int, str]: """Check if the process is alive. Return (pid, sockname) on success. Raise SystemExit() if something's wrong. """ if 'pid' not in data: raise SystemExit("Invalid status file (no pid field)") pid = data['pid'] if not isinstance(pid, int): raise SystemExit("pid field is not an int") try: os.kill(pid, 0) except OSError as err: raise SystemExit("Daemon has died") if 'sockname' not in data: raise SystemExit("Invalid status file (no sockname field)") sockname = data['sockname'] if not isinstance(sockname, str): raise SystemExit("sockname field is not a string") return pid, sockname def read_status() -> Dict[str, object]: """Read status file.""" if not os.path.isfile(STATUS_FILE): raise SystemExit("No status file found") with open(STATUS_FILE) as f: try: data = json.load(f) except Exception as err: raise SystemExit("Malformed status file (not JSON)") if not isinstance(data, dict): raise SystemExit("Invalid status file (not a dict)") return data # Run main(). if __name__ == '__main__': main() mypy-0.560/mypy/dmypy_server.py0000644€tŠÔÚ€2›s®0000002427113215007206022776 0ustar jukkaDROPBOX\Domain Users00000000000000"""Client for mypy daemon mode. Highly experimental! Only supports UNIX-like systems. This manages a daemon process which keeps useful state in memory rather than having to read it back from disk on each run. 
""" import gc import io import json import os import socket import sys import time from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence # TODO: Import all mypy modules lazily to speed up client startup time. import mypy.build import mypy.errors import mypy.main from mypy.dmypy_util import STATUS_FILE, receive def daemonize(func: Callable[[], None], log_file: Optional[str] = None) -> int: """Arrange to call func() in a grandchild of the current process. Return 0 for success, exit status for failure, negative if subprocess killed by signal. """ # See https://stackoverflow.com/questions/473620/how-do-you-create-a-daemon-in-python sys.stdout.flush() sys.stderr.flush() pid = os.fork() if pid: # Parent process: wait for child in case things go bad there. npid, sts = os.waitpid(pid, 0) sig = sts & 0xff if sig: print("Child killed by signal", sig) return -sig sts = sts >> 8 if sts: print("Child exit status", sts) return sts # Child process: do a bunch of UNIX stuff and then fork a grandchild. try: os.setsid() # Detach controlling terminal os.umask(0o27) devnull = os.open('/dev/null', os.O_RDWR) os.dup2(devnull, 0) os.dup2(devnull, 1) os.dup2(devnull, 2) os.close(devnull) pid = os.fork() if pid: # Child is done, exit to parent. os._exit(0) # Grandchild: run the server. if log_file: sys.stdout = sys.stderr = open(log_file, 'a', buffering=1) fd = sys.stdout.fileno() os.dup2(fd, 2) os.dup2(fd, 1) func() finally: # Make sure we never get back into the caller. os._exit(1) # Server code. SOCKET_NAME = 'dmypy.sock' # In current directory. class Server: # NOTE: the instance is constructed in the parent process but # serve() is called in the grandchild (by daemonize()). def __init__(self, flags: List[str]) -> None: """Initialize the server with the desired mypy flags.""" self.saved_cache = {} # type: mypy.build.SavedCache sources, options = mypy.main.process_options(['-i'] + flags, False) if sources: sys.exit("dmypy: start/restart does not accept sources") if options.report_dirs: sys.exit("dmypy: start/restart cannot generate reports") if not options.incremental: sys.exit("dmypy: start/restart should not disable incremental mode") if options.quick_and_dirty: sys.exit("dmypy: start/restart should not specify quick_and_dirty mode") self.options = options if os.path.isfile(STATUS_FILE): os.unlink(STATUS_FILE) def serve(self) -> None: """Serve requests, synchronously (no thread or fork).""" try: sock = self.create_listening_socket() try: with open(STATUS_FILE, 'w') as f: json.dump({'pid': os.getpid(), 'sockname': sock.getsockname()}, f) f.write('\n') # I like my JSON with trailing newline while True: conn, addr = sock.accept() data = receive(conn) resp = {} # type: Dict[str, Any] if 'command' not in data: resp = {'error': "No command found in request"} else: command = data['command'] if not isinstance(command, str): resp = {'error': "Command is not a string"} else: command = data.pop('command') resp = self.run_command(command, data) try: conn.sendall(json.dumps(resp).encode('utf8')) except OSError as err: pass # Maybe the client hung up conn.close() if command == 'stop': sock.close() sys.exit(0) finally: os.unlink(STATUS_FILE) finally: os.unlink(self.sockname) def create_listening_socket(self) -> socket.socket: """Create the socket and set it up for listening.""" self.sockname = os.path.abspath(SOCKET_NAME) if os.path.exists(self.sockname): os.unlink(self.sockname) sock = socket.socket(socket.AF_UNIX) sock.bind(self.sockname) sock.listen(1) return sock def run_command(self, command: str, 
data: Mapping[str, object]) -> Dict[str, object]: """Run a specific command from the registry.""" key = 'cmd_' + command method = getattr(self.__class__, key, None) if method is None: return {'error': "Unrecognized command '%s'" % command} else: return method(self, **data) # Command functions (run in the server via RPC). def cmd_status(self) -> Dict[str, object]: """Return daemon status.""" res = {} # type: Dict[str, object] res.update(get_meminfo()) return res def cmd_stop(self) -> Dict[str, object]: """Stop daemon.""" return {} last_sources = None def cmd_check(self, files: Sequence[str]) -> Dict[str, object]: """Check a list of files.""" # TODO: Move this into check(), in case one of the args is a directory. # Capture stdout/stderr and catch SystemExit while processing the source list. save_stdout = sys.stdout save_stderr = sys.stderr try: sys.stdout = stdout = io.StringIO() sys.stderr = stderr = io.StringIO() self.last_sources = mypy.main.create_source_list(files, self.options) except SystemExit as err: return {'out': stdout.getvalue(), 'err': stderr.getvalue(), 'status': err.code} finally: sys.stdout = save_stdout sys.stderr = save_stderr return self.check(self.last_sources) def cmd_recheck(self) -> Dict[str, object]: """Check the same list of files we checked most recently.""" if not self.last_sources: return {'error': "Command 'recheck' is only valid after a 'check' command"} return self.check(self.last_sources) # Needed by tests. last_manager = None # type: Optional[mypy.build.BuildManager] def check(self, sources: List[mypy.build.BuildSource], alt_lib_path: Optional[str] = None) -> Dict[str, Any]: self.last_manager = None with GcLogger() as gc_result: try: # saved_cache is mutated in place. res = mypy.build.build(sources, self.options, saved_cache=self.saved_cache, alt_lib_path=alt_lib_path) msgs = res.errors self.last_manager = res.manager # type: Optional[mypy.build.BuildManager] except mypy.errors.CompileError as err: msgs = err.messages if msgs: msgs.append("") response = {'out': "\n".join(msgs), 'err': "", 'status': 1} else: response = {'out': "", 'err': "", 'status': 0} response.update(gc_result.get_stats()) response.update(get_meminfo()) if self.last_manager is not None: response.update(self.last_manager.stats_summary()) return response def cmd_hang(self) -> Dict[str, object]: """Hang for 100 seconds, as a debug hack.""" time.sleep(100) return {} # Misc utilities. 
class GcLogger: """Context manager to log GC stats and overall time.""" def __enter__(self) -> 'GcLogger': self.gc_start_time = None # type: Optional[float] self.gc_time = 0.0 self.gc_calls = 0 self.gc_collected = 0 self.gc_uncollectable = 0 gc.callbacks.append(self.gc_callback) self.start_time = time.time() return self def gc_callback(self, phase: str, info: Mapping[str, int]) -> None: if phase == 'start': assert self.gc_start_time is None, "Start phase out of sequence" self.gc_start_time = time.time() elif phase == 'stop': assert self.gc_start_time is not None, "Stop phase out of sequence" self.gc_calls += 1 self.gc_time += time.time() - self.gc_start_time self.gc_start_time = None self.gc_collected += info['collected'] self.gc_uncollectable += info['uncollectable'] else: assert False, "Unrecognized gc phase (%r)" % (phase,) def __exit__(self, *args: object) -> None: while self.gc_callback in gc.callbacks: gc.callbacks.remove(self.gc_callback) def get_stats(self) -> Dict[str, float]: end_time = time.time() result = {} result['gc_time'] = self.gc_time result['gc_calls'] = self.gc_calls result['gc_collected'] = self.gc_collected result['gc_uncollectable'] = self.gc_uncollectable result['build_time'] = end_time - self.start_time return result MiB = 2**20 def get_meminfo() -> Mapping[str, float]: # See https://stackoverflow.com/questions/938733/total-memory-used-by-python-process import resource # Since it doesn't exist on Windows. res = {} rusage = resource.getrusage(resource.RUSAGE_SELF) if sys.platform == 'darwin': factor = 1 else: factor = 1024 # Linux res['memory_maxrss_mib'] = rusage.ru_maxrss * factor / MiB # If we can import psutil, use it for some extra data try: import psutil # type: ignore # It's not in typeshed yet except ImportError: pass else: process = psutil.Process(os.getpid()) meminfo = process.memory_info() res['memory_rss_mib'] = meminfo.rss / MiB res['memory_vms_mib'] = meminfo.vms / MiB return res mypy-0.560/mypy/dmypy_util.py0000644€tŠÔÚ€2›s®0000000124013215007206022434 0ustar jukkaDROPBOX\Domain Users00000000000000"""Shared code between dmypy.py and dmypy_server.py. This should be pretty lightweight and not depend on other mypy code. """ import json import socket from typing import Any STATUS_FILE = 'dmypy.json' def receive(sock: socket.socket) -> Any: """Receive JSON data from a socket until EOF.""" bdata = bytearray() while True: more = sock.recv(100000) if not more: break bdata.extend(more) if not bdata: raise OSError("No data received") data = json.loads(bdata.decode('utf8')) if not isinstance(data, dict): raise OSError("Data received is not a dict (%s)" % str(type(data))) return data mypy-0.560/mypy/erasetype.py0000644€tŠÔÚ€2›s®0000000671013215007205022244 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Container, Callable from mypy.types import ( Type, TypeVisitor, UnboundType, AnyType, NoneTyp, TypeVarId, Instance, TypeVarType, CallableType, TupleType, TypedDictType, UnionType, Overloaded, ErasedType, PartialType, DeletedType, TypeTranslator, TypeList, UninhabitedType, TypeType, TypeOfAny ) from mypy import experiments def erase_type(typ: Type) -> Type: """Erase any type variables from a type. Also replace tuple types with the corresponding concrete types. Replace callable types with empty callable types. Examples: A -> A B[X] -> B[Any] Tuple[A, B] -> tuple Callable[...] 
-> Callable[[], None] Type[X] -> Type[Any] """ return typ.accept(EraseTypeVisitor()) class EraseTypeVisitor(TypeVisitor[Type]): def visit_unbound_type(self, t: UnboundType) -> Type: assert False, 'Not supported' def visit_any(self, t: AnyType) -> Type: return t def visit_none_type(self, t: NoneTyp) -> Type: return t def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return t def visit_erased_type(self, t: ErasedType) -> Type: # Should not get here. raise RuntimeError() def visit_partial_type(self, t: PartialType) -> Type: # Should not get here. raise RuntimeError() def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_instance(self, t: Instance) -> Type: return Instance(t.type, [AnyType(TypeOfAny.special_form)] * len(t.args), t.line) def visit_type_var(self, t: TypeVarType) -> Type: return AnyType(TypeOfAny.special_form) def visit_callable_type(self, t: CallableType) -> Type: # We must preserve the fallback type for overload resolution to work. ret_type = NoneTyp() # type: Type return CallableType([], [], [], ret_type, t.fallback) def visit_overloaded(self, t: Overloaded) -> Type: return t.items()[0].accept(self) def visit_tuple_type(self, t: TupleType) -> Type: return t.fallback.accept(self) def visit_typeddict_type(self, t: TypedDictType) -> Type: return t.fallback.accept(self) def visit_union_type(self, t: UnionType) -> Type: erased_items = [erase_type(item) for item in t.items] return UnionType.make_simplified_union(erased_items) def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(t.item.accept(self), line=t.line) def erase_typevars(t: Type, ids_to_erase: Optional[Container[TypeVarId]] = None) -> Type: """Replace all type variables in a type with any, or just the ones in the provided collection. """ def erase_id(id: TypeVarId) -> bool: if ids_to_erase is None: return True return id in ids_to_erase return t.accept(TypeVarEraser(erase_id, AnyType(TypeOfAny.special_form))) def replace_meta_vars(t: Type, target_type: Type) -> Type: """Replace unification variables in a type with the target type.""" return t.accept(TypeVarEraser(lambda id: id.is_meta_var(), target_type)) class TypeVarEraser(TypeTranslator): """Implementation of type erasure""" def __init__(self, erase_id: Callable[[TypeVarId], bool], replacement: Type) -> None: self.erase_id = erase_id self.replacement = replacement def visit_type_var(self, t: TypeVarType) -> Type: if self.erase_id(t.id): return self.replacement return t mypy-0.560/mypy/errors.py0000644€tŠÔÚ€2›s®0000005441413215007205021563 0ustar jukkaDROPBOX\Domain Users00000000000000import os.path import sys import traceback from collections import OrderedDict, defaultdict from contextlib import contextmanager from typing import Tuple, List, TypeVar, Set, Dict, Iterator, Optional, cast from mypy.options import Options from mypy.version import __version__ as mypy_version T = TypeVar('T') allowed_duplicates = ['@overload', 'Got:', 'Expected:'] class ErrorInfo: """Representation of a single error message.""" # Description of a sequence of imports that refer to the source file # related to this error. Each item is a (path, line number) tuple. import_ctx = None # type: List[Tuple[str, int]] # The source file that was the source of this error. file = '' # The fully-qualified id of the source module for this error. module = None # type: Optional[str] # The name of the type in which this error is located at. 
type = '' # type: Optional[str] # Unqualified, may be None # The name of the function or member in which this error is located at. function_or_member = '' # type: Optional[str] # Unqualified, may be None # The line number related to this error within file. line = 0 # -1 if unknown # The column number related to this error with file. column = 0 # -1 if unknown # Either 'error', 'note', or 'warning'. severity = '' # The error message. message = '' # If True, we should halt build after the file that generated this error. blocker = False # Only report this particular messages once per program. only_once = False # Fine-grained incremental target where this was reported target = None # type: Optional[str] def __init__(self, import_ctx: List[Tuple[str, int]], file: str, module: Optional[str], typ: Optional[str], function_or_member: Optional[str], line: int, column: int, severity: str, message: str, blocker: bool, only_once: bool, origin: Optional[Tuple[str, int]] = None, target: Optional[str] = None) -> None: self.import_ctx = import_ctx self.file = file self.module = module self.type = typ self.function_or_member = function_or_member self.line = line self.column = column self.severity = severity self.message = message self.blocker = blocker self.only_once = only_once self.origin = origin or (file, line) self.target = target class Errors: """Container for compile errors. This class generates and keeps tracks of compile errors and the current error context (nested imports). """ # List of generated error messages. error_info = None # type: List[ErrorInfo] # Current error context: nested import context/stack, as a list of (path, line) pairs. import_ctx = None # type: List[Tuple[str, int]] # Set of files with errors. error_files = None # type: Set[str] # Path name prefix that is removed from all paths, if set. ignore_prefix = None # type: str # Path to current file. file = None # type: str # Stack of short names of currents types (or None). type_name = None # type: List[Optional[str]] # Stack of short names of current functions or members (or None). function_or_member = None # type: List[Optional[str]] # Ignore errors on these lines of each file. ignored_lines = None # type: Dict[str, Set[int]] # Lines on which an error was actually ignored. used_ignored_lines = None # type: Dict[str, Set[int]] # Files where all errors should be ignored. ignored_files = None # type: Set[str] # Collection of reported only_once messages. only_once_messages = None # type: Set[str] # Set to True to show "In function "foo":" messages. show_error_context = False # type: bool # Set to True to show column numbers in error messages. show_column_numbers = False # type: bool # Stack of active fine-grained incremental checking targets within # a module. The first item is always the current module id. # (See mypy.server.update for more about targets.) 
target = None # type: List[str] def __init__(self, show_error_context: bool = False, show_column_numbers: bool = False) -> None: self.show_error_context = show_error_context self.show_column_numbers = show_column_numbers self.initialize() def initialize(self) -> None: self.error_info = [] self.import_ctx = [] self.error_files = set() self.type_name = [None] self.function_or_member = [None] self.ignored_lines = OrderedDict() self.used_ignored_lines = defaultdict(set) self.ignored_files = set() self.only_once_messages = set() self.target = [] def reset(self) -> None: self.initialize() def copy(self) -> 'Errors': new = Errors(self.show_error_context, self.show_column_numbers) new.file = self.file new.import_ctx = self.import_ctx[:] new.type_name = self.type_name[:] new.function_or_member = self.function_or_member[:] new.target = self.target[:] return new def set_ignore_prefix(self, prefix: str) -> None: """Set path prefix that will be removed from all paths.""" prefix = os.path.normpath(prefix) # Add separator to the end, if not given. if os.path.basename(prefix) != '': prefix += os.sep self.ignore_prefix = prefix def simplify_path(self, file: str) -> str: file = os.path.normpath(file) return remove_path_prefix(file, self.ignore_prefix) def set_file(self, file: str, module: Optional[str], ignored_lines: Optional[Set[int]] = None) -> None: """Set the path and module id of the current file.""" # The path will be simplified later, in render_messages. That way # * 'file' is always a key that uniquely identifies a source file # that mypy read (simplified paths might not be unique); and # * we only have to simplify in one place, while still supporting # reporting errors for files other than the one currently being # processed. self.file = file if module: self.target = [module] def set_file_ignored_lines(self, file: str, ignored_lines: Set[int], ignore_all: bool = False) -> None: self.ignored_lines[file] = ignored_lines if ignore_all: self.ignored_files.add(file) def push_function(self, name: str) -> None: """Set the current function or member short name (it can be None).""" self.push_target_component(name) self.function_or_member.append(name) def pop_function(self) -> None: self.function_or_member.pop() self.pop_target_component() @contextmanager def enter_function(self, name: str) -> Iterator[None]: self.push_function(name) yield self.pop_function() def push_type(self, name: str) -> None: """Set the short name of the current type (it can be None).""" self.push_target_component(name) self.type_name.append(name) def pop_type(self) -> None: self.type_name.pop() self.pop_target_component() def push_target_component(self, name: str) -> None: if self.target and not self.function_or_member[-1]: self.target.append('{}.{}'.format(self.target[-1], name)) def pop_target_component(self) -> None: if self.target and not self.function_or_member[-1]: self.target.pop() def current_target(self) -> Optional[str]: if self.target: return self.target[-1] return None def current_module(self) -> Optional[str]: if self.target: return self.target[0] return None @contextmanager def enter_type(self, name: str) -> Iterator[None]: """Set the short name of the current type (it can be None).""" self.push_type(name) yield self.pop_type() def import_context(self) -> List[Tuple[str, int]]: """Return a copy of the import context.""" return self.import_ctx[:] def set_import_context(self, ctx: List[Tuple[str, int]]) -> None: """Replace the entire import context with a new value.""" self.import_ctx = ctx[:] def report(self, line: int, 
column: int, message: str, blocker: bool = False, severity: str = 'error', file: Optional[str] = None, only_once: bool = False, origin_line: Optional[int] = None, offset: int = 0) -> None: """Report message at the given line using the current error context. Args: line: line number of error message: message to report blocker: if True, don't continue analysis after this error severity: 'error', 'note' or 'warning' file: if non-None, override current file as context only_once: if True, only report this exact message once per build origin_line: if non-None, override current context as origin """ type = self.type_name[-1] # type: Optional[str] if len(self.function_or_member) > 2: type = None # Omit type context if nested function if file is None: file = self.file if offset: message = " " * offset + message info = ErrorInfo(self.import_context(), file, self.current_module(), type, self.function_or_member[-1], line, column, severity, message, blocker, only_once, origin=(self.file, origin_line) if origin_line else None, target=self.current_target()) self.add_error_info(info) def add_error_info(self, info: ErrorInfo) -> None: (file, line) = cast(Tuple[str, int], info.origin) # see issue 1855 if not info.blocker: # Blockers cannot be ignored if file in self.ignored_lines and line in self.ignored_lines[file]: # Annotation requests us to ignore all errors on this line. self.used_ignored_lines[file].add(line) return if file in self.ignored_files: return if info.only_once: if info.message in self.only_once_messages: return self.only_once_messages.add(info.message) self.error_info.append(info) self.error_files.add(file) def generate_unused_ignore_notes(self) -> None: for file, ignored_lines in self.ignored_lines.items(): if not self.is_typeshed_file(file): for line in ignored_lines - self.used_ignored_lines[file]: # Don't use report since add_error_info will ignore the error! info = ErrorInfo(self.import_context(), file, self.current_module(), None, None, line, -1, 'note', "unused 'type: ignore' comment", False, False) self.error_info.append(info) def is_typeshed_file(self, file: str) -> bool: # gross, but no other clear way to tell return 'typeshed' in os.path.normpath(file).split(os.sep) def num_messages(self) -> int: """Return the number of generated messages.""" return len(self.error_info) def is_errors(self) -> bool: """Are there any generated errors?""" return bool(self.error_info) def is_blockers(self) -> bool: """Are the any errors that are blockers?""" return any(err for err in self.error_info if err.blocker) def blocker_module(self) -> Optional[str]: """Return the module with a blocking error, or None if not possible.""" for err in self.error_info: if err.blocker: return err.module return None def is_errors_for_file(self, file: str) -> bool: """Are there any errors for the given file?""" return file in self.error_files def raise_error(self) -> None: """Raise a CompileError with the generated messages. Render the messages suitable for displaying. """ raise CompileError(self.messages(), use_stdout=True, module_with_blocker=self.blocker_module()) def messages(self) -> List[str]: """Return a string list that represents the error messages. Use a form suitable for displaying to the user. 
""" a = [] # type: List[str] errors = self.render_messages(self.sort_messages(self.error_info)) errors = self.remove_duplicates(errors) for file, line, column, severity, message in errors: s = '' if file is not None: if self.show_column_numbers and line is not None and line >= 0 \ and column is not None and column >= 0: srcloc = '{}:{}:{}'.format(file, line, column) elif line is not None and line >= 0: srcloc = '{}:{}'.format(file, line) else: srcloc = file s = '{}: {}: {}'.format(srcloc, severity, message) else: s = message a.append(s) return a def targets(self) -> Set[str]: """Return a set of all targets that contain errors.""" # TODO: Make sure that either target is always defined or that not being defined # is okay for fine-grained incremental checking. return set(info.target for info in self.error_info if info.target) def render_messages(self, errors: List[ErrorInfo]) -> List[Tuple[Optional[str], int, int, str, str]]: """Translate the messages into a sequence of tuples. Each tuple is of form (path, line, col, message. The rendered sequence includes information about error contexts. The path item may be None. If the line item is negative, the line number is not defined for the tuple. """ result = [] # type: List[Tuple[Optional[str], int, int, str, str]] # (path, line, column, severity, message) prev_import_context = [] # type: List[Tuple[str, int]] prev_function_or_member = None # type: Optional[str] prev_type = None # type: Optional[str] for e in errors: # Report module import context, if different from previous message. if not self.show_error_context: pass elif e.import_ctx != prev_import_context: last = len(e.import_ctx) - 1 i = last while i >= 0: path, line = e.import_ctx[i] fmt = '{}:{}: note: In module imported here' if i < last: fmt = '{}:{}: note: ... from here' if i > 0: fmt += ',' else: fmt += ':' # Remove prefix to ignore from path (if present) to # simplify path. path = remove_path_prefix(path, self.ignore_prefix) result.append((None, -1, -1, 'note', fmt.format(path, line))) i -= 1 file = self.simplify_path(e.file) # Report context within a source file. if not self.show_error_context: pass elif (e.function_or_member != prev_function_or_member or e.type != prev_type): if e.function_or_member is None: if e.type is None: result.append((file, -1, -1, 'note', 'At top level:')) else: result.append((file, -1, -1, 'note', 'In class "{}":'.format( e.type))) else: if e.type is None: result.append((file, -1, -1, 'note', 'In function "{}":'.format( e.function_or_member))) else: result.append((file, -1, -1, 'note', 'In member "{}" of class "{}":'.format( e.function_or_member, e.type))) elif e.type != prev_type: if e.type is None: result.append((file, -1, -1, 'note', 'At top level:')) else: result.append((file, -1, -1, 'note', 'In class "{}":'.format(e.type))) result.append((file, e.line, e.column, e.severity, e.message)) prev_import_context = e.import_ctx prev_function_or_member = e.function_or_member prev_type = e.type return result def sort_messages(self, errors: List[ErrorInfo]) -> List[ErrorInfo]: """Sort an array of error messages locally by line number. I.e., sort a run of consecutive messages with the same file context by line number, but otherwise retain the general ordering of the messages. """ result = [] # type: List[ErrorInfo] i = 0 while i < len(errors): i0 = i # Find neighbouring errors with the same context and file. 
while (i + 1 < len(errors) and errors[i + 1].import_ctx == errors[i].import_ctx and errors[i + 1].file == errors[i].file): i += 1 i += 1 # Sort the errors specific to a file according to line number and column. a = sorted(errors[i0:i], key=lambda x: (x.line, x.column)) result.extend(a) return result def remove_duplicates(self, errors: List[Tuple[Optional[str], int, int, str, str]] ) -> List[Tuple[Optional[str], int, int, str, str]]: """Remove duplicates from a sorted error list.""" res = [] # type: List[Tuple[Optional[str], int, int, str, str]] i = 0 while i < len(errors): dup = False j = i - 1 while (j >= 0 and errors[j][0] == errors[i][0] and errors[j][1] == errors[i][1]): if (errors[j][3] == errors[i][3] and # Allow duplicate notes in overload conficts reporting not (errors[i][3] == 'note' and errors[i][4].strip() in allowed_duplicates or errors[i][4].strip().startswith('def ')) and errors[j][4] == errors[i][4]): # ignore column dup = True break j -= 1 if not dup: res.append(errors[i]) i += 1 return res class CompileError(Exception): """Exception raised when there is a compile error. It can be a parse, semantic analysis, type check or other compilation-related error. """ messages = None # type: List[str] use_stdout = False # Can be set in case there was a module with a blocking error module_with_blocker = None # type: Optional[str] def __init__(self, messages: List[str], use_stdout: bool = False, module_with_blocker: Optional[str] = None) -> None: super().__init__('\n'.join(messages)) self.messages = messages self.use_stdout = use_stdout self.module_with_blocker = module_with_blocker class DecodeError(Exception): """Exception raised when a file cannot be decoded due to an unknown encoding type. Essentially a wrapper for the LookupError raised by `bytearray.decode` """ def remove_path_prefix(path: str, prefix: str) -> str: """If path starts with prefix, return copy of path with the prefix removed. Otherwise, return path. If path is None, return None. """ if prefix is not None and path.startswith(prefix): return path[len(prefix):] else: return path def report_internal_error(err: Exception, file: Optional[str], line: int, errors: Errors, options: Options) -> None: """Report internal error and exit. This optionally starts pdb or shows a traceback. """ # Dump out errors so far, they often provide a clue. # But catch unexpected errors rendering them. try: for msg in errors.messages(): print(msg) except Exception as e: print("Failed to dump errors:", repr(e), file=sys.stderr) # Compute file:line prefix for official-looking error messages. if file: if line: prefix = '{}:{}: '.format(file, line) else: prefix = '{}: '.format(file) else: prefix = '' # Print "INTERNAL ERROR" message. print('{}error: INTERNAL ERROR --'.format(prefix), 'please report a bug at https://github.com/python/mypy/issues', 'version: {}'.format(mypy_version), file=sys.stderr) # If requested, drop into pdb. This overrides show_tb. if options.pdb: print('Dropping into pdb', file=sys.stderr) import pdb pdb.post_mortem(sys.exc_info()[2]) # If requested, print traceback, else print note explaining how to get one. 
if not options.show_traceback: if not options.pdb: print('{}: note: please use --show-traceback to print a traceback ' 'when reporting a bug'.format(prefix), file=sys.stderr) else: tb = traceback.extract_stack()[:-2] tb2 = traceback.extract_tb(sys.exc_info()[2]) print('Traceback (most recent call last):') for s in traceback.format_list(tb + tb2): print(s.rstrip('\n')) print('{}: {}'.format(type(err).__name__, err)) print('{}: note: use --pdb to drop into pdb'.format(prefix), file=sys.stderr) # Exit. The caller has nothing more to say. raise SystemExit(1) mypy-0.560/mypy/expandtype.py0000644€tŠÔÚ€2›s®0000001134713215007205022426 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, Iterable, List, TypeVar, Mapping, cast from mypy.types import ( Type, Instance, CallableType, TypeVisitor, UnboundType, AnyType, NoneTyp, TypeVarType, Overloaded, TupleType, TypedDictType, UnionType, ErasedType, TypeList, PartialType, DeletedType, UninhabitedType, TypeType, TypeVarId, FunctionLike, TypeVarDef ) def expand_type(typ: Type, env: Mapping[TypeVarId, Type]) -> Type: """Substitute any type variable references in a type given by a type environment. """ return typ.accept(ExpandTypeVisitor(env)) def expand_type_by_instance(typ: Type, instance: Instance) -> Type: """Substitute type variables in type using values from an Instance. Type variables are considered to be bound by the class declaration.""" if instance.args == []: return typ else: variables = {} # type: Dict[TypeVarId, Type] for binder, arg in zip(instance.type.defn.type_vars, instance.args): variables[binder.id] = arg return expand_type(typ, variables) F = TypeVar('F', bound=FunctionLike) def freshen_function_type_vars(callee: F) -> F: """Substitute fresh type variables for generic function type variables.""" if isinstance(callee, CallableType): if not callee.is_generic(): return cast(F, callee) tvdefs = [] tvmap = {} # type: Dict[TypeVarId, Type] for v in callee.variables: tvdef = TypeVarDef.new_unification_variable(v) tvdefs.append(tvdef) tvmap[v.id] = TypeVarType(tvdef) fresh = cast(CallableType, expand_type(callee, tvmap)).copy_modified(variables=tvdefs) return cast(F, fresh) else: assert isinstance(callee, Overloaded) fresh_overload = Overloaded([freshen_function_type_vars(item) for item in callee.items()]) return cast(F, fresh_overload) class ExpandTypeVisitor(TypeVisitor[Type]): """Visitor that substitutes type variables with values.""" variables = None # type: Mapping[TypeVarId, Type] # TypeVar id -> TypeVar value def __init__(self, variables: Mapping[TypeVarId, Type]) -> None: self.variables = variables def visit_unbound_type(self, t: UnboundType) -> Type: return t def visit_any(self, t: AnyType) -> Type: return t def visit_none_type(self, t: NoneTyp) -> Type: return t def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return t def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_erased_type(self, t: ErasedType) -> Type: # Should not get here. raise RuntimeError() def visit_instance(self, t: Instance) -> Type: args = self.expand_types(t.args) return Instance(t.type, args, t.line, t.column) def visit_type_var(self, t: TypeVarType) -> Type: repl = self.variables.get(t.id, t) if isinstance(repl, Instance): inst = repl # Return copy of instance with type erasure flag on. 
return Instance(inst.type, inst.args, line=inst.line, column=inst.column, erased=True) else: return repl def visit_callable_type(self, t: CallableType) -> Type: return t.copy_modified(arg_types=self.expand_types(t.arg_types), ret_type=t.ret_type.accept(self)) def visit_overloaded(self, t: Overloaded) -> Type: items = [] # type: List[CallableType] for item in t.items(): new_item = item.accept(self) assert isinstance(new_item, CallableType) items.append(new_item) return Overloaded(items) def visit_tuple_type(self, t: TupleType) -> Type: return t.copy_modified(items=self.expand_types(t.items)) def visit_typeddict_type(self, t: TypedDictType) -> Type: return t.copy_modified(item_types=self.expand_types(t.items.values())) def visit_union_type(self, t: UnionType) -> Type: # After substituting for type variables in t.items, # some of the resulting types might be subtypes of others. return UnionType.make_simplified_union(self.expand_types(t.items), t.line, t.column) def visit_partial_type(self, t: PartialType) -> Type: return t def visit_type_type(self, t: TypeType) -> Type: # TODO: Verify that the new item type is valid (instance or # union of instances or Any). Sadly we can't report errors # here yet. item = t.item.accept(self) return TypeType.make_normalized(item) def expand_types(self, types: Iterable[Type]) -> List[Type]: a = [] # type: List[Type] for t in types: a.append(t.accept(self)) return a mypy-0.560/mypy/experiments.py0000644€tŠÔÚ€2›s®0000000055113215007205022603 0ustar jukkaDROPBOX\Domain Users00000000000000from contextlib import contextmanager from typing import Optional, Tuple, Iterator STRICT_OPTIONAL = False find_occurrences = None # type: Optional[Tuple[str, str]] @contextmanager def strict_optional_set(value: bool) -> Iterator[None]: global STRICT_OPTIONAL saved = STRICT_OPTIONAL STRICT_OPTIONAL = value yield STRICT_OPTIONAL = saved mypy-0.560/mypy/exprtotype.py0000644€tŠÔÚ€2›s®0000001105213215007205022461 0ustar jukkaDROPBOX\Domain Users00000000000000"""Translate an Expression to a Type value.""" from mypy.nodes import ( Expression, NameExpr, MemberExpr, IndexExpr, TupleExpr, ListExpr, StrExpr, BytesExpr, UnicodeExpr, EllipsisExpr, CallExpr, ARG_POS, ARG_NAMED, get_member_expr_fullname ) from mypy.fastparse import parse_type_comment from mypy.types import ( Type, UnboundType, TypeList, EllipsisType, AnyType, Optional, CallableArgument, TypeOfAny ) class TypeTranslationError(Exception): """Exception raised when an expression is not valid as a type.""" def _extract_argument_name(expr: Expression) -> Optional[str]: if isinstance(expr, NameExpr) and expr.name == 'None': return None elif isinstance(expr, StrExpr): return expr.value elif isinstance(expr, UnicodeExpr): return expr.value else: raise TypeTranslationError() def expr_to_unanalyzed_type(expr: Expression, _parent: Optional[Expression] = None) -> Type: """Translate an expression to the corresponding type. The result is not semantically analyzed. It can be UnboundType or TypeList. Raise TypeTranslationError if the expression cannot represent a type. """ # The `parent` paremeter is used in recursive calls to provide context for # understanding whether an CallableArgument is ok. 
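    # Illustrative note (added, not from the original source), summarizing
    # what the branches below produce: a NameExpr such as `int` becomes
    # UnboundType('int'); an IndexExpr such as `List[int]` becomes
    # UnboundType('List') with args [UnboundType('int')]; a ListExpr such as
    # `[int, str]` becomes a TypeList of the translated items; and a string
    # literal is parsed as a type comment via parse_type_comment().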
name = None # type: Optional[str] if isinstance(expr, NameExpr): name = expr.name return UnboundType(name, line=expr.line, column=expr.column) elif isinstance(expr, MemberExpr): fullname = get_member_expr_fullname(expr) if fullname: return UnboundType(fullname, line=expr.line, column=expr.column) else: raise TypeTranslationError() elif isinstance(expr, IndexExpr): base = expr_to_unanalyzed_type(expr.base, expr) if isinstance(base, UnboundType): if base.args: raise TypeTranslationError() if isinstance(expr.index, TupleExpr): args = expr.index.items else: args = [expr.index] base.args = [expr_to_unanalyzed_type(arg, expr) for arg in args] if not base.args: base.empty_tuple_index = True return base else: raise TypeTranslationError() elif isinstance(expr, CallExpr) and isinstance(_parent, ListExpr): c = expr.callee names = [] # Go through the dotted member expr chain to get the full arg # constructor name to look up while True: if isinstance(c, NameExpr): names.append(c.name) break elif isinstance(c, MemberExpr): names.append(c.name) c = c.expr else: raise TypeTranslationError() arg_const = '.'.join(reversed(names)) # Go through the constructor args to get its name and type. name = None default_type = AnyType(TypeOfAny.unannotated) typ = default_type # type: Type for i, arg in enumerate(expr.args): if expr.arg_names[i] is not None: if expr.arg_names[i] == "name": if name is not None: # Two names raise TypeTranslationError() name = _extract_argument_name(arg) continue elif expr.arg_names[i] == "type": if typ is not default_type: # Two types raise TypeTranslationError() typ = expr_to_unanalyzed_type(arg, expr) continue else: raise TypeTranslationError() elif i == 0: typ = expr_to_unanalyzed_type(arg, expr) elif i == 1: name = _extract_argument_name(arg) else: raise TypeTranslationError() return CallableArgument(typ, name, arg_const, expr.line, expr.column) elif isinstance(expr, ListExpr): return TypeList([expr_to_unanalyzed_type(t, expr) for t in expr.items], line=expr.line, column=expr.column) elif isinstance(expr, (StrExpr, BytesExpr, UnicodeExpr)): # Parse string literal type. 
try: result = parse_type_comment(expr.value, expr.line, None) assert result is not None except SyntaxError: raise TypeTranslationError() return result elif isinstance(expr, EllipsisExpr): return EllipsisType(expr.line) else: raise TypeTranslationError() mypy-0.560/mypy/fastparse.py0000644€tŠÔÚ€2›s®0000013256513215007206022244 0ustar jukkaDROPBOX\Domain Users00000000000000from functools import wraps import sys from typing import ( Tuple, Union, TypeVar, Callable, Sequence, Optional, Any, cast, List, Set, overload ) from mypy.sharedparse import ( special_function_elide_names, argument_elide_name, ) from mypy.nodes import ( MypyFile, Node, ImportBase, Import, ImportAll, ImportFrom, FuncDef, OverloadedFuncDef, OverloadPart, ClassDef, Decorator, Block, Var, OperatorAssignmentStmt, ExpressionStmt, AssignmentStmt, ReturnStmt, RaiseStmt, AssertStmt, DelStmt, BreakStmt, ContinueStmt, PassStmt, GlobalDecl, WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt, TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr, DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr, UnaryExpr, LambdaExpr, ComparisonExpr, StarExpr, YieldFromExpr, NonlocalDecl, DictionaryComprehension, SetComprehension, ComplexExpr, EllipsisExpr, YieldExpr, Argument, AwaitExpr, TempNode, Expression, Statement, ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR2, check_arg_names, ) from mypy.types import ( Type, CallableType, AnyType, UnboundType, TupleType, TypeList, EllipsisType, CallableArgument, TypeOfAny ) from mypy import defaults from mypy import experiments from mypy import messages from mypy.errors import Errors from mypy.options import Options try: from typed_ast import ast3 except ImportError: if sys.version_info.minor > 2: try: from typed_ast import ast35 # type: ignore except ImportError: print('The typed_ast package is not installed.\n' 'You can install it with `python3 -m pip install typed-ast`.', file=sys.stderr) else: print('You need a more recent version of the typed_ast package.\n' 'You can update to the latest version with ' '`python3 -m pip install -U typed-ast`.', file=sys.stderr) else: print('Mypy requires the typed_ast package, which is only compatible with\n' 'Python 3.3 and greater.', file=sys.stderr) sys.exit(1) T = TypeVar('T', bound=Union[ast3.expr, ast3.stmt]) U = TypeVar('U', bound=Node) V = TypeVar('V') # There is no way to create reasonable fallbacks at this stage, # they must be patched later. _dummy_fallback = None # type: Any TYPE_COMMENT_SYNTAX_ERROR = 'syntax error in type comment' TYPE_COMMENT_AST_ERROR = 'invalid type comment or annotation' def parse(source: Union[str, bytes], fnam: str, module: Optional[str], errors: Optional[Errors] = None, options: Options = Options()) -> MypyFile: """Parse a source file, without doing any semantic analysis. Return the parse tree. If errors is not provided, raise ParseError on failure. Otherwise, use the errors object to report parse errors. 
""" raise_on_error = False if errors is None: errors = Errors() raise_on_error = True errors.set_file(fnam, module) is_stub_file = fnam.endswith('.pyi') try: if is_stub_file: feature_version = defaults.PYTHON3_VERSION[1] else: assert options.python_version[0] >= 3 feature_version = options.python_version[1] ast = ast3.parse(source, fnam, 'exec', feature_version=feature_version) tree = ASTConverter(options=options, is_stub=is_stub_file, errors=errors, ).visit(ast) tree.path = fnam tree.is_stub = is_stub_file except SyntaxError as e: errors.report(e.lineno, e.offset, e.msg, blocker=True) tree = MypyFile([], [], False, set()) if raise_on_error and errors.is_errors(): errors.raise_error() return tree def parse_type_comment(type_comment: str, line: int, errors: Optional[Errors]) -> Optional[Type]: try: typ = ast3.parse(type_comment, '', 'eval') except SyntaxError as e: if errors is not None: errors.report(line, e.offset, TYPE_COMMENT_SYNTAX_ERROR, blocker=True) return None else: raise else: assert isinstance(typ, ast3.Expression) return TypeConverter(errors, line=line).visit(typ.body) def with_line(f: Callable[['ASTConverter', T], U]) -> Callable[['ASTConverter', T], U]: @wraps(f) def wrapper(self: 'ASTConverter', ast: T) -> U: node = f(self, ast) node.set_line(ast.lineno, ast.col_offset) return node return wrapper def find(f: Callable[[V], bool], seq: Sequence[V]) -> Optional[V]: for item in seq: if f(item): return item return None def is_no_type_check_decorator(expr: ast3.expr) -> bool: if isinstance(expr, ast3.Name): return expr.id == 'no_type_check' elif isinstance(expr, ast3.Attribute): if isinstance(expr.value, ast3.Name): return expr.value.id == 'typing' and expr.attr == 'no_type_check' return False class ASTConverter(ast3.NodeTransformer): def __init__(self, options: Options, is_stub: bool, errors: Errors) -> None: self.class_nesting = 0 self.imports = [] # type: List[ImportBase] self.options = options self.is_stub = is_stub self.errors = errors def note(self, msg: str, line: int, column: int) -> None: self.errors.report(line, column, msg, severity='note') def fail(self, msg: str, line: int, column: int) -> None: self.errors.report(line, column, msg, blocker=True) def generic_visit(self, node: ast3.AST) -> None: raise RuntimeError('AST node not implemented: ' + str(type(node))) def visit(self, node: Optional[ast3.AST]) -> Any: # same as in typed_ast stub if node is None: return None return super().visit(node) def translate_expr_list(self, l: Sequence[ast3.AST]) -> List[Expression]: res = [] # type: List[Expression] for e in l: exp = self.visit(e) isinstance(exp, Expression) res.append(exp) return res def translate_stmt_list(self, l: Sequence[ast3.AST]) -> List[Statement]: res = [] # type: List[Statement] for e in l: stmt = self.visit(e) isinstance(stmt, Statement) res.append(stmt) return res op_map = { ast3.Add: '+', ast3.Sub: '-', ast3.Mult: '*', ast3.MatMult: '@', ast3.Div: '/', ast3.Mod: '%', ast3.Pow: '**', ast3.LShift: '<<', ast3.RShift: '>>', ast3.BitOr: '|', ast3.BitXor: '^', ast3.BitAnd: '&', ast3.FloorDiv: '//' } def from_operator(self, op: ast3.operator) -> str: op_name = ASTConverter.op_map.get(type(op)) if op_name is None: raise RuntimeError('Unknown operator ' + str(type(op))) else: return op_name comp_op_map = { ast3.Gt: '>', ast3.Lt: '<', ast3.Eq: '==', ast3.GtE: '>=', ast3.LtE: '<=', ast3.NotEq: '!=', ast3.Is: 'is', ast3.IsNot: 'is not', ast3.In: 'in', ast3.NotIn: 'not in' } def from_comp_operator(self, op: ast3.cmpop) -> str: op_name = 
ASTConverter.comp_op_map.get(type(op)) if op_name is None: raise RuntimeError('Unknown comparison operator ' + str(type(op))) else: return op_name def as_block(self, stmts: List[ast3.stmt], lineno: int) -> Optional[Block]: b = None if stmts: b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts))) b.set_line(lineno) return b def as_required_block(self, stmts: List[ast3.stmt], lineno: int) -> Block: assert stmts # must be non-empty b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts))) b.set_line(lineno) return b def fix_function_overloads(self, stmts: List[Statement]) -> List[Statement]: ret = [] # type: List[Statement] current_overload = [] # type: List[OverloadPart] current_overload_name = None for stmt in stmts: if (current_overload_name is not None and isinstance(stmt, (Decorator, FuncDef)) and stmt.name() == current_overload_name): current_overload.append(stmt) else: if len(current_overload) == 1: ret.append(current_overload[0]) elif len(current_overload) > 1: ret.append(OverloadedFuncDef(current_overload)) if isinstance(stmt, Decorator): current_overload = [stmt] current_overload_name = stmt.name() else: current_overload = [] current_overload_name = None ret.append(stmt) if len(current_overload) == 1: ret.append(current_overload[0]) elif len(current_overload) > 1: ret.append(OverloadedFuncDef(current_overload)) return ret def in_class(self) -> bool: return self.class_nesting > 0 def translate_module_id(self, id: str) -> str: """Return the actual, internal module id for a source text id. For example, translate '__builtin__' in Python 2 to 'builtins'. """ if id == self.options.custom_typing_module: return 'typing' elif id == '__builtin__' and self.options.python_version[0] == 2: # HACK: __builtin__ in Python 2 is aliases to builtins. However, the implementation # is named __builtin__.py (there is another layer of translation elsewhere). return 'builtins' return id def visit_Module(self, mod: ast3.Module) -> MypyFile: body = self.fix_function_overloads(self.translate_stmt_list(mod.body)) return MypyFile(body, self.imports, False, {ti.lineno for ti in mod.type_ignores}, ) # --- stmt --- # FunctionDef(identifier name, arguments args, # stmt* body, expr* decorator_list, expr? returns, string? type_comment) # arguments = (arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults, # arg? kwarg, expr* defaults) @with_line def visit_FunctionDef(self, n: ast3.FunctionDef) -> Union[FuncDef, Decorator]: return self.do_func_def(n) # AsyncFunctionDef(identifier name, arguments args, # stmt* body, expr* decorator_list, expr? returns, string? 
type_comment) @with_line def visit_AsyncFunctionDef(self, n: ast3.AsyncFunctionDef) -> Union[FuncDef, Decorator]: return self.do_func_def(n, is_coroutine=True) def do_func_def(self, n: Union[ast3.FunctionDef, ast3.AsyncFunctionDef], is_coroutine: bool = False) -> Union[FuncDef, Decorator]: """Helper shared between visit_FunctionDef and visit_AsyncFunctionDef.""" no_type_check = bool(n.decorator_list and any(is_no_type_check_decorator(d) for d in n.decorator_list)) args = self.transform_args(n.args, n.lineno, no_type_check=no_type_check) arg_kinds = [arg.kind for arg in args] arg_names = [arg.variable.name() for arg in args] # type: List[Optional[str]] arg_names = [None if argument_elide_name(name) else name for name in arg_names] if special_function_elide_names(n.name): arg_names = [None] * len(arg_names) arg_types = [] # type: List[Optional[Type]] if no_type_check: arg_types = [None] * len(args) return_type = None elif n.type_comment is not None: try: func_type_ast = ast3.parse(n.type_comment, '', 'func_type') assert isinstance(func_type_ast, ast3.FunctionType) # for ellipsis arg if (len(func_type_ast.argtypes) == 1 and isinstance(func_type_ast.argtypes[0], ast3.Ellipsis)): if n.returns: # PEP 484 disallows both type annotations and type comments self.fail(messages.DUPLICATE_TYPE_SIGNATURES, n.lineno, n.col_offset) arg_types = [a.type_annotation if a.type_annotation is not None else AnyType(TypeOfAny.unannotated) for a in args] else: # PEP 484 disallows both type annotations and type comments if n.returns or any(a.type_annotation is not None for a in args): self.fail(messages.DUPLICATE_TYPE_SIGNATURES, n.lineno, n.col_offset) translated_args = (TypeConverter(self.errors, line=n.lineno) .translate_expr_list(func_type_ast.argtypes)) arg_types = [a if a is not None else AnyType(TypeOfAny.unannotated) for a in translated_args] return_type = TypeConverter(self.errors, line=n.lineno).visit(func_type_ast.returns) # add implicit self type if self.in_class() and len(arg_types) < len(args): arg_types.insert(0, AnyType(TypeOfAny.special_form)) except SyntaxError: self.fail(TYPE_COMMENT_SYNTAX_ERROR, n.lineno, n.col_offset) if n.type_comment and n.type_comment[0] != "(": self.note('Suggestion: wrap argument types in parentheses', n.lineno, n.col_offset) arg_types = [AnyType(TypeOfAny.from_error)] * len(args) return_type = AnyType(TypeOfAny.from_error) else: arg_types = [a.type_annotation for a in args] return_type = TypeConverter(self.errors, line=n.returns.lineno if n.returns else n.lineno).visit(n.returns) for arg, arg_type in zip(args, arg_types): self.set_type_optional(arg_type, arg.initializer) func_type = None if any(arg_types) or return_type: if len(arg_types) != 1 and any(isinstance(t, EllipsisType) for t in arg_types): self.fail("Ellipses cannot accompany other argument types " "in function type signature.", n.lineno, 0) elif len(arg_types) > len(arg_kinds): self.fail('Type signature has too many arguments', n.lineno, 0) elif len(arg_types) < len(arg_kinds): self.fail('Type signature has too few arguments', n.lineno, 0) else: func_type = CallableType([a if a is not None else AnyType(TypeOfAny.unannotated) for a in arg_types], arg_kinds, arg_names, return_type if return_type is not None else AnyType(TypeOfAny.unannotated), _dummy_fallback) func_def = FuncDef(n.name, args, self.as_required_block(n.body, n.lineno), func_type) if is_coroutine: # A coroutine is also a generator, mostly for internal reasons. 
func_def.is_generator = func_def.is_coroutine = True if func_type is not None: func_type.definition = func_def func_type.line = n.lineno if n.decorator_list: var = Var(func_def.name()) var.is_ready = False var.set_line(n.decorator_list[0].lineno) func_def.is_decorated = True func_def.set_line(n.lineno + len(n.decorator_list)) func_def.body.set_line(func_def.get_line()) return Decorator(func_def, self.translate_expr_list(n.decorator_list), var) else: return func_def def set_type_optional(self, type: Optional[Type], initializer: Optional[Expression]) -> None: if self.options.no_implicit_optional: return # Indicate that type should be wrapped in an Optional if arg is initialized to None. optional = isinstance(initializer, NameExpr) and initializer.name == 'None' if isinstance(type, UnboundType): type.optional = optional def transform_args(self, args: ast3.arguments, line: int, no_type_check: bool = False, ) -> List[Argument]: def make_argument(arg: ast3.arg, default: Optional[ast3.expr], kind: int) -> Argument: if no_type_check: arg_type = None else: if arg.annotation is not None and arg.type_comment is not None: self.fail(messages.DUPLICATE_TYPE_SIGNATURES, arg.lineno, arg.col_offset) arg_type = None if arg.annotation is not None: arg_type = TypeConverter(self.errors, line=arg.lineno).visit(arg.annotation) elif arg.type_comment is not None: arg_type = parse_type_comment(arg.type_comment, arg.lineno, self.errors) return Argument(Var(arg.arg), arg_type, self.visit(default), kind) new_args = [] names = [] # type: List[ast3.arg] num_no_defaults = len(args.args) - len(args.defaults) # positional arguments without defaults for a in args.args[:num_no_defaults]: new_args.append(make_argument(a, None, ARG_POS)) names.append(a) # positional arguments with defaults for a, d in zip(args.args[num_no_defaults:], args.defaults): new_args.append(make_argument(a, d, ARG_OPT)) names.append(a) # *arg if args.vararg is not None: new_args.append(make_argument(args.vararg, None, ARG_STAR)) names.append(args.vararg) # keyword-only arguments with defaults for a, d in zip(args.kwonlyargs, args.kw_defaults): new_args.append(make_argument( a, d, ARG_NAMED if d is None else ARG_NAMED_OPT)) names.append(a) # **kwarg if args.kwarg is not None: new_args.append(make_argument(args.kwarg, None, ARG_STAR2)) names.append(args.kwarg) def fail_arg(msg: str, arg: ast3.arg) -> None: self.fail(msg, arg.lineno, arg.col_offset) check_arg_names([name.arg for name in names], names, fail_arg) return new_args # ClassDef(identifier name, # expr* bases, # keyword* keywords, # stmt* body, # expr* decorator_list) @with_line def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef: self.class_nesting += 1 keywords = [(kw.arg, self.visit(kw.value)) for kw in n.keywords if kw.arg] cdef = ClassDef(n.name, self.as_required_block(n.body, n.lineno), None, self.translate_expr_list(n.bases), metaclass=dict(keywords).get('metaclass'), keywords=keywords) cdef.decorators = self.translate_expr_list(n.decorator_list) self.class_nesting -= 1 return cdef # Return(expr? value) @with_line def visit_Return(self, n: ast3.Return) -> ReturnStmt: return ReturnStmt(self.visit(n.value)) # Delete(expr* targets) @with_line def visit_Delete(self, n: ast3.Delete) -> DelStmt: if len(n.targets) > 1: tup = TupleExpr(self.translate_expr_list(n.targets)) tup.set_line(n.lineno) return DelStmt(tup) else: return DelStmt(self.visit(n.targets[0])) # Assign(expr* targets, expr? value, string? type_comment, expr? 
annotation) @with_line def visit_Assign(self, n: ast3.Assign) -> AssignmentStmt: lvalues = self.translate_expr_list(n.targets) rvalue = self.visit(n.value) if n.type_comment is not None: typ = parse_type_comment(n.type_comment, n.lineno, self.errors) else: typ = None return AssignmentStmt(lvalues, rvalue, type=typ, new_syntax=False) # AnnAssign(expr target, expr annotation, expr? value, int simple) @with_line def visit_AnnAssign(self, n: ast3.AnnAssign) -> AssignmentStmt: if n.value is None: # always allow 'x: int' rvalue = TempNode(AnyType(TypeOfAny.special_form), no_rhs=True) # type: Expression else: rvalue = self.visit(n.value) typ = TypeConverter(self.errors, line=n.lineno).visit(n.annotation) assert typ is not None typ.column = n.annotation.col_offset return AssignmentStmt([self.visit(n.target)], rvalue, type=typ, new_syntax=True) # AugAssign(expr target, operator op, expr value) @with_line def visit_AugAssign(self, n: ast3.AugAssign) -> OperatorAssignmentStmt: return OperatorAssignmentStmt(self.from_operator(n.op), self.visit(n.target), self.visit(n.value)) # For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) @with_line def visit_For(self, n: ast3.For) -> ForStmt: if n.type_comment is not None: target_type = parse_type_comment(n.type_comment, n.lineno, self.errors) else: target_type = None return ForStmt(self.visit(n.target), self.visit(n.iter), self.as_required_block(n.body, n.lineno), self.as_block(n.orelse, n.lineno), target_type) # AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) @with_line def visit_AsyncFor(self, n: ast3.AsyncFor) -> ForStmt: if n.type_comment is not None: target_type = parse_type_comment(n.type_comment, n.lineno, self.errors) else: target_type = None r = ForStmt(self.visit(n.target), self.visit(n.iter), self.as_required_block(n.body, n.lineno), self.as_block(n.orelse, n.lineno), target_type) r.is_async = True return r # While(expr test, stmt* body, stmt* orelse) @with_line def visit_While(self, n: ast3.While) -> WhileStmt: return WhileStmt(self.visit(n.test), self.as_required_block(n.body, n.lineno), self.as_block(n.orelse, n.lineno)) # If(expr test, stmt* body, stmt* orelse) @with_line def visit_If(self, n: ast3.If) -> IfStmt: return IfStmt([self.visit(n.test)], [self.as_required_block(n.body, n.lineno)], self.as_block(n.orelse, n.lineno)) # With(withitem* items, stmt* body, string? type_comment) @with_line def visit_With(self, n: ast3.With) -> WithStmt: if n.type_comment is not None: target_type = parse_type_comment(n.type_comment, n.lineno, self.errors) else: target_type = None return WithStmt([self.visit(i.context_expr) for i in n.items], [self.visit(i.optional_vars) for i in n.items], self.as_required_block(n.body, n.lineno), target_type) # AsyncWith(withitem* items, stmt* body, string? type_comment) @with_line def visit_AsyncWith(self, n: ast3.AsyncWith) -> WithStmt: if n.type_comment is not None: target_type = parse_type_comment(n.type_comment, n.lineno, self.errors) else: target_type = None r = WithStmt([self.visit(i.context_expr) for i in n.items], [self.visit(i.optional_vars) for i in n.items], self.as_required_block(n.body, n.lineno), target_type) r.is_async = True return r # Raise(expr? exc, expr? 
cause) @with_line def visit_Raise(self, n: ast3.Raise) -> RaiseStmt: return RaiseStmt(self.visit(n.exc), self.visit(n.cause)) # Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) @with_line def visit_Try(self, n: ast3.Try) -> TryStmt: vs = [NameExpr(h.name) if h.name is not None else None for h in n.handlers] types = [self.visit(h.type) for h in n.handlers] handlers = [self.as_required_block(h.body, h.lineno) for h in n.handlers] return TryStmt(self.as_required_block(n.body, n.lineno), vs, types, handlers, self.as_block(n.orelse, n.lineno), self.as_block(n.finalbody, n.lineno)) # Assert(expr test, expr? msg) @with_line def visit_Assert(self, n: ast3.Assert) -> AssertStmt: return AssertStmt(self.visit(n.test), self.visit(n.msg)) # Import(alias* names) @with_line def visit_Import(self, n: ast3.Import) -> Import: names = [] # type: List[Tuple[str, Optional[str]]] for alias in n.names: name = self.translate_module_id(alias.name) asname = alias.asname if asname is None and name != alias.name: # if the module name has been translated (and it's not already # an explicit import-as), make it an implicit import-as the # original name asname = alias.name names.append((name, asname)) i = Import(names) self.imports.append(i) return i # ImportFrom(identifier? module, alias* names, int? level) @with_line def visit_ImportFrom(self, n: ast3.ImportFrom) -> ImportBase: assert n.level is not None if len(n.names) == 1 and n.names[0].name == '*': mod = n.module if n.module is not None else '' i = ImportAll(mod, n.level) # type: ImportBase else: i = ImportFrom(self.translate_module_id(n.module) if n.module is not None else '', n.level, [(a.name, a.asname) for a in n.names]) self.imports.append(i) return i # Global(identifier* names) @with_line def visit_Global(self, n: ast3.Global) -> GlobalDecl: return GlobalDecl(n.names) # Nonlocal(identifier* names) @with_line def visit_Nonlocal(self, n: ast3.Nonlocal) -> NonlocalDecl: return NonlocalDecl(n.names) # Expr(expr value) @with_line def visit_Expr(self, n: ast3.Expr) -> ExpressionStmt: value = self.visit(n.value) return ExpressionStmt(value) # Pass @with_line def visit_Pass(self, n: ast3.Pass) -> PassStmt: return PassStmt() # Break @with_line def visit_Break(self, n: ast3.Break) -> BreakStmt: return BreakStmt() # Continue @with_line def visit_Continue(self, n: ast3.Continue) -> ContinueStmt: return ContinueStmt() # --- expr --- # BoolOp(boolop op, expr* values) @with_line def visit_BoolOp(self, n: ast3.BoolOp) -> OpExpr: # mypy translates (1 and 2 and 3) as (1 and (2 and 3)) assert len(n.values) >= 2 if isinstance(n.op, ast3.And): op = 'and' elif isinstance(n.op, ast3.Or): op = 'or' else: raise RuntimeError('unknown BoolOp ' + str(type(n))) # potentially inefficient! 
def group(vals: List[Expression]) -> OpExpr: if len(vals) == 2: return OpExpr(op, vals[0], vals[1]) else: return OpExpr(op, vals[0], group(vals[1:])) return group(self.translate_expr_list(n.values)) # BinOp(expr left, operator op, expr right) @with_line def visit_BinOp(self, n: ast3.BinOp) -> OpExpr: op = self.from_operator(n.op) if op is None: raise RuntimeError('cannot translate BinOp ' + str(type(n.op))) return OpExpr(op, self.visit(n.left), self.visit(n.right)) # UnaryOp(unaryop op, expr operand) @with_line def visit_UnaryOp(self, n: ast3.UnaryOp) -> UnaryExpr: op = None if isinstance(n.op, ast3.Invert): op = '~' elif isinstance(n.op, ast3.Not): op = 'not' elif isinstance(n.op, ast3.UAdd): op = '+' elif isinstance(n.op, ast3.USub): op = '-' if op is None: raise RuntimeError('cannot translate UnaryOp ' + str(type(n.op))) return UnaryExpr(op, self.visit(n.operand)) # Lambda(arguments args, expr body) @with_line def visit_Lambda(self, n: ast3.Lambda) -> LambdaExpr: body = ast3.Return(n.body) body.lineno = n.lineno body.col_offset = n.col_offset return LambdaExpr(self.transform_args(n.args, n.lineno), self.as_required_block([body], n.lineno)) # IfExp(expr test, expr body, expr orelse) @with_line def visit_IfExp(self, n: ast3.IfExp) -> ConditionalExpr: return ConditionalExpr(self.visit(n.test), self.visit(n.body), self.visit(n.orelse)) # Dict(expr* keys, expr* values) @with_line def visit_Dict(self, n: ast3.Dict) -> DictExpr: return DictExpr(list(zip(self.translate_expr_list(n.keys), self.translate_expr_list(n.values)))) # Set(expr* elts) @with_line def visit_Set(self, n: ast3.Set) -> SetExpr: return SetExpr(self.translate_expr_list(n.elts)) # ListComp(expr elt, comprehension* generators) @with_line def visit_ListComp(self, n: ast3.ListComp) -> ListComprehension: return ListComprehension(self.visit_GeneratorExp(cast(ast3.GeneratorExp, n))) # SetComp(expr elt, comprehension* generators) @with_line def visit_SetComp(self, n: ast3.SetComp) -> SetComprehension: return SetComprehension(self.visit_GeneratorExp(cast(ast3.GeneratorExp, n))) # DictComp(expr key, expr value, comprehension* generators) @with_line def visit_DictComp(self, n: ast3.DictComp) -> DictionaryComprehension: targets = [self.visit(c.target) for c in n.generators] iters = [self.visit(c.iter) for c in n.generators] ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators] is_async = [bool(c.is_async) for c in n.generators] return DictionaryComprehension(self.visit(n.key), self.visit(n.value), targets, iters, ifs_list, is_async) # GeneratorExp(expr elt, comprehension* generators) @with_line def visit_GeneratorExp(self, n: ast3.GeneratorExp) -> GeneratorExpr: targets = [self.visit(c.target) for c in n.generators] iters = [self.visit(c.iter) for c in n.generators] ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators] is_async = [bool(c.is_async) for c in n.generators] return GeneratorExpr(self.visit(n.elt), targets, iters, ifs_list, is_async) # Await(expr value) @with_line def visit_Await(self, n: ast3.Await) -> AwaitExpr: v = self.visit(n.value) return AwaitExpr(v) # Yield(expr? 
value) @with_line def visit_Yield(self, n: ast3.Yield) -> YieldExpr: return YieldExpr(self.visit(n.value)) # YieldFrom(expr value) @with_line def visit_YieldFrom(self, n: ast3.YieldFrom) -> YieldFromExpr: return YieldFromExpr(self.visit(n.value)) # Compare(expr left, cmpop* ops, expr* comparators) @with_line def visit_Compare(self, n: ast3.Compare) -> ComparisonExpr: operators = [self.from_comp_operator(o) for o in n.ops] operands = self.translate_expr_list([n.left] + n.comparators) return ComparisonExpr(operators, operands) # Call(expr func, expr* args, keyword* keywords) # keyword = (identifier? arg, expr value) @with_line def visit_Call(self, n: ast3.Call) -> CallExpr: def is_star2arg(k: ast3.keyword) -> bool: return k.arg is None arg_types = self.translate_expr_list( [a.value if isinstance(a, ast3.Starred) else a for a in n.args] + [k.value for k in n.keywords]) arg_kinds = ([ARG_STAR if isinstance(a, ast3.Starred) else ARG_POS for a in n.args] + [ARG_STAR2 if is_star2arg(k) else ARG_NAMED for k in n.keywords]) return CallExpr(self.visit(n.func), arg_types, arg_kinds, cast(List[Optional[str]], [None] * len(n.args)) + [k.arg for k in n.keywords]) # Num(object n) -- a number as a PyObject. @with_line def visit_Num(self, n: ast3.Num) -> Union[IntExpr, FloatExpr, ComplexExpr]: if isinstance(n.n, int): return IntExpr(n.n) elif isinstance(n.n, float): return FloatExpr(n.n) elif isinstance(n.n, complex): return ComplexExpr(n.n) raise RuntimeError('num not implemented for ' + str(type(n.n))) # Str(string s) @with_line def visit_Str(self, n: ast3.Str) -> Union[UnicodeExpr, StrExpr]: # Hack: assume all string literals in Python 2 stubs are normal # strs (i.e. not unicode). All stubs are parsed with the Python 3 # parser, which causes unprefixed string literals to be interpreted # as unicode instead of bytes. This hack is generally okay, # because mypy considers str literals to be compatible with # unicode. return StrExpr(n.s) # Only available with typed_ast >= 0.6.2 if hasattr(ast3, 'JoinedStr'): # JoinedStr(expr* values) @with_line def visit_JoinedStr(self, n: ast3.JoinedStr) -> Expression: # Each of n.values is a str or FormattedValue; we just concatenate # them all using ''.join. empty_string = StrExpr('') empty_string.set_line(n.lineno, n.col_offset) strs_to_join = ListExpr(self.translate_expr_list(n.values)) strs_to_join.set_line(empty_string) join_method = MemberExpr(empty_string, 'join') join_method.set_line(empty_string) result_expression = CallExpr(join_method, [strs_to_join], [ARG_POS], [None]) return result_expression # FormattedValue(expr value) @with_line def visit_FormattedValue(self, n: ast3.FormattedValue) -> Expression: # A FormattedValue is a component of a JoinedStr, or it can exist # on its own. We translate them to individual '{}'.format(value) # calls -- we don't bother with the conversion/format_spec fields. exp = self.visit(n.value) exp.set_line(n.lineno, n.col_offset) format_string = StrExpr('{}') format_string.set_line(n.lineno, n.col_offset) format_method = MemberExpr(format_string, 'format') format_method.set_line(format_string) result_expression = CallExpr(format_method, [exp], [ARG_POS], [None]) return result_expression # Bytes(bytes s) @with_line def visit_Bytes(self, n: ast3.Bytes) -> Union[BytesExpr, StrExpr]: # The following line is a bit hacky, but is the best way to maintain # compatibility with how mypy currently parses the contents of bytes literals. 
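        # Editorial example (not part of mypy): for n.s == b'foo\n', str(n.s) is
        # "b'foo\\n'", so the [2:-1] slice below keeps the escaped contents
        # "foo\\n" while dropping the surrounding b'...' quoting.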
contents = str(n.s)[2:-1] return BytesExpr(contents) # NameConstant(singleton value) def visit_NameConstant(self, n: ast3.NameConstant) -> NameExpr: return NameExpr(str(n.value)) # Ellipsis @with_line def visit_Ellipsis(self, n: ast3.Ellipsis) -> EllipsisExpr: return EllipsisExpr() # Attribute(expr value, identifier attr, expr_context ctx) @with_line def visit_Attribute(self, n: ast3.Attribute) -> Union[MemberExpr, SuperExpr]: if (isinstance(n.value, ast3.Call) and isinstance(n.value.func, ast3.Name) and n.value.func.id == 'super'): return SuperExpr(n.attr, self.visit(n.value)) return MemberExpr(self.visit(n.value), n.attr) # Subscript(expr value, slice slice, expr_context ctx) @with_line def visit_Subscript(self, n: ast3.Subscript) -> IndexExpr: return IndexExpr(self.visit(n.value), self.visit(n.slice)) # Starred(expr value, expr_context ctx) @with_line def visit_Starred(self, n: ast3.Starred) -> StarExpr: return StarExpr(self.visit(n.value)) # Name(identifier id, expr_context ctx) @with_line def visit_Name(self, n: ast3.Name) -> NameExpr: return NameExpr(n.id) # List(expr* elts, expr_context ctx) @with_line def visit_List(self, n: ast3.List) -> ListExpr: return ListExpr([self.visit(e) for e in n.elts]) # Tuple(expr* elts, expr_context ctx) @with_line def visit_Tuple(self, n: ast3.Tuple) -> TupleExpr: return TupleExpr([self.visit(e) for e in n.elts]) # --- slice --- # Slice(expr? lower, expr? upper, expr? step) def visit_Slice(self, n: ast3.Slice) -> SliceExpr: return SliceExpr(self.visit(n.lower), self.visit(n.upper), self.visit(n.step)) # ExtSlice(slice* dims) def visit_ExtSlice(self, n: ast3.ExtSlice) -> TupleExpr: return TupleExpr(self.translate_expr_list(n.dims)) # Index(expr value) def visit_Index(self, n: ast3.Index) -> Node: return self.visit(n.value) class TypeConverter(ast3.NodeTransformer): def __init__(self, errors: Optional[Errors], line: int = -1) -> None: self.errors = errors self.line = line self.node_stack = [] # type: List[ast3.AST] def _visit_implementation(self, node: Optional[ast3.AST]) -> Optional[Type]: """Modified visit -- keep track of the stack of nodes""" if node is None: return None self.node_stack.append(node) try: return super().visit(node) finally: self.node_stack.pop() if sys.version_info >= (3, 6): @overload def visit(self, node: ast3.expr) -> Type: ... @overload # noqa def visit(self, node: Optional[ast3.AST]) -> Optional[Type]: ... def visit(self, node: Optional[ast3.AST]) -> Optional[Type]: # noqa return self._visit_implementation(node) else: def visit(self, node: Optional[ast3.AST]) -> Any: return self._visit_implementation(node) def parent(self) -> Optional[ast3.AST]: """Return the AST node above the one we are processing""" if len(self.node_stack) < 2: return None return self.node_stack[-2] def fail(self, msg: str, line: int, column: int) -> None: if self.errors: self.errors.report(line, column, msg, blocker=True) def note(self, msg: str, line: int, column: int) -> None: if self.errors: self.errors.report(line, column, msg, severity='note') def visit_raw_str(self, s: str) -> Type: # An escape hatch that allows the AST walker in fastparse2 to # directly hook into the Python 3.5 type converter in some cases # without needing to create an intermediary `ast3.Str` object. 
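        # Editorial example (not part of mypy): a raw type string such as
        # "List[int]" is handed to parse_type_comment() below; if it cannot be
        # parsed, AnyType(TypeOfAny.from_error) is used so analysis can continue.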
return (parse_type_comment(s.strip(), self.line, self.errors) or AnyType(TypeOfAny.from_error)) def generic_visit(self, node: ast3.AST) -> Type: # type: ignore self.fail(TYPE_COMMENT_AST_ERROR, self.line, getattr(node, 'col_offset', -1)) return AnyType(TypeOfAny.from_error) def translate_expr_list(self, l: Sequence[ast3.expr]) -> List[Type]: return [self.visit(e) for e in l] def visit_Call(self, e: ast3.Call) -> Type: # Parse the arg constructor f = e.func constructor = stringify_name(f) if not isinstance(self.parent(), ast3.List): self.fail(TYPE_COMMENT_AST_ERROR, self.line, e.col_offset) if constructor: self.note("Suggestion: use {}[...] instead of {}(...)".format( constructor, constructor), self.line, e.col_offset) return AnyType(TypeOfAny.from_error) if not constructor: self.fail("Expected arg constructor name", e.lineno, e.col_offset) name = None # type: Optional[str] default_type = AnyType(TypeOfAny.special_form) typ = default_type # type: Type for i, arg in enumerate(e.args): if i == 0: converted = self.visit(arg) assert converted is not None typ = converted elif i == 1: name = self._extract_argument_name(arg) else: self.fail("Too many arguments for argument constructor", f.lineno, f.col_offset) for k in e.keywords: value = k.value if k.arg == "name": if name is not None: self.fail('"{}" gets multiple values for keyword argument "name"'.format( constructor), f.lineno, f.col_offset) name = self._extract_argument_name(value) elif k.arg == "type": if typ is not default_type: self.fail('"{}" gets multiple values for keyword argument "type"'.format( constructor), f.lineno, f.col_offset) converted = self.visit(value) assert converted is not None typ = converted else: self.fail( 'Unexpected argument "{}" for argument constructor'.format(k.arg), value.lineno, value.col_offset) return CallableArgument(typ, name, constructor, e.lineno, e.col_offset) def translate_argument_list(self, l: Sequence[ast3.expr]) -> TypeList: return TypeList([self.visit(e) for e in l], line=self.line) def _extract_argument_name(self, n: ast3.expr) -> Optional[str]: if isinstance(n, ast3.Str): return n.s.strip() elif isinstance(n, ast3.NameConstant) and str(n.value) == 'None': return None self.fail('Expected string literal for argument name, got {}'.format( type(n).__name__), self.line, 0) return None def visit_Name(self, n: ast3.Name) -> Type: return UnboundType(n.id, line=self.line) def visit_NameConstant(self, n: ast3.NameConstant) -> Type: return UnboundType(str(n.value)) # Str(string s) def visit_Str(self, n: ast3.Str) -> Type: return (parse_type_comment(n.s.strip(), self.line, self.errors) or AnyType(TypeOfAny.from_error)) # Subscript(expr value, slice slice, expr_context ctx) def visit_Subscript(self, n: ast3.Subscript) -> Type: if not isinstance(n.slice, ast3.Index): self.fail(TYPE_COMMENT_SYNTAX_ERROR, self.line, getattr(n, 'col_offset', -1)) return AnyType(TypeOfAny.from_error) empty_tuple_index = False if isinstance(n.slice.value, ast3.Tuple): params = self.translate_expr_list(n.slice.value.elts) if len(n.slice.value.elts) == 0: empty_tuple_index = True else: params = [self.visit(n.slice.value)] value = self.visit(n.value) if isinstance(value, UnboundType) and not value.args: return UnboundType(value.name, params, line=self.line, empty_tuple_index=empty_tuple_index) else: self.fail(TYPE_COMMENT_AST_ERROR, self.line, getattr(n, 'col_offset', -1)) return AnyType(TypeOfAny.from_error) def visit_Tuple(self, n: ast3.Tuple) -> Type: return TupleType(self.translate_expr_list(n.elts), _dummy_fallback, 
implicit=True, line=self.line)

    # Attribute(expr value, identifier attr, expr_context ctx)
    def visit_Attribute(self, n: ast3.Attribute) -> Type:
        before_dot = self.visit(n.value)
        if isinstance(before_dot, UnboundType) and not before_dot.args:
            return UnboundType("{}.{}".format(before_dot.name, n.attr), line=self.line)
        else:
            self.fail(TYPE_COMMENT_AST_ERROR, self.line, getattr(n, 'col_offset', -1))
            return AnyType(TypeOfAny.from_error)

    # Ellipsis
    def visit_Ellipsis(self, n: ast3.Ellipsis) -> Type:
        return EllipsisType(line=self.line)

    # List(expr* elts, expr_context ctx)
    def visit_List(self, n: ast3.List) -> Type:
        return self.translate_argument_list(n.elts)


def stringify_name(n: ast3.AST) -> Optional[str]:
    if isinstance(n, ast3.Name):
        return n.id
    elif isinstance(n, ast3.Attribute):
        sv = stringify_name(n.value)
        if sv is not None:
            return "{}.{}".format(sv, n.attr)
    return None  # Can't do it.

mypy-0.560/mypy/fastparse2.py
"""This file is nearly identical to `fastparse.py`, except that it works with
a Python 2 AST instead of a Python 3 AST.

Previously, Python 2 code was handled by first obtaining the Python 2 AST via
typed_ast, converting it into a Python 3 AST using typed_ast.conversion, and
then running it through mypy.fastparse. While this worked, it did add some
overhead, especially in larger Python 2 codebases. This module allows us to
skip the conversion step, saving us some time.

The reason this file is not easily merged with mypy.fastparse, despite the
large amount of redundancy, is that the Python 2 AST and the Python 3 AST nodes
belong to two completely different class hierarchies, which made it difficult
to write a shared visitor between the two in a typesafe way.
""" from functools import wraps import sys from typing import Tuple, Union, TypeVar, Callable, Sequence, Optional, Any, cast, List, Set from mypy.sharedparse import ( special_function_elide_names, argument_elide_name, ) from mypy.nodes import ( MypyFile, Node, ImportBase, Import, ImportAll, ImportFrom, FuncDef, OverloadedFuncDef, ClassDef, Decorator, Block, Var, OperatorAssignmentStmt, ExpressionStmt, AssignmentStmt, ReturnStmt, RaiseStmt, AssertStmt, DelStmt, BreakStmt, ContinueStmt, PassStmt, GlobalDecl, WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt, TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr, DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr, UnaryExpr, LambdaExpr, ComparisonExpr, DictionaryComprehension, SetComprehension, ComplexExpr, EllipsisExpr, YieldExpr, Argument, Expression, Statement, BackquoteExpr, PrintStmt, ExecStmt, ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_STAR2, OverloadPart, check_arg_names, ) from mypy.types import ( Type, CallableType, AnyType, UnboundType, EllipsisType, TypeOfAny ) from mypy import experiments from mypy import messages from mypy.errors import Errors from mypy.fastparse import TypeConverter, parse_type_comment from mypy.options import Options try: from typed_ast import ast27 from typed_ast import ast3 except ImportError: if sys.version_info.minor > 2: try: from typed_ast import ast35 # type: ignore except ImportError: print('The typed_ast package is not installed.\n' 'You can install it with `python3 -m pip install typed-ast`.', file=sys.stderr) else: print('You need a more recent version of the typed_ast package.\n' 'You can update to the latest version with ' '`python3 -m pip install -U typed-ast`.', file=sys.stderr) else: print('Mypy requires the typed_ast package, which is only compatible with\n' 'Python 3.3 and greater.', file=sys.stderr) sys.exit(1) T = TypeVar('T', bound=Union[ast27.expr, ast27.stmt]) U = TypeVar('U', bound=Node) V = TypeVar('V') # There is no way to create reasonable fallbacks at this stage, # they must be patched later. _dummy_fallback = None # type: Any TYPE_COMMENT_SYNTAX_ERROR = 'syntax error in type comment' TYPE_COMMENT_AST_ERROR = 'invalid type comment' def parse(source: Union[str, bytes], fnam: str, module: Optional[str], errors: Optional[Errors] = None, options: Options = Options()) -> MypyFile: """Parse a source file, without doing any semantic analysis. Return the parse tree. If errors is not provided, raise ParseError on failure. Otherwise, use the errors object to report parse errors. 
""" raise_on_error = False if errors is None: errors = Errors() raise_on_error = True errors.set_file(fnam, module) is_stub_file = fnam.endswith('.pyi') try: assert options.python_version[0] < 3 and not is_stub_file ast = ast27.parse(source, fnam, 'exec') tree = ASTConverter(options=options, is_stub=is_stub_file, errors=errors, ).visit(ast) assert isinstance(tree, MypyFile) tree.path = fnam tree.is_stub = is_stub_file except SyntaxError as e: errors.report(e.lineno, e.offset, e.msg, blocker=True) tree = MypyFile([], [], False, set()) if raise_on_error and errors.is_errors(): errors.raise_error() return tree def with_line(f: Callable[['ASTConverter', T], U]) -> Callable[['ASTConverter', T], U]: @wraps(f) def wrapper(self: 'ASTConverter', ast: T) -> U: node = f(self, ast) node.set_line(ast.lineno, ast.col_offset) return node return wrapper def find(f: Callable[[V], bool], seq: Sequence[V]) -> Optional[V]: for item in seq: if f(item): return item return None def is_no_type_check_decorator(expr: ast27.expr) -> bool: if isinstance(expr, ast27.Name): return expr.id == 'no_type_check' elif isinstance(expr, ast27.Attribute): if isinstance(expr.value, ast27.Name): return expr.value.id == 'typing' and expr.attr == 'no_type_check' return False class ASTConverter(ast27.NodeTransformer): def __init__(self, options: Options, is_stub: bool, errors: Errors) -> None: self.class_nesting = 0 self.imports = [] # type: List[ImportBase] self.options = options self.is_stub = is_stub self.errors = errors def fail(self, msg: str, line: int, column: int) -> None: self.errors.report(line, column, msg, blocker=True) def generic_visit(self, node: ast27.AST) -> None: raise RuntimeError('AST node not implemented: ' + str(type(node))) def visit(self, node: Optional[ast27.AST]) -> Any: # same as in typed_ast stub if node is None: return None return super().visit(node) def translate_expr_list(self, l: Sequence[ast27.AST]) -> List[Expression]: res = [] # type: List[Expression] for e in l: exp = self.visit(e) assert isinstance(exp, Expression) res.append(exp) return res def translate_stmt_list(self, l: Sequence[ast27.AST]) -> List[Statement]: res = [] # type: List[Statement] for e in l: stmt = self.visit(e) assert isinstance(stmt, Statement) res.append(stmt) return res op_map = { ast27.Add: '+', ast27.Sub: '-', ast27.Mult: '*', ast27.Div: '/', ast27.Mod: '%', ast27.Pow: '**', ast27.LShift: '<<', ast27.RShift: '>>', ast27.BitOr: '|', ast27.BitXor: '^', ast27.BitAnd: '&', ast27.FloorDiv: '//' } def from_operator(self, op: ast27.operator) -> str: op_name = ASTConverter.op_map.get(type(op)) if op_name is None: raise RuntimeError('Unknown operator ' + str(type(op))) elif op_name == '@': raise RuntimeError('mypy does not support the MatMult operator') else: return op_name comp_op_map = { ast27.Gt: '>', ast27.Lt: '<', ast27.Eq: '==', ast27.GtE: '>=', ast27.LtE: '<=', ast27.NotEq: '!=', ast27.Is: 'is', ast27.IsNot: 'is not', ast27.In: 'in', ast27.NotIn: 'not in' } def from_comp_operator(self, op: ast27.cmpop) -> str: op_name = ASTConverter.comp_op_map.get(type(op)) if op_name is None: raise RuntimeError('Unknown comparison operator ' + str(type(op))) else: return op_name def as_block(self, stmts: List[ast27.stmt], lineno: int) -> Optional[Block]: b = None if stmts: b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts))) b.set_line(lineno) return b def as_required_block(self, stmts: List[ast27.stmt], lineno: int) -> Block: assert stmts # must be non-empty b = 
Block(self.fix_function_overloads(self.translate_stmt_list(stmts)))
        b.set_line(lineno)
        return b

    def fix_function_overloads(self, stmts: List[Statement]) -> List[Statement]:
        ret = []  # type: List[Statement]
        current_overload = []  # type: List[OverloadPart]
        current_overload_name = None
        for stmt in stmts:
            if (current_overload_name is not None
                    and isinstance(stmt, (Decorator, FuncDef))
                    and stmt.name() == current_overload_name):
                current_overload.append(stmt)
            else:
                if len(current_overload) == 1:
                    ret.append(current_overload[0])
                elif len(current_overload) > 1:
                    ret.append(OverloadedFuncDef(current_overload))

                if isinstance(stmt, Decorator):
                    current_overload = [stmt]
                    current_overload_name = stmt.name()
                else:
                    current_overload = []
                    current_overload_name = None
                    ret.append(stmt)

        if len(current_overload) == 1:
            ret.append(current_overload[0])
        elif len(current_overload) > 1:
            ret.append(OverloadedFuncDef(current_overload))
        return ret

    def in_class(self) -> bool:
        return self.class_nesting > 0

    def translate_module_id(self, id: str) -> str:
        """Return the actual, internal module id for a source text id.

        For example, translate '__builtin__' in Python 2 to 'builtins'.
        """
        if id == self.options.custom_typing_module:
            return 'typing'
        elif id == '__builtin__':
            # HACK: __builtin__ in Python 2 is aliased to builtins. However, the
            # implementation is named __builtin__.py (there is another layer of
            # translation elsewhere).
            return 'builtins'
        return id

    def visit_Module(self, mod: ast27.Module) -> MypyFile:
        body = self.fix_function_overloads(self.translate_stmt_list(mod.body))
        return MypyFile(body,
                        self.imports,
                        False,
                        {ti.lineno for ti in mod.type_ignores},
                        )

    # --- stmt ---
    # FunctionDef(identifier name, arguments args,
    #             stmt* body, expr* decorator_list, expr? returns, string? type_comment)
    # arguments = (arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults,
    #              arg?
kwarg, expr* defaults) @with_line def visit_FunctionDef(self, n: ast27.FunctionDef) -> Statement: converter = TypeConverter(self.errors, line=n.lineno) args, decompose_stmts = self.transform_args(n.args, n.lineno) arg_kinds = [arg.kind for arg in args] arg_names = [arg.variable.name() for arg in args] # type: List[Optional[str]] arg_names = [None if argument_elide_name(name) else name for name in arg_names] if special_function_elide_names(n.name): arg_names = [None] * len(arg_names) arg_types = [] # type: List[Optional[Type]] if (n.decorator_list and any(is_no_type_check_decorator(d) for d in n.decorator_list)): arg_types = [None] * len(args) return_type = None elif n.type_comment is not None and len(n.type_comment) > 0: try: func_type_ast = ast3.parse(n.type_comment, '', 'func_type') assert isinstance(func_type_ast, ast3.FunctionType) # for ellipsis arg if (len(func_type_ast.argtypes) == 1 and isinstance(func_type_ast.argtypes[0], ast3.Ellipsis)): arg_types = [a.type_annotation if a.type_annotation is not None else AnyType(TypeOfAny.unannotated) for a in args] else: # PEP 484 disallows both type annotations and type comments if any(a.type_annotation is not None for a in args): self.fail(messages.DUPLICATE_TYPE_SIGNATURES, n.lineno, n.col_offset) arg_types = [a if a is not None else AnyType(TypeOfAny.unannotated) for a in converter.translate_expr_list(func_type_ast.argtypes)] return_type = converter.visit(func_type_ast.returns) # add implicit self type if self.in_class() and len(arg_types) < len(args): arg_types.insert(0, AnyType(TypeOfAny.special_form)) except SyntaxError: self.fail(TYPE_COMMENT_SYNTAX_ERROR, n.lineno, n.col_offset) arg_types = [AnyType(TypeOfAny.from_error)] * len(args) return_type = AnyType(TypeOfAny.from_error) else: arg_types = [a.type_annotation for a in args] return_type = converter.visit(None) for arg, arg_type in zip(args, arg_types): self.set_type_optional(arg_type, arg.initializer) func_type = None if any(arg_types) or return_type: if len(arg_types) != 1 and any(isinstance(t, EllipsisType) for t in arg_types): self.fail("Ellipses cannot accompany other argument types " "in function type signature.", n.lineno, 0) elif len(arg_types) > len(arg_kinds): self.fail('Type signature has too many arguments', n.lineno, 0) elif len(arg_types) < len(arg_kinds): self.fail('Type signature has too few arguments', n.lineno, 0) else: any_type = AnyType(TypeOfAny.unannotated) func_type = CallableType([a if a is not None else any_type for a in arg_types], arg_kinds, arg_names, return_type if return_type is not None else any_type, _dummy_fallback) body = self.as_required_block(n.body, n.lineno) if decompose_stmts: body.body = decompose_stmts + body.body func_def = FuncDef(n.name, args, body, func_type) if func_type is not None: func_type.definition = func_def func_type.line = n.lineno if n.decorator_list: var = Var(func_def.name()) var.is_ready = False var.set_line(n.decorator_list[0].lineno) func_def.is_decorated = True func_def.set_line(n.lineno + len(n.decorator_list)) func_def.body.set_line(func_def.get_line()) return Decorator(func_def, self.translate_expr_list(n.decorator_list), var) else: return func_def def set_type_optional(self, type: Optional[Type], initializer: Optional[Expression]) -> None: if self.options.no_implicit_optional: return # Indicate that type should be wrapped in an Optional if arg is initialized to None. 
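        # Editorial example (not part of mypy; assumes the default behaviour, i.e.
        # --no-implicit-optional is not given): for a Python 2 function
        #
        #     def f(x=None):
        #         # type: (int) -> None
        #         ...
        #
        # the declared type of x is wrapped so it behaves like Optional[int].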
optional = isinstance(initializer, NameExpr) and initializer.name == 'None' if isinstance(type, UnboundType): type.optional = optional def transform_args(self, n: ast27.arguments, line: int, ) -> Tuple[List[Argument], List[Statement]]: type_comments = n.type_comments converter = TypeConverter(self.errors, line=line) decompose_stmts = [] # type: List[Statement] def extract_names(arg: ast27.expr) -> List[str]: if isinstance(arg, ast27.Name): return [arg.id] elif isinstance(arg, ast27.Tuple): return [name for elt in arg.elts for name in extract_names(elt)] else: return [] def convert_arg(index: int, arg: ast27.expr) -> Var: if isinstance(arg, ast27.Name): v = arg.id elif isinstance(arg, ast27.Tuple): v = '__tuple_arg_{}'.format(index + 1) rvalue = NameExpr(v) rvalue.set_line(line) assignment = AssignmentStmt([self.visit(arg)], rvalue) assignment.set_line(line) decompose_stmts.append(assignment) else: raise RuntimeError("'{}' is not a valid argument.".format(ast27.dump(arg))) return Var(v) def get_type(i: int) -> Optional[Type]: if i < len(type_comments) and type_comments[i] is not None: return converter.visit_raw_str(type_comments[i]) return None args = [(convert_arg(i, arg), get_type(i)) for i, arg in enumerate(n.args)] defaults = self.translate_expr_list(n.defaults) names = [name for arg in n.args for name in extract_names(arg)] # type: List[str] new_args = [] # type: List[Argument] num_no_defaults = len(args) - len(defaults) # positional arguments without defaults for a, annotation in args[:num_no_defaults]: new_args.append(Argument(a, annotation, None, ARG_POS)) # positional arguments with defaults for (a, annotation), d in zip(args[num_no_defaults:], defaults): new_args.append(Argument(a, annotation, d, ARG_OPT)) # *arg if n.vararg is not None: new_args.append(Argument(Var(n.vararg), get_type(len(args)), None, ARG_STAR)) names.append(n.vararg) # **kwarg if n.kwarg is not None: typ = get_type(len(args) + (0 if n.vararg is None else 1)) new_args.append(Argument(Var(n.kwarg), typ, None, ARG_STAR2)) names.append(n.kwarg) # We don't have any context object to give, but we have closed around the line num def fail_arg(msg: str, arg: None) -> None: self.fail(msg, line, 0) check_arg_names(names, [None] * len(names), fail_arg) return new_args, decompose_stmts def stringify_name(self, n: ast27.AST) -> str: if isinstance(n, ast27.Name): return n.id elif isinstance(n, ast27.Attribute): return "{}.{}".format(self.stringify_name(n.value), n.attr) else: assert False, "can't stringify " + str(type(n)) # ClassDef(identifier name, # expr* bases, # keyword* keywords, # stmt* body, # expr* decorator_list) @with_line def visit_ClassDef(self, n: ast27.ClassDef) -> ClassDef: self.class_nesting += 1 cdef = ClassDef(n.name, self.as_required_block(n.body, n.lineno), None, self.translate_expr_list(n.bases), metaclass=None) cdef.decorators = self.translate_expr_list(n.decorator_list) self.class_nesting -= 1 return cdef # Return(expr? value) @with_line def visit_Return(self, n: ast27.Return) -> ReturnStmt: return ReturnStmt(self.visit(n.value)) # Delete(expr* targets) @with_line def visit_Delete(self, n: ast27.Delete) -> DelStmt: if len(n.targets) > 1: tup = TupleExpr(self.translate_expr_list(n.targets)) tup.set_line(n.lineno) return DelStmt(tup) else: return DelStmt(self.visit(n.targets[0])) # Assign(expr* targets, expr value, string? 
type_comment) @with_line def visit_Assign(self, n: ast27.Assign) -> AssignmentStmt: typ = None if n.type_comment: typ = parse_type_comment(n.type_comment, n.lineno, self.errors) return AssignmentStmt(self.translate_expr_list(n.targets), self.visit(n.value), type=typ) # AugAssign(expr target, operator op, expr value) @with_line def visit_AugAssign(self, n: ast27.AugAssign) -> OperatorAssignmentStmt: return OperatorAssignmentStmt(self.from_operator(n.op), self.visit(n.target), self.visit(n.value)) # For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) @with_line def visit_For(self, n: ast27.For) -> ForStmt: if n.type_comment is not None: target_type = parse_type_comment(n.type_comment, n.lineno, self.errors) else: target_type = None return ForStmt(self.visit(n.target), self.visit(n.iter), self.as_required_block(n.body, n.lineno), self.as_block(n.orelse, n.lineno), target_type) # While(expr test, stmt* body, stmt* orelse) @with_line def visit_While(self, n: ast27.While) -> WhileStmt: return WhileStmt(self.visit(n.test), self.as_required_block(n.body, n.lineno), self.as_block(n.orelse, n.lineno)) # If(expr test, stmt* body, stmt* orelse) @with_line def visit_If(self, n: ast27.If) -> IfStmt: return IfStmt([self.visit(n.test)], [self.as_required_block(n.body, n.lineno)], self.as_block(n.orelse, n.lineno)) # With(withitem* items, stmt* body, string? type_comment) @with_line def visit_With(self, n: ast27.With) -> WithStmt: if n.type_comment is not None: target_type = parse_type_comment(n.type_comment, n.lineno, self.errors) else: target_type = None return WithStmt([self.visit(n.context_expr)], [self.visit(n.optional_vars)], self.as_required_block(n.body, n.lineno), target_type) @with_line def visit_Raise(self, n: ast27.Raise) -> RaiseStmt: if n.type is None: e = None else: if n.inst is None: e = self.visit(n.type) else: if n.tback is None: e = TupleExpr([self.visit(n.type), self.visit(n.inst)]) else: e = TupleExpr([self.visit(n.type), self.visit(n.inst), self.visit(n.tback)]) return RaiseStmt(e, None) # TryExcept(stmt* body, excepthandler* handlers, stmt* orelse) @with_line def visit_TryExcept(self, n: ast27.TryExcept) -> TryStmt: return self.try_handler(n.body, n.handlers, n.orelse, [], n.lineno) @with_line def visit_TryFinally(self, n: ast27.TryFinally) -> TryStmt: if len(n.body) == 1 and isinstance(n.body[0], ast27.TryExcept): return self.try_handler([n.body[0]], [], [], n.finalbody, n.lineno) else: return self.try_handler(n.body, [], [], n.finalbody, n.lineno) def try_handler(self, body: List[ast27.stmt], handlers: List[ast27.ExceptHandler], orelse: List[ast27.stmt], finalbody: List[ast27.stmt], lineno: int) -> TryStmt: vs = [] # type: List[Optional[NameExpr]] for item in handlers: if item.name is None: vs.append(None) elif isinstance(item.name, ast27.Name): vs.append(NameExpr(item.name.id)) else: self.fail("Sorry, `except , ` is not supported", item.lineno, item.col_offset) vs.append(None) types = [self.visit(h.type) for h in handlers] handlers_ = [self.as_required_block(h.body, h.lineno) for h in handlers] return TryStmt(self.as_required_block(body, lineno), vs, types, handlers_, self.as_block(orelse, lineno), self.as_block(finalbody, lineno)) @with_line def visit_Print(self, n: ast27.Print) -> PrintStmt: return PrintStmt(self.translate_expr_list(n.values), n.nl, self.visit(n.dest)) @with_line def visit_Exec(self, n: ast27.Exec) -> ExecStmt: return ExecStmt(self.visit(n.body), self.visit(n.globals), self.visit(n.locals)) @with_line def visit_Repr(self, n: ast27.Repr) 
-> BackquoteExpr: return BackquoteExpr(self.visit(n.value)) # Assert(expr test, expr? msg) @with_line def visit_Assert(self, n: ast27.Assert) -> AssertStmt: return AssertStmt(self.visit(n.test), self.visit(n.msg)) # Import(alias* names) @with_line def visit_Import(self, n: ast27.Import) -> Import: names = [] # type: List[Tuple[str, Optional[str]]] for alias in n.names: name = self.translate_module_id(alias.name) asname = alias.asname if asname is None and name != alias.name: # if the module name has been translated (and it's not already # an explicit import-as), make it an implicit import-as the # original name asname = alias.name names.append((name, asname)) i = Import(names) self.imports.append(i) return i # ImportFrom(identifier? module, alias* names, int? level) @with_line def visit_ImportFrom(self, n: ast27.ImportFrom) -> ImportBase: assert n.level is not None if len(n.names) == 1 and n.names[0].name == '*': mod = n.module if n.module is not None else '' i = ImportAll(mod, n.level) # type: ImportBase else: i = ImportFrom(self.translate_module_id(n.module) if n.module is not None else '', n.level, [(a.name, a.asname) for a in n.names]) self.imports.append(i) return i # Global(identifier* names) @with_line def visit_Global(self, n: ast27.Global) -> GlobalDecl: return GlobalDecl(n.names) # Expr(expr value) @with_line def visit_Expr(self, n: ast27.Expr) -> ExpressionStmt: value = self.visit(n.value) return ExpressionStmt(value) # Pass @with_line def visit_Pass(self, n: ast27.Pass) -> PassStmt: return PassStmt() # Break @with_line def visit_Break(self, n: ast27.Break) -> BreakStmt: return BreakStmt() # Continue @with_line def visit_Continue(self, n: ast27.Continue) -> ContinueStmt: return ContinueStmt() # --- expr --- # BoolOp(boolop op, expr* values) @with_line def visit_BoolOp(self, n: ast27.BoolOp) -> OpExpr: # mypy translates (1 and 2 and 3) as (1 and (2 and 3)) assert len(n.values) >= 2 if isinstance(n.op, ast27.And): op = 'and' elif isinstance(n.op, ast27.Or): op = 'or' else: raise RuntimeError('unknown BoolOp ' + str(type(n))) # potentially inefficient! 
def group(vals: List[Expression]) -> OpExpr: if len(vals) == 2: return OpExpr(op, vals[0], vals[1]) else: return OpExpr(op, vals[0], group(vals[1:])) return group(self.translate_expr_list(n.values)) # BinOp(expr left, operator op, expr right) @with_line def visit_BinOp(self, n: ast27.BinOp) -> OpExpr: op = self.from_operator(n.op) if op is None: raise RuntimeError('cannot translate BinOp ' + str(type(n.op))) return OpExpr(op, self.visit(n.left), self.visit(n.right)) # UnaryOp(unaryop op, expr operand) @with_line def visit_UnaryOp(self, n: ast27.UnaryOp) -> UnaryExpr: op = None if isinstance(n.op, ast27.Invert): op = '~' elif isinstance(n.op, ast27.Not): op = 'not' elif isinstance(n.op, ast27.UAdd): op = '+' elif isinstance(n.op, ast27.USub): op = '-' if op is None: raise RuntimeError('cannot translate UnaryOp ' + str(type(n.op))) return UnaryExpr(op, self.visit(n.operand)) # Lambda(arguments args, expr body) @with_line def visit_Lambda(self, n: ast27.Lambda) -> LambdaExpr: args, decompose_stmts = self.transform_args(n.args, n.lineno) n_body = ast27.Return(n.body) n_body.lineno = n.lineno n_body.col_offset = n.col_offset body = self.as_required_block([n_body], n.lineno) if decompose_stmts: body.body = decompose_stmts + body.body return LambdaExpr(args, body) # IfExp(expr test, expr body, expr orelse) @with_line def visit_IfExp(self, n: ast27.IfExp) -> ConditionalExpr: return ConditionalExpr(self.visit(n.test), self.visit(n.body), self.visit(n.orelse)) # Dict(expr* keys, expr* values) @with_line def visit_Dict(self, n: ast27.Dict) -> DictExpr: return DictExpr(list(zip(self.translate_expr_list(n.keys), self.translate_expr_list(n.values)))) # Set(expr* elts) @with_line def visit_Set(self, n: ast27.Set) -> SetExpr: return SetExpr(self.translate_expr_list(n.elts)) # ListComp(expr elt, comprehension* generators) @with_line def visit_ListComp(self, n: ast27.ListComp) -> ListComprehension: return ListComprehension(self.visit_GeneratorExp(cast(ast27.GeneratorExp, n))) # SetComp(expr elt, comprehension* generators) @with_line def visit_SetComp(self, n: ast27.SetComp) -> SetComprehension: return SetComprehension(self.visit_GeneratorExp(cast(ast27.GeneratorExp, n))) # DictComp(expr key, expr value, comprehension* generators) @with_line def visit_DictComp(self, n: ast27.DictComp) -> DictionaryComprehension: targets = [self.visit(c.target) for c in n.generators] iters = [self.visit(c.iter) for c in n.generators] ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators] return DictionaryComprehension(self.visit(n.key), self.visit(n.value), targets, iters, ifs_list, [False for _ in n.generators]) # GeneratorExp(expr elt, comprehension* generators) @with_line def visit_GeneratorExp(self, n: ast27.GeneratorExp) -> GeneratorExpr: targets = [self.visit(c.target) for c in n.generators] iters = [self.visit(c.iter) for c in n.generators] ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators] return GeneratorExpr(self.visit(n.elt), targets, iters, ifs_list, [False for _ in n.generators]) # Yield(expr? value) @with_line def visit_Yield(self, n: ast27.Yield) -> YieldExpr: return YieldExpr(self.visit(n.value)) # Compare(expr left, cmpop* ops, expr* comparators) @with_line def visit_Compare(self, n: ast27.Compare) -> ComparisonExpr: operators = [self.from_comp_operator(o) for o in n.ops] operands = self.translate_expr_list([n.left] + n.comparators) return ComparisonExpr(operators, operands) # Call(expr func, expr* args, keyword* keywords) # keyword = (identifier? 
arg, expr value) @with_line def visit_Call(self, n: ast27.Call) -> CallExpr: arg_types = [] # type: List[ast27.expr] arg_kinds = [] # type: List[int] signature = [] # type: List[Optional[str]] arg_types.extend(n.args) arg_kinds.extend(ARG_POS for a in n.args) signature.extend(None for a in n.args) if n.starargs is not None: arg_types.append(n.starargs) arg_kinds.append(ARG_STAR) signature.append(None) arg_types.extend(k.value for k in n.keywords) arg_kinds.extend(ARG_NAMED for k in n.keywords) signature.extend(k.arg for k in n.keywords) if n.kwargs is not None: arg_types.append(n.kwargs) arg_kinds.append(ARG_STAR2) signature.append(None) return CallExpr(self.visit(n.func), self.translate_expr_list(arg_types), arg_kinds, signature) # Num(object n) -- a number as a PyObject. @with_line def visit_Num(self, new: ast27.Num) -> Expression: value = new.n is_inverse = False if str(new.n).startswith('-'): # Hackish because of complex. value = -new.n is_inverse = True if isinstance(value, int): expr = IntExpr(value) # type: Expression elif isinstance(value, float): expr = FloatExpr(value) elif isinstance(value, complex): expr = ComplexExpr(value) else: raise RuntimeError('num not implemented for ' + str(type(new.n))) if is_inverse: expr = UnaryExpr('-', expr) return expr # Str(string s) @with_line def visit_Str(self, s: ast27.Str) -> Expression: # Hack: assume all string literals in Python 2 stubs are normal # strs (i.e. not unicode). All stubs are parsed with the Python 3 # parser, which causes unprefixed string literals to be interpreted # as unicode instead of bytes. This hack is generally okay, # because mypy considers str literals to be compatible with # unicode. if isinstance(s.s, bytes): n = s.s # The following line is a bit hacky, but is the best way to maintain # compatibility with how mypy currently parses the contents of bytes literals. contents = str(n)[2:-1] return StrExpr(contents) else: return UnicodeExpr(s.s) # Ellipsis def visit_Ellipsis(self, n: ast27.Ellipsis) -> EllipsisExpr: return EllipsisExpr() # Attribute(expr value, identifier attr, expr_context ctx) @with_line def visit_Attribute(self, n: ast27.Attribute) -> Expression: if (isinstance(n.value, ast27.Call) and isinstance(n.value.func, ast27.Name) and n.value.func.id == 'super'): return SuperExpr(n.attr, self.visit(n.value)) return MemberExpr(self.visit(n.value), n.attr) # Subscript(expr value, slice slice, expr_context ctx) @with_line def visit_Subscript(self, n: ast27.Subscript) -> IndexExpr: return IndexExpr(self.visit(n.value), self.visit(n.slice)) # Name(identifier id, expr_context ctx) @with_line def visit_Name(self, n: ast27.Name) -> NameExpr: return NameExpr(n.id) # List(expr* elts, expr_context ctx) @with_line def visit_List(self, n: ast27.List) -> ListExpr: return ListExpr([self.visit(e) for e in n.elts]) # Tuple(expr* elts, expr_context ctx) @with_line def visit_Tuple(self, n: ast27.Tuple) -> TupleExpr: return TupleExpr([self.visit(e) for e in n.elts]) # --- slice --- # Slice(expr? lower, expr? upper, expr? 
step) def visit_Slice(self, n: ast27.Slice) -> SliceExpr: return SliceExpr(self.visit(n.lower), self.visit(n.upper), self.visit(n.step)) # ExtSlice(slice* dims) def visit_ExtSlice(self, n: ast27.ExtSlice) -> TupleExpr: return TupleExpr(self.translate_expr_list(n.dims)) # Index(expr value) def visit_Index(self, n: ast27.Index) -> Expression: return self.visit(n.value) mypy-0.560/mypy/fixup.py0000644€tŠÔÚ€2›s®0000002552013215007205021376 0ustar jukkaDROPBOX\Domain Users00000000000000"""Fix up various things after deserialization.""" from typing import Any, Dict, Optional from mypy.nodes import ( MypyFile, SymbolNode, SymbolTable, SymbolTableNode, TypeInfo, FuncDef, OverloadedFuncDef, Decorator, Var, TypeVarExpr, ClassDef, Block, LDEF, MDEF, GDEF, TYPE_ALIAS ) from mypy.types import ( CallableType, EllipsisType, Instance, Overloaded, TupleType, TypedDictType, TypeList, TypeVarType, UnboundType, UnionType, TypeVisitor, TypeType, NOT_READY ) from mypy.visitor import NodeVisitor def fixup_module_pass_one(tree: MypyFile, modules: Dict[str, MypyFile], quick_and_dirty: bool) -> None: node_fixer = NodeFixer(modules, quick_and_dirty) node_fixer.visit_symbol_table(tree.names) def fixup_module_pass_two(tree: MypyFile, modules: Dict[str, MypyFile], quick_and_dirty: bool) -> None: compute_all_mros(tree.names, modules) def compute_all_mros(symtab: SymbolTable, modules: Dict[str, MypyFile]) -> None: for key, value in symtab.items(): if value.kind in (LDEF, MDEF, GDEF) and isinstance(value.node, TypeInfo): info = value.node info.calculate_mro() assert info.mro, "No MRO calculated for %s" % (info.fullname(),) compute_all_mros(info.names, modules) # TODO: Fix up .info when deserializing, i.e. much earlier. class NodeFixer(NodeVisitor[None]): current_info = None # type: Optional[TypeInfo] def __init__(self, modules: Dict[str, MypyFile], quick_and_dirty: bool) -> None: self.modules = modules self.quick_and_dirty = quick_and_dirty self.type_fixer = TypeFixer(self.modules, quick_and_dirty) # NOTE: This method isn't (yet) part of the NodeVisitor API. def visit_type_info(self, info: TypeInfo) -> None: save_info = self.current_info try: self.current_info = info if info.defn: info.defn.accept(self) if info.names: self.visit_symbol_table(info.names) if info.bases: for base in info.bases: base.accept(self.type_fixer) if info._promote: info._promote.accept(self.type_fixer) if info.tuple_type: info.tuple_type.accept(self.type_fixer) if info.typeddict_type: info.typeddict_type.accept(self.type_fixer) if info.declared_metaclass: info.declared_metaclass.accept(self.type_fixer) if info.metaclass_type: info.metaclass_type.accept(self.type_fixer) finally: self.current_info = save_info # NOTE: This method *definitely* isn't part of the NodeVisitor API. def visit_symbol_table(self, symtab: SymbolTable) -> None: # Copy the items because we may mutate symtab. for key, value in list(symtab.items()): cross_ref = value.cross_ref if cross_ref is not None: # Fix up cross-reference. 
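                # Editorial note (not part of mypy): a deserialized SymbolTableNode
                # may carry only a fully qualified name (e.g. 'builtins.int') in
                # cross_ref; the code below swaps it for the real node found in the
                # freshly loaded module map.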
del value.cross_ref if cross_ref in self.modules: value.node = self.modules[cross_ref] else: stnode = lookup_qualified_stnode(self.modules, cross_ref, self.quick_and_dirty) if stnode is not None: value.node = stnode.node value.type_override = stnode.type_override if (self.quick_and_dirty and value.kind == TYPE_ALIAS and stnode.type_override is None): value.type_override = Instance(stale_info(), []) value.alias_tvars = stnode.alias_tvars or [] elif not self.quick_and_dirty: assert stnode is not None, "Could not find cross-ref %s" % (cross_ref,) else: # We have a missing crossref in quick mode, need to put something value.node = stale_info() if value.kind == TYPE_ALIAS: value.type_override = Instance(stale_info(), []) else: if isinstance(value.node, TypeInfo): # TypeInfo has no accept(). TODO: Add it? self.visit_type_info(value.node) elif value.node is not None: value.node.accept(self) if value.type_override is not None: value.type_override.accept(self.type_fixer) def visit_func_def(self, func: FuncDef) -> None: if self.current_info is not None: func.info = self.current_info if func.type is not None: func.type.accept(self.type_fixer) def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: if self.current_info is not None: o.info = self.current_info if o.type: o.type.accept(self.type_fixer) for item in o.items: item.accept(self) if o.impl: o.impl.accept(self) def visit_decorator(self, d: Decorator) -> None: if self.current_info is not None: d.var.info = self.current_info if d.func: d.func.accept(self) if d.var: d.var.accept(self) for node in d.decorators: node.accept(self) def visit_class_def(self, c: ClassDef) -> None: for v in c.type_vars: for value in v.values: value.accept(self.type_fixer) v.upper_bound.accept(self.type_fixer) def visit_type_var_expr(self, tv: TypeVarExpr) -> None: for value in tv.values: value.accept(self.type_fixer) tv.upper_bound.accept(self.type_fixer) def visit_var(self, v: Var) -> None: if self.current_info is not None: v.info = self.current_info if v.type is not None: v.type.accept(self.type_fixer) class TypeFixer(TypeVisitor[None]): def __init__(self, modules: Dict[str, MypyFile], quick_and_dirty: bool) -> None: self.modules = modules self.quick_and_dirty = quick_and_dirty def visit_instance(self, inst: Instance) -> None: # TODO: Combine Instances that are exactly the same? type_ref = inst.type_ref if type_ref is None: return # We've already been here. del inst.type_ref node = lookup_qualified(self.modules, type_ref, self.quick_and_dirty) if isinstance(node, TypeInfo): inst.type = node # TODO: Is this needed or redundant? # Also fix up the bases, just in case. for base in inst.type.bases: if base.type is NOT_READY: base.accept(self) else: # Looks like a missing TypeInfo in quick mode, put something there assert self.quick_and_dirty, "Should never get here in normal mode" inst.type = stale_info() for a in inst.args: a.accept(self) def visit_any(self, o: Any) -> None: pass # Nothing to descend into. def visit_callable_type(self, ct: CallableType) -> None: if ct.fallback: ct.fallback.accept(self) for argt in ct.arg_types: # argt may be None, e.g. for __self in NamedTuple constructors. 
if argt is not None: argt.accept(self) if ct.ret_type is not None: ct.ret_type.accept(self) for v in ct.variables: if v.values: for val in v.values: val.accept(self) v.upper_bound.accept(self) for arg in ct.bound_args: if arg: arg.accept(self) def visit_overloaded(self, t: Overloaded) -> None: for ct in t.items(): ct.accept(self) def visit_deleted_type(self, o: Any) -> None: pass # Nothing to descend into. def visit_none_type(self, o: Any) -> None: pass # Nothing to descend into. def visit_uninhabited_type(self, o: Any) -> None: pass # Nothing to descend into. def visit_partial_type(self, o: Any) -> None: raise RuntimeError("Shouldn't get here", o) def visit_tuple_type(self, tt: TupleType) -> None: if tt.items: for it in tt.items: it.accept(self) if tt.fallback is not None: tt.fallback.accept(self) def visit_typeddict_type(self, tdt: TypedDictType) -> None: if tdt.items: for it in tdt.items.values(): it.accept(self) if tdt.fallback is not None: tdt.fallback.accept(self) def visit_type_var(self, tvt: TypeVarType) -> None: if tvt.values: for vt in tvt.values: vt.accept(self) if tvt.upper_bound is not None: tvt.upper_bound.accept(self) def visit_unbound_type(self, o: UnboundType) -> None: for a in o.args: a.accept(self) def visit_union_type(self, ut: UnionType) -> None: if ut.items: for it in ut.items: it.accept(self) def visit_void(self, o: Any) -> None: pass # Nothing to descend into. def visit_type_type(self, t: TypeType) -> None: t.item.accept(self) def lookup_qualified(modules: Dict[str, MypyFile], name: str, quick_and_dirty: bool) -> Optional[SymbolNode]: stnode = lookup_qualified_stnode(modules, name, quick_and_dirty) if stnode is None: return None else: return stnode.node def lookup_qualified_stnode(modules: Dict[str, MypyFile], name: str, quick_and_dirty: bool) -> Optional[SymbolTableNode]: head = name rest = [] while True: if '.' not in head: if not quick_and_dirty: assert '.' 
in head, "Cannot find %s" % (name,) return None head, tail = head.rsplit('.', 1) rest.append(tail) mod = modules.get(head) if mod is not None: break names = mod.names while True: if not rest: if not quick_and_dirty: assert rest, "Cannot find %s" % (name,) return None key = rest.pop() if key not in names: if not quick_and_dirty: assert key in names, "Cannot find %s for %s" % (key, name) return None stnode = names[key] if not rest: return stnode node = stnode.node assert isinstance(node, TypeInfo) names = node.names def stale_info() -> TypeInfo: suggestion = "" dummy_def = ClassDef(suggestion, Block([])) dummy_def.fullname = suggestion info = TypeInfo(SymbolTable(), dummy_def, "") info.mro = [info] info.bases = [] return info mypy-0.560/mypy/git.py0000644€tŠÔÚ€2›s®0000001162013215007205021022 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utilities for verifying git integrity.""" # Used also from setup.py, so don't pull in anything additional here (like mypy or typing): import os import pipes import subprocess import sys MYPY = False if MYPY: from typing import Iterator def is_git_repo(dir: str) -> bool: """Is the given directory version-controlled with git?""" return os.path.exists(os.path.join(dir, ".git")) def have_git() -> bool: """Can we run the git executable?""" try: subprocess.check_output(["git", "--help"]) return True except subprocess.CalledProcessError: return False except OSError: return False def get_submodules(dir: str) -> "Iterator[str]": """Return a list of all git top-level submodules in a given directory.""" # It would be nicer to do # "git submodule foreach 'echo MODULE $name $path $sha1 $toplevel'" # but that wouldn't work on Windows. output = subprocess.check_output(["git", "submodule", "status"], cwd=dir) # " name desc" # status='-': not initialized # status='+': changed # status='u': merge conflicts # status=' ': up-to-date for line in output.splitlines(): # Skip the status indicator, as it could be a space can confuse the split. line = line[1:] name = line.split(b" ")[1] yield name.decode(sys.getfilesystemencoding()) def git_revision(dir: str) -> bytes: """Get the SHA-1 of the HEAD of a git repository.""" return subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=dir).strip() def submodule_revision(dir: str, submodule: str) -> bytes: """Get the SHA-1 a submodule is supposed to have.""" output = subprocess.check_output(["git", "ls-files", "-s", submodule], cwd=dir).strip() # E.g.: "160000 e4a7edb949e0b920b16f61aeeb19fc3d328f3012 0 typeshed" return output.split()[1] def is_dirty(dir: str) -> bool: """Check whether a git repository has uncommitted changes.""" output = subprocess.check_output(["git", "status", "-uno", "--porcelain"], cwd=dir) return output.strip() != b"" def has_extra_files(dir: str) -> bool: """Check whether a git repository has untracked files.""" output = subprocess.check_output(["git", "clean", "--dry-run", "-d"], cwd=dir) return output.strip() != b"" def warn_no_git_executable() -> None: print("Warning: Couldn't check git integrity. 
" "git executable not in path.", file=sys.stderr) def warn_dirty(dir: str) -> None: print("Warning: git module '{}' has uncommitted changes.".format(dir), file=sys.stderr) print("Go to the directory", file=sys.stderr) print(" {}".format(dir), file=sys.stderr) print("and commit or reset your changes", file=sys.stderr) def warn_extra_files(dir: str) -> None: print("Warning: git module '{}' has untracked files.".format(dir), file=sys.stderr) print("Go to the directory", file=sys.stderr) print(" {}".format(dir), file=sys.stderr) print("and add & commit your new files.", file=sys.stderr) def chdir_prefix(dir: str) -> str: """Return the command to change to the target directory, plus '&&'.""" if os.path.relpath(dir) != ".": return "cd " + pipes.quote(dir) + " && " else: return "" def error_submodule_not_initialized(name: str, dir: str) -> None: print("Submodule '{}' not initialized.".format(name), file=sys.stderr) print("Please run:", file=sys.stderr) print(" {}git submodule update --init {}".format( chdir_prefix(dir), name), file=sys.stderr) def error_submodule_not_updated(name: str, dir: str) -> None: print("Submodule '{}' not updated.".format(name), file=sys.stderr) print("Please run:", file=sys.stderr) print(" {}git submodule update {}".format( chdir_prefix(dir), name), file=sys.stderr) print("(If you got this message because you updated {} yourself".format(name), file=sys.stderr) print(" then run \"git add {}\" to silence this check)".format(name), file=sys.stderr) def verify_git_integrity_or_abort(datadir: str) -> None: """Verify the (submodule) integrity of a git repository. Potentially output warnings/errors (to stderr), and exit with status 1 if we detected a severe problem. """ datadir = datadir or '.' if not is_git_repo(datadir): return if not have_git(): warn_no_git_executable() return for submodule in get_submodules(datadir): submodule_path = os.path.join(datadir, submodule) if not is_git_repo(submodule_path): error_submodule_not_initialized(submodule, datadir) sys.exit(1) elif submodule_revision(datadir, submodule) != git_revision(submodule_path): error_submodule_not_updated(submodule, datadir) sys.exit(1) elif is_dirty(submodule_path): warn_dirty(submodule) elif has_extra_files(submodule_path): warn_extra_files(submodule) mypy-0.560/mypy/indirection.py0000644€tŠÔÚ€2›s®0000000726213215007205022555 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, Iterable, List, Optional, Set from abc import abstractmethod from mypy.visitor import NodeVisitor from mypy.types import SyntheticTypeVisitor from mypy.nodes import MODULE_REF import mypy.nodes as nodes import mypy.types as types from mypy.util import split_module_names def extract_module_names(type_name: Optional[str]) -> List[str]: """Returns the module names of a fully qualified type name.""" if type_name is not None: # Discard the first one, which is just the qualified name of the type possible_module_names = split_module_names(type_name) return possible_module_names[1:] else: return [] class TypeIndirectionVisitor(SyntheticTypeVisitor[Set[str]]): """Returns all module references within a particular type.""" def __init__(self) -> None: self.cache = {} # type: Dict[types.Type, Set[str]] def find_modules(self, typs: Iterable[types.Type]) -> Set[str]: return self._visit(*typs) def _visit(self, *typs: types.Type) -> Set[str]: output = set() # type: Set[str] for typ in typs: if typ in self.cache: modules = self.cache[typ] else: modules = typ.accept(self) self.cache[typ] = set(modules) output.update(modules) return 
output def visit_unbound_type(self, t: types.UnboundType) -> Set[str]: return self._visit(*t.args) def visit_type_list(self, t: types.TypeList) -> Set[str]: return self._visit(*t.items) def visit_callable_argument(self, t: types.CallableArgument) -> Set[str]: return self._visit(t.typ) def visit_any(self, t: types.AnyType) -> Set[str]: return set() def visit_none_type(self, t: types.NoneTyp) -> Set[str]: return set() def visit_uninhabited_type(self, t: types.UninhabitedType) -> Set[str]: return set() def visit_erased_type(self, t: types.ErasedType) -> Set[str]: return set() def visit_deleted_type(self, t: types.DeletedType) -> Set[str]: return set() def visit_type_var(self, t: types.TypeVarType) -> Set[str]: return self._visit(*t.values) | self._visit(t.upper_bound) def visit_instance(self, t: types.Instance) -> Set[str]: out = self._visit(*t.args) if t.type is not None: out.update(split_module_names(t.type.module_name)) return out def visit_callable_type(self, t: types.CallableType) -> Set[str]: out = self._visit(*t.arg_types) | self._visit(t.ret_type) if t.definition is not None: out.update(extract_module_names(t.definition.fullname())) return out def visit_overloaded(self, t: types.Overloaded) -> Set[str]: return self._visit(*t.items()) | self._visit(t.fallback) def visit_tuple_type(self, t: types.TupleType) -> Set[str]: return self._visit(*t.items) | self._visit(t.fallback) def visit_typeddict_type(self, t: types.TypedDictType) -> Set[str]: return self._visit(*t.items.values()) | self._visit(t.fallback) def visit_star_type(self, t: types.StarType) -> Set[str]: return set() def visit_union_type(self, t: types.UnionType) -> Set[str]: return self._visit(*t.items) def visit_partial_type(self, t: types.PartialType) -> Set[str]: return set() def visit_ellipsis_type(self, t: types.EllipsisType) -> Set[str]: return set() def visit_type_type(self, t: types.TypeType) -> Set[str]: return self._visit(t.item) def visit_forwardref_type(self, t: types.ForwardRef) -> Set[str]: if t.resolved: return self._visit(t.resolved) else: return set() mypy-0.560/mypy/infer.py0000644€tŠÔÚ€2›s®0000000345213215007205021346 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utilities for type argument inference.""" from typing import List, Optional, Sequence from mypy.constraints import infer_constraints, infer_constraints_for_callable from mypy.types import Type, TypeVarId, CallableType from mypy.solve import solve_constraints from mypy.constraints import SUBTYPE_OF def infer_function_type_arguments(callee_type: CallableType, arg_types: Sequence[Optional[Type]], arg_kinds: List[int], formal_to_actual: List[List[int]], strict: bool = True) -> List[Optional[Type]]: """Infer the type arguments of a generic function. Return an array of lower bound types for the type variables -1 (at index 0), -2 (at index 1), etc. A lower bound is None if a value could not be inferred. Arguments: callee_type: the target generic function arg_types: argument types at the call site (each optional; if None, we are not considering this argument in the current pass) arg_kinds: nodes.ARG_* values for arg_types formal_to_actual: mapping from formal to actual variable indices """ # Infer constraints. constraints = infer_constraints_for_callable( callee_type, arg_types, arg_kinds, formal_to_actual) # Solve constraints. 
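    # Editorial example (not part of mypy): for a generic function
    #
    #     def first(xs: List[T]) -> T: ...
    #
    # the call first([1, 2, 3]) yields a constraint that T must be a supertype
    # of int, and solve_constraints() below infers T = int as the lower bound
    # returned from this function.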
type_vars = callee_type.type_var_ids() return solve_constraints(type_vars, constraints, strict) def infer_type_arguments(type_var_ids: List[TypeVarId], template: Type, actual: Type) -> List[Optional[Type]]: # Like infer_function_type_arguments, but only match a single type # against a generic type. constraints = infer_constraints(template, actual, SUBTYPE_OF) return solve_constraints(type_var_ids, constraints) mypy-0.560/mypy/join.py0000644€tŠÔÚ€2›s®0000003722113215007205021203 0ustar jukkaDROPBOX\Domain Users00000000000000"""Calculation of the least upper bound types (joins).""" from collections import OrderedDict from typing import cast, List, Optional from mypy.types import ( Type, AnyType, NoneTyp, TypeVisitor, Instance, UnboundType, TypeVarType, CallableType, TupleType, TypedDictType, ErasedType, TypeList, UnionType, FunctionLike, Overloaded, PartialType, DeletedType, UninhabitedType, TypeType, true_or_false, TypeOfAny ) from mypy.maptype import map_instance_to_supertype from mypy.subtypes import ( is_subtype, is_equivalent, is_subtype_ignoring_tvars, is_proper_subtype, is_protocol_implementation ) from mypy import experiments def join_simple(declaration: Optional[Type], s: Type, t: Type) -> Type: """Return a simple least upper bound given the declared type.""" if (s.can_be_true, s.can_be_false) != (t.can_be_true, t.can_be_false): # if types are restricted in different ways, use the more general versions s = true_or_false(s) t = true_or_false(t) if isinstance(s, AnyType): return s if isinstance(s, ErasedType): return t if is_proper_subtype(s, t): return t if is_proper_subtype(t, s): return s if isinstance(declaration, UnionType): return UnionType.make_simplified_union([s, t]) if isinstance(s, NoneTyp) and not isinstance(t, NoneTyp): s, t = t, s if isinstance(s, UninhabitedType) and not isinstance(t, UninhabitedType): s, t = t, s value = t.accept(TypeJoinVisitor(s)) if value is None: # XXX this code path probably should be avoided. # It seems to happen when a line (x = y) is a type error, and # it's not clear that assuming that x is arbitrary afterward # is a good idea. return declaration if declaration is None or is_subtype(value, declaration): return value return declaration def join_types(s: Type, t: Type) -> Type: """Return the least upper bound of s and t. For example, the join of 'int' and 'object' is 'object'. """ if (s.can_be_true, s.can_be_false) != (t.can_be_true, t.can_be_false): # if types are restricted in different ways, use the more general versions s = true_or_false(s) t = true_or_false(t) if isinstance(s, AnyType): return s if isinstance(s, ErasedType): return t if isinstance(s, UnionType) and not isinstance(t, UnionType): s, t = t, s if isinstance(s, NoneTyp) and not isinstance(t, NoneTyp): s, t = t, s if isinstance(s, UninhabitedType) and not isinstance(t, UninhabitedType): s, t = t, s # Use a visitor to handle non-trivial cases. return t.accept(TypeJoinVisitor(s)) class TypeJoinVisitor(TypeVisitor[Type]): """Implementation of the least upper bound algorithm. Attributes: s: The other (left) type operand. 
""" def __init__(self, s: Type) -> None: self.s = s def visit_unbound_type(self, t: UnboundType) -> Type: return AnyType(TypeOfAny.special_form) def visit_union_type(self, t: UnionType) -> Type: if is_subtype(self.s, t): return t else: return UnionType.make_simplified_union([self.s, t]) def visit_any(self, t: AnyType) -> Type: return t def visit_none_type(self, t: NoneTyp) -> Type: if experiments.STRICT_OPTIONAL: if isinstance(self.s, (NoneTyp, UninhabitedType)): return t elif isinstance(self.s, UnboundType): return AnyType(TypeOfAny.special_form) else: return UnionType.make_simplified_union([self.s, t]) else: return self.s def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return self.s def visit_deleted_type(self, t: DeletedType) -> Type: return self.s def visit_erased_type(self, t: ErasedType) -> Type: return self.s def visit_type_var(self, t: TypeVarType) -> Type: if isinstance(self.s, TypeVarType) and self.s.id == t.id: return self.s else: return self.default(self.s) def visit_instance(self, t: Instance) -> Type: if isinstance(self.s, Instance): nominal = join_instances(t, self.s) structural = None # type: Optional[Instance] if t.type.is_protocol and is_protocol_implementation(self.s, t): structural = t elif self.s.type.is_protocol and is_protocol_implementation(t, self.s): structural = self.s # Structural join is preferred in the case where we have found both # structural and nominal and they have same MRO length (see two comments # in join_instances_via_supertype). Otherwise, just return the nominal join. if not structural or is_better(nominal, structural): return nominal return structural elif isinstance(self.s, FunctionLike): return join_types(t, self.s.fallback) elif isinstance(self.s, TypeType): return join_types(t, self.s) elif isinstance(self.s, TypedDictType): return join_types(t, self.s) else: return self.default(self.s) def visit_callable_type(self, t: CallableType) -> Type: if isinstance(self.s, CallableType) and is_similar_callables(t, self.s): if is_equivalent(t, self.s): return combine_similar_callables(t, self.s) result = join_similar_callables(t, self.s) if any(isinstance(tp, (NoneTyp, UninhabitedType)) for tp in result.arg_types): # We don't want to return unusable Callable, attempt fallback instead. return join_types(t.fallback, self.s) return result elif isinstance(self.s, Overloaded): # Switch the order of arguments to that we'll get to visit_overloaded. return join_types(t, self.s) else: return join_types(t.fallback, self.s) def visit_overloaded(self, t: Overloaded) -> Type: # This is more complex than most other cases. Here are some # examples that illustrate how this works. # # First let's define a concise notation: # - Cn are callable types (for n in 1, 2, ...) # - Ov(C1, C2, ...) is an overloaded type with items C1, C2, ... # - Callable[[T, ...], S] is written as [T, ...] -> S. # # We want some basic properties to hold (assume Cn are all # unrelated via Any-similarity): # # join(Ov(C1, C2), C1) == C1 # join(Ov(C1, C2), Ov(C1, C2)) == Ov(C1, C2) # join(Ov(C1, C2), Ov(C1, C3)) == C1 # join(Ov(C2, C2), C3) == join of fallback types # # The presence of Any types makes things more interesting. The join is the # most general type we can get with respect to Any: # # join(Ov([int] -> int, [str] -> str), [Any] -> str) == Any -> str # # We could use a simplification step that removes redundancies, but that's not # implemented right now. 
Consider this example, where we get a redundancy: # # join(Ov([int, Any] -> Any, [str, Any] -> Any), [Any, int] -> Any) == # Ov([Any, int] -> Any, [Any, int] -> Any) # # TODO: Consider more cases of callable subtyping. result = [] # type: List[CallableType] s = self.s if isinstance(s, FunctionLike): # The interesting case where both types are function types. for t_item in t.items(): for s_item in s.items(): if is_similar_callables(t_item, s_item): if is_equivalent(t_item, s_item): result.append(combine_similar_callables(t_item, s_item)) elif is_subtype(t_item, s_item): result.append(s_item) if result: # TODO: Simplify redundancies from the result. if len(result) == 1: return result[0] else: return Overloaded(result) return join_types(t.fallback, s.fallback) return join_types(t.fallback, s) def visit_tuple_type(self, t: TupleType) -> Type: if isinstance(self.s, TupleType) and self.s.length() == t.length(): items = [] # type: List[Type] for i in range(t.length()): items.append(self.join(t.items[i], self.s.items[i])) fallback = join_instances(self.s.fallback, t.fallback) assert isinstance(fallback, Instance) return TupleType(items, fallback) else: return self.default(self.s) def visit_typeddict_type(self, t: TypedDictType) -> Type: if isinstance(self.s, TypedDictType): items = OrderedDict([ (item_name, s_item_type) for (item_name, s_item_type, t_item_type) in self.s.zip(t) if (is_equivalent(s_item_type, t_item_type) and (item_name in t.required_keys) == (item_name in self.s.required_keys)) ]) mapping_value_type = join_type_list(list(items.values())) fallback = self.s.create_anonymous_fallback(value_type=mapping_value_type) # We need to filter by items.keys() since some required keys present in both t and # self.s might be missing from the join if the types are incompatible. required_keys = set(items.keys()) & t.required_keys & self.s.required_keys return TypedDictType(items, required_keys, fallback) elif isinstance(self.s, Instance): return join_types(self.s, t.fallback) else: return self.default(self.s) def visit_partial_type(self, t: PartialType) -> Type: # We only have partial information so we can't decide the join result. We should # never get here. assert False, "Internal error" def visit_type_type(self, t: TypeType) -> Type: if isinstance(self.s, TypeType): return TypeType.make_normalized(self.join(t.item, self.s.item), line=t.line) elif isinstance(self.s, Instance) and self.s.type.fullname() == 'builtins.type': return self.s else: return self.default(self.s) def join(self, s: Type, t: Type) -> Type: return join_types(s, t) def default(self, typ: Type) -> Type: if isinstance(typ, Instance): return object_from_instance(typ) elif isinstance(typ, UnboundType): return AnyType(TypeOfAny.special_form) elif isinstance(typ, TupleType): return self.default(typ.fallback) elif isinstance(typ, TypedDictType): return self.default(typ.fallback) elif isinstance(typ, FunctionLike): return self.default(typ.fallback) elif isinstance(typ, TypeVarType): return self.default(typ.upper_bound) else: return AnyType(TypeOfAny.special_form) def join_instances(t: Instance, s: Instance) -> Type: """Calculate the join of two instance types. """ if t.type == s.type: # Simplest case: join two types with the same base type (but # potentially different arguments). if is_subtype(t, s) or is_subtype(s, t): # Compatible; combine type arguments. 
args = [] # type: List[Type] for i in range(len(t.args)): args.append(join_types(t.args[i], s.args[i])) return Instance(t.type, args) else: # Incompatible; return trivial result object. return object_from_instance(t) elif t.type.bases and is_subtype_ignoring_tvars(t, s): return join_instances_via_supertype(t, s) else: # Now t is not a subtype of s, and t != s. Now s could be a subtype # of t; alternatively, we need to find a common supertype. This works # in of the both cases. return join_instances_via_supertype(s, t) def join_instances_via_supertype(t: Instance, s: Instance) -> Type: # Give preference to joins via duck typing relationship, so that # join(int, float) == float, for example. if t.type._promote and is_subtype(t.type._promote, s): return join_types(t.type._promote, s) elif s.type._promote and is_subtype(s.type._promote, t): return join_types(t, s.type._promote) # Compute the "best" supertype of t when joined with s. # The definition of "best" may evolve; for now it is the one with # the longest MRO. Ties are broken by using the earlier base. best = None # type: Optional[Type] for base in t.type.bases: mapped = map_instance_to_supertype(t, base.type) res = join_instances(mapped, s) if best is None or is_better(res, best): best = res assert best is not None return best def is_better(t: Type, s: Type) -> bool: # Given two possible results from join_instances_via_supertype(), # indicate whether t is the better one. if isinstance(t, Instance): if not isinstance(s, Instance): return True # Use len(mro) as a proxy for the better choice. if len(t.type.mro) > len(s.type.mro): return True return False def is_similar_callables(t: CallableType, s: CallableType) -> bool: """Return True if t and s have identical numbers of arguments, default arguments and varargs. """ return (len(t.arg_types) == len(s.arg_types) and t.min_args == s.min_args and t.is_var_arg == s.is_var_arg) def join_similar_callables(t: CallableType, s: CallableType) -> CallableType: from mypy.meet import meet_types arg_types = [] # type: List[Type] for i in range(len(t.arg_types)): arg_types.append(meet_types(t.arg_types[i], s.arg_types[i])) # TODO in combine_similar_callables also applies here (names and kinds) # The fallback type can be either 'function' or 'type'. The result should have 'type' as # fallback only if both operands have it as 'type'. if t.fallback.type.fullname() != 'builtins.type': fallback = t.fallback else: fallback = s.fallback return t.copy_modified(arg_types=arg_types, ret_type=join_types(t.ret_type, s.ret_type), fallback=fallback, name=None) def combine_similar_callables(t: CallableType, s: CallableType) -> CallableType: arg_types = [] # type: List[Type] for i in range(len(t.arg_types)): arg_types.append(join_types(t.arg_types[i], s.arg_types[i])) # TODO kinds and argument names # The fallback type can be either 'function' or 'type'. The result should have 'type' as # fallback only if both operands have it as 'type'. if t.fallback.type.fullname() != 'builtins.type': fallback = t.fallback else: fallback = s.fallback return t.copy_modified(arg_types=arg_types, ret_type=join_types(t.ret_type, s.ret_type), fallback=fallback, name=None) def object_from_instance(instance: Instance) -> Instance: """Construct the type 'builtins.object' from an instance type.""" # Use the fact that 'object' is always the last class in the mro. res = Instance(instance.type.mro[-1], []) return res def join_type_list(types: List[Type]) -> Type: if not types: # This is a little arbitrary but reasonable. 
Any empty tuple should be compatible # with all variable length tuples, and this makes it possible. return UninhabitedType() joined = types[0] for t in types[1:]: joined = join_types(joined, t) return joined mypy-0.560/mypy/literals.py0000644€tŠÔÚ€2›s®0000001640413215007205022063 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Union, Any, Tuple, Iterable from mypy.nodes import ( Expression, ComparisonExpr, OpExpr, MemberExpr, UnaryExpr, StarExpr, IndexExpr, LITERAL_YES, LITERAL_NO, NameExpr, LITERAL_TYPE, IntExpr, FloatExpr, ComplexExpr, StrExpr, BytesExpr, UnicodeExpr, ListExpr, TupleExpr, SetExpr, DictExpr, CallExpr, SliceExpr, CastExpr, ConditionalExpr, EllipsisExpr, YieldFromExpr, YieldExpr, RevealTypeExpr, SuperExpr, TypeApplication, LambdaExpr, ListComprehension, SetComprehension, DictionaryComprehension, GeneratorExpr, BackquoteExpr, TypeVarExpr, TypeAliasExpr, NamedTupleExpr, EnumCallExpr, TypedDictExpr, NewTypeExpr, PromoteExpr, AwaitExpr, TempNode, ) from mypy.visitor import ExpressionVisitor # [Note Literals and literal_hash] # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # # Mypy uses the term "literal" to refer to any expression built out of # the following: # # * Plain literal expressions, like `1` (integer, float, string, etc.) # # * Compound literal expressions, like `(lit1, lit2)` (list, dict, # set, or tuple) # # * Operator expressions, like `lit1 + lit2` # # * Variable references, like `x` # # * Member references, like `lit.m` # # * Index expressions, like `lit[0]` # # A typical "literal" looks like `x[(i,j+1)].m`. # # An expression that is a literal has a `literal_hash`, with the # following properties. # # * `literal_hash` is a Key: a tuple containing basic data types and # possibly other Keys. So it can be used as a key in a dictionary # that will be compared by value (as opposed to the Node itself, # which is compared by identity). # # * Two expressions have equal `literal_hash`es if and only if they # are syntactically equal expressions. (NB: Actually, we also # identify as equal expressions like `3` and `3.0`; is this a good # idea?) # # * The elements of `literal_hash` that are tuples are exactly the # subexpressions of the original expression (e.g. the base and index # of an index expression, or the operands of an operator expression). def literal(e: Expression) -> int: if isinstance(e, ComparisonExpr): return min(literal(o) for o in e.operands) elif isinstance(e, OpExpr): return min(literal(e.left), literal(e.right)) elif isinstance(e, (MemberExpr, UnaryExpr, StarExpr)): return literal(e.expr) elif isinstance(e, IndexExpr): if literal(e.index) == LITERAL_YES: return literal(e.base) else: return LITERAL_NO elif isinstance(e, NameExpr): return LITERAL_TYPE if isinstance(e, (IntExpr, FloatExpr, ComplexExpr, StrExpr, BytesExpr, UnicodeExpr)): return LITERAL_YES if literal_hash(e): return LITERAL_YES return LITERAL_NO Key = Tuple[Any, ...] 
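# As an informal illustration of the Key structure described in the note
# above: the literal expression `a.b[0]` hashes to a nested tuple roughly like
#
#     ('Index', ('Member', ('Var', 'a'), 'b'), ('Literal', 0))
#
# so syntactically identical expressions get equal, hashable keys, even
# though the AST nodes themselves compare by identity.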
def subkeys(key: Key) -> Iterable[Key]: return [elt for elt in key if isinstance(elt, tuple)] def literal_hash(e: Expression) -> Optional[Key]: return e.accept(_hasher) class _Hasher(ExpressionVisitor[Optional[Key]]): def visit_int_expr(self, e: IntExpr) -> Key: return ('Literal', e.value) def visit_str_expr(self, e: StrExpr) -> Key: return ('Literal', e.value) def visit_bytes_expr(self, e: BytesExpr) -> Key: return ('Literal', e.value) def visit_unicode_expr(self, e: UnicodeExpr) -> Key: return ('Literal', e.value) def visit_float_expr(self, e: FloatExpr) -> Key: return ('Literal', e.value) def visit_complex_expr(self, e: ComplexExpr) -> Key: return ('Literal', e.value) def visit_star_expr(self, e: StarExpr) -> Key: return ('Star', literal_hash(e.expr)) def visit_name_expr(self, e: NameExpr) -> Key: return ('Var', e.name) def visit_member_expr(self, e: MemberExpr) -> Key: return ('Member', literal_hash(e.expr), e.name) def visit_op_expr(self, e: OpExpr) -> Key: return ('Binary', e.op, literal_hash(e.left), literal_hash(e.right)) def visit_comparison_expr(self, e: ComparisonExpr) -> Key: rest = tuple(e.operators) # type: Any rest += tuple(literal_hash(o) for o in e.operands) return ('Comparison',) + rest def visit_unary_expr(self, e: UnaryExpr) -> Key: return ('Unary', e.op, literal_hash(e.expr)) def seq_expr(self, e: Union[ListExpr, TupleExpr, SetExpr], name: str) -> Optional[Key]: if all(literal(x) == LITERAL_YES for x in e.items): rest = tuple(literal_hash(x) for x in e.items) # type: Any return (name,) + rest return None def visit_list_expr(self, e: ListExpr) -> Optional[Key]: return self.seq_expr(e, 'List') def visit_dict_expr(self, e: DictExpr) -> Optional[Key]: if all(a and literal(a) == literal(b) == LITERAL_YES for a, b in e.items): rest = tuple((literal_hash(a), literal_hash(b)) for a, b in e.items) # type: Any return ('Dict',) + rest return None def visit_tuple_expr(self, e: TupleExpr) -> Optional[Key]: return self.seq_expr(e, 'Tuple') def visit_set_expr(self, e: SetExpr) -> Optional[Key]: return self.seq_expr(e, 'Set') def visit_index_expr(self, e: IndexExpr) -> Optional[Key]: if literal(e.index) == LITERAL_YES: return ('Index', literal_hash(e.base), literal_hash(e.index)) return None def visit_call_expr(self, e: CallExpr) -> None: return None def visit_slice_expr(self, e: SliceExpr) -> None: return None def visit_cast_expr(self, e: CastExpr) -> None: return None def visit_conditional_expr(self, e: ConditionalExpr) -> None: return None def visit_ellipsis(self, e: EllipsisExpr) -> None: return None def visit_yield_from_expr(self, e: YieldFromExpr) -> None: return None def visit_yield_expr(self, e: YieldExpr) -> None: return None def visit_reveal_type_expr(self, e: RevealTypeExpr) -> None: return None def visit_super_expr(self, e: SuperExpr) -> None: return None def visit_type_application(self, e: TypeApplication) -> None: return None def visit_lambda_expr(self, e: LambdaExpr) -> None: return None def visit_list_comprehension(self, e: ListComprehension) -> None: return None def visit_set_comprehension(self, e: SetComprehension) -> None: return None def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None: return None def visit_generator_expr(self, e: GeneratorExpr) -> None: return None def visit_backquote_expr(self, e: BackquoteExpr) -> None: return None def visit_type_var_expr(self, e: TypeVarExpr) -> None: return None def visit_type_alias_expr(self, e: TypeAliasExpr) -> None: return None def visit_namedtuple_expr(self, e: NamedTupleExpr) -> None: return None 
def visit_enum_call_expr(self, e: EnumCallExpr) -> None: return None def visit_typeddict_expr(self, e: TypedDictExpr) -> None: return None def visit_newtype_expr(self, e: NewTypeExpr) -> None: return None def visit__promote_expr(self, e: PromoteExpr) -> None: return None def visit_await_expr(self, e: AwaitExpr) -> None: return None def visit_temp_node(self, e: TempNode) -> None: return None _hasher = _Hasher() mypy-0.560/mypy/main.py0000644€tŠÔÚ€2›s®0000010462313215007205021171 0ustar jukkaDROPBOX\Domain Users00000000000000"""Mypy type checker command line tool.""" import argparse import configparser import fnmatch import os import re import sys import time from typing import Any, Dict, List, Mapping, Optional, Sequence, Set, Tuple from mypy import build from mypy import defaults from mypy import experiments from mypy import util from mypy.build import BuildSource, BuildResult, PYTHON_EXTENSIONS from mypy.errors import CompileError from mypy.options import Options, BuildType from mypy.report import reporter_classes from mypy.version import __version__ PY_EXTENSIONS = tuple(PYTHON_EXTENSIONS) class InvalidPackageName(Exception): """Exception indicating that a package name was invalid.""" orig_stat = os.stat def stat_proxy(path: str) -> os.stat_result: try: st = orig_stat(path) except os.error as err: print("stat(%r) -> %s" % (path, err)) raise else: print("stat(%r) -> (st_mode=%o, st_mtime=%d, st_size=%d)" % (path, st.st_mode, st.st_mtime, st.st_size)) return st def main(script_path: Optional[str], args: Optional[List[str]] = None) -> None: """Main entry point to the type checker. Args: script_path: Path to the 'mypy' script (used for finding data files). args: Custom command-line arguments. If not given, sys.argv[1:] will be used. """ t0 = time.time() # To log stat() calls: os.stat = stat_proxy if script_path: bin_dir = find_bin_directory(script_path) # type: Optional[str] else: bin_dir = None sys.setrecursionlimit(2 ** 14) if args is None: args = sys.argv[1:] sources, options = process_options(args) serious = False try: res = type_check_only(sources, bin_dir, options) a = res.errors except CompileError as e: a = e.messages if not e.use_stdout: serious = True if options.warn_unused_configs and options.unused_configs: print("Warning: unused section(s) in %s: %s" % (options.config_file, ", ".join("[mypy-%s]" % glob for glob in options.unused_configs.values())), file=sys.stderr) if options.junit_xml: t1 = time.time() util.write_junit_xml(t1 - t0, serious, a, options.junit_xml) if a: f = sys.stderr if serious else sys.stdout try: for m in a: f.write(m + '\n') except BrokenPipeError: pass sys.exit(1) def find_bin_directory(script_path: str) -> str: """Find the directory that contains this script. This is used by build to find stubs and other data files. """ # Follow up to 5 symbolic links (cap to avoid cycles). for i in range(5): if os.path.islink(script_path): script_path = readlinkabs(script_path) else: break return os.path.dirname(script_path) def readlinkabs(link: str) -> str: """Return an absolute path to symbolic link destination.""" # Adapted from code by Greg Smith. assert os.path.islink(link) path = os.readlink(link) if os.path.isabs(path): return path return os.path.join(os.path.dirname(link), path) def type_check_only(sources: List[BuildSource], bin_dir: Optional[str], options: Options) -> BuildResult: # Type-check the program and dependencies. 
return build.build(sources=sources, bin_dir=bin_dir, options=options) FOOTER = """environment variables: MYPYPATH additional module search path""" class SplitNamespace(argparse.Namespace): def __init__(self, standard_namespace: object, alt_namespace: object, alt_prefix: str) -> None: self.__dict__['_standard_namespace'] = standard_namespace self.__dict__['_alt_namespace'] = alt_namespace self.__dict__['_alt_prefix'] = alt_prefix def _get(self) -> Tuple[Any, Any]: return (self._standard_namespace, self._alt_namespace) def __setattr__(self, name: str, value: Any) -> None: if name.startswith(self._alt_prefix): setattr(self._alt_namespace, name[len(self._alt_prefix):], value) else: setattr(self._standard_namespace, name, value) def __getattr__(self, name: str) -> Any: if name.startswith(self._alt_prefix): return getattr(self._alt_namespace, name[len(self._alt_prefix):]) else: return getattr(self._standard_namespace, name) def parse_version(v: str) -> Tuple[int, int]: m = re.match(r'\A(\d)\.(\d+)\Z', v) if not m: raise argparse.ArgumentTypeError( "Invalid python version '{}' (expected format: 'x.y')".format(v)) major, minor = int(m.group(1)), int(m.group(2)) if major == 2: if minor != 7: raise argparse.ArgumentTypeError( "Python 2.{} is not supported (must be 2.7)".format(minor)) elif major == 3: if minor < defaults.PYTHON3_VERSION_MIN[1]: raise argparse.ArgumentTypeError( "Python 3.{0} is not supported (must be {1}.{2} or higher)".format(minor, *defaults.PYTHON3_VERSION_MIN)) else: raise argparse.ArgumentTypeError( "Python major version '{}' out of range (must be 2 or 3)".format(major)) return major, minor # Make the help output a little less jarring. class AugmentedHelpFormatter(argparse.HelpFormatter): def __init__(self, prog: str) -> None: super().__init__(prog=prog, max_help_position=28) # Define pairs of flag prefixes with inverse meaning. flag_prefix_pairs = [ ('allow', 'disallow'), ('show', 'hide'), ] flag_prefix_map = {} # type: Dict[str, str] for a, b in flag_prefix_pairs: flag_prefix_map[a] = b flag_prefix_map[b] = a def invert_flag_name(flag: str) -> str: split = flag[2:].split('-', 1) if len(split) == 2: prefix, rest = split if prefix in flag_prefix_map: return '--{}-{}'.format(flag_prefix_map[prefix], rest) elif prefix == 'no': return '--{}'.format(rest) return '--no-{}'.format(flag[2:]) def process_options(args: List[str], require_targets: bool = True ) -> Tuple[List[BuildSource], Options]: """Parse command line arguments.""" parser = argparse.ArgumentParser(prog='mypy', epilog=FOOTER, fromfile_prefix_chars='@', formatter_class=AugmentedHelpFormatter) strict_flag_names = [] # type: List[str] strict_flag_assignments = [] # type: List[Tuple[str, bool]] def add_invertible_flag(flag: str, *, inverse: Optional[str] = None, default: bool, dest: Optional[str] = None, help: str, strict_flag: bool = False ) -> None: if inverse is None: inverse = invert_flag_name(flag) if help is not argparse.SUPPRESS: help += " (inverse: {})".format(inverse) arg = parser.add_argument(flag, # type: ignore # incorrect stub for add_argument action='store_false' if default else 'store_true', dest=dest, help=help) dest = arg.dest arg = parser.add_argument(inverse, # type: ignore # incorrect stub for add_argument action='store_true' if default else 'store_false', dest=dest, help=argparse.SUPPRESS) if strict_flag: assert dest is not None strict_flag_names.append(flag) strict_flag_assignments.append((dest, not default)) # Unless otherwise specified, arguments will be parsed directly onto an # Options object. 
Options that require further processing should have # their `dest` prefixed with `special-opts:`, which will cause them to be # parsed into the separate special_opts namespace object. parser.add_argument('-v', '--verbose', action='count', dest='verbosity', help="more verbose messages") parser.add_argument('-V', '--version', action='version', version='%(prog)s ' + __version__) parser.add_argument('--python-version', type=parse_version, metavar='x.y', help='use Python x.y') parser.add_argument('--platform', action='store', metavar='PLATFORM', help="typecheck special-cased code for the given OS platform " "(defaults to sys.platform).") parser.add_argument('-2', '--py2', dest='python_version', action='store_const', const=defaults.PYTHON2_VERSION, help="use Python 2 mode") parser.add_argument('--ignore-missing-imports', action='store_true', help="silently ignore imports of missing modules") parser.add_argument('--follow-imports', choices=['normal', 'silent', 'skip', 'error'], default='normal', help="how to treat imports (default normal)") parser.add_argument('--disallow-any-unimported', default=False, action='store_true', help="disallow Any types resulting from unfollowed imports") parser.add_argument('--disallow-any-expr', default=False, action='store_true', help='disallow all expressions that have type Any') parser.add_argument('--disallow-any-decorated', default=False, action='store_true', help='disallow functions that have Any in their signature ' 'after decorator transformation') parser.add_argument('--disallow-any-explicit', default=False, action='store_true', help='disallow explicit Any in type positions') parser.add_argument('--disallow-any-generics', default=False, action='store_true', help='disallow usage of generic types that do not specify explicit ' 'type parameters') add_invertible_flag('--disallow-untyped-calls', default=False, strict_flag=True, help="disallow calling functions without type annotations" " from functions with type annotations") add_invertible_flag('--disallow-untyped-defs', default=False, strict_flag=True, help="disallow defining functions without type annotations" " or with incomplete type annotations") add_invertible_flag('--disallow-incomplete-defs', default=False, strict_flag=True, help="disallow defining functions with incomplete type annotations") add_invertible_flag('--check-untyped-defs', default=False, strict_flag=True, help="type check the interior of functions without type annotations") add_invertible_flag('--disallow-subclassing-any', default=False, strict_flag=True, help="disallow subclassing values of type 'Any' when defining classes") add_invertible_flag('--warn-incomplete-stub', default=False, help="warn if missing type annotation in typeshed, only relevant with" " --check-untyped-defs enabled") add_invertible_flag('--disallow-untyped-decorators', default=False, strict_flag=True, help="disallow decorating typed functions with untyped decorators") add_invertible_flag('--warn-redundant-casts', default=False, strict_flag=True, help="warn about casting an expression to its inferred type") add_invertible_flag('--no-warn-no-return', dest='warn_no_return', default=True, help="do not warn about functions that end without returning") add_invertible_flag('--warn-return-any', default=False, strict_flag=True, help="warn about returning values of type Any" " from non-Any typed functions") add_invertible_flag('--warn-unused-ignores', default=False, strict_flag=True, help="warn about unneeded '# type: ignore' comments") 
add_invertible_flag('--warn-unused-configs', default=False, strict_flag=True, help="warn about unnused '[mypy-]' config sections") add_invertible_flag('--show-error-context', default=False, dest='show_error_context', help='Precede errors with "note:" messages explaining context') add_invertible_flag('--no-implicit-optional', default=False, strict_flag=True, help="don't assume arguments with default values of None are Optional") parser.add_argument('-i', '--incremental', action='store_true', help="enable module cache, (inverse: --no-incremental)") parser.add_argument('--no-incremental', action='store_false', dest='incremental', help=argparse.SUPPRESS) parser.add_argument('--quick-and-dirty', action='store_true', help="use cache even if dependencies out of date " "(implies --incremental)") parser.add_argument('--cache-dir', action='store', metavar='DIR', help="store module cache info in the given folder in incremental mode " "(defaults to '{}')".format(defaults.CACHE_DIR)) parser.add_argument('--skip-version-check', action='store_true', help="allow using cache written by older mypy version") add_invertible_flag('--strict-optional', default=False, strict_flag=True, help="enable experimental strict Optional checks") parser.add_argument('--strict-optional-whitelist', metavar='GLOB', nargs='*', help="suppress strict Optional errors in all but the provided files " "(experimental -- read documentation before using!). " "Implies --strict-optional. Has the undesirable side-effect of " "suppressing other errors in non-whitelisted files.") parser.add_argument('--junit-xml', help="write junit.xml to the given file") parser.add_argument('--pdb', action='store_true', help="invoke pdb on fatal error") parser.add_argument('--show-traceback', '--tb', action='store_true', help="show traceback on fatal error") parser.add_argument('--stats', action='store_true', dest='dump_type_stats', help="dump stats") parser.add_argument('--inferstats', action='store_true', dest='dump_inference_stats', help="dump type inference stats") parser.add_argument('--custom-typing', metavar='MODULE', dest='custom_typing_module', help="use a custom typing module") parser.add_argument('--custom-typeshed-dir', metavar='DIR', help="use the custom typeshed in DIR") parser.add_argument('--scripts-are-modules', action='store_true', help="Script x becomes module x instead of __main__") parser.add_argument('--config-file', help="Configuration file, must have a [mypy] section " "(defaults to {})".format(defaults.CONFIG_FILE)) add_invertible_flag('--show-column-numbers', default=False, help="Show column numbers in error messages") parser.add_argument('--find-occurrences', metavar='CLASS.MEMBER', dest='special-opts:find_occurrences', help="print out all usages of a class member (experimental)") strict_help = "Strict mode. Enables the following flags: {}".format( ", ".join(strict_flag_names)) parser.add_argument('--strict', action='store_true', dest='special-opts:strict', help=strict_help) parser.add_argument('--shadow-file', nargs=2, metavar=('SOURCE_FILE', 'SHADOW_FILE'), dest='shadow_file', help='Typecheck SHADOW_FILE in place of SOURCE_FILE.') # hidden options # --debug-cache will disable any cache-related compressions/optimizations, # which will make the cache writing process output pretty-printed JSON (which # is easier to debug). 
parser.add_argument('--debug-cache', action='store_true', help=argparse.SUPPRESS) # --dump-deps will dump all fine-grained dependencies to stdout parser.add_argument('--dump-deps', action='store_true', help=argparse.SUPPRESS) # --dump-graph will dump the contents of the graph of SCCs and exit. parser.add_argument('--dump-graph', action='store_true', help=argparse.SUPPRESS) # --semantic-analysis-only does exactly that. parser.add_argument('--semantic-analysis-only', action='store_true', help=argparse.SUPPRESS) # deprecated options parser.add_argument('--disallow-any', dest='special-opts:disallow_any', help=argparse.SUPPRESS) add_invertible_flag('--strict-boolean', default=False, help=argparse.SUPPRESS) parser.add_argument('-f', '--dirty-stubs', action='store_true', dest='special-opts:dirty_stubs', help=argparse.SUPPRESS) parser.add_argument('--use-python-path', action='store_true', dest='special-opts:use_python_path', help=argparse.SUPPRESS) parser.add_argument('-s', '--silent-imports', action='store_true', dest='special-opts:silent_imports', help=argparse.SUPPRESS) parser.add_argument('--almost-silent', action='store_true', dest='special-opts:almost_silent', help=argparse.SUPPRESS) parser.add_argument('--fast-parser', action='store_true', dest='special-opts:fast_parser', help=argparse.SUPPRESS) parser.add_argument('--no-fast-parser', action='store_true', dest='special-opts:no_fast_parser', help=argparse.SUPPRESS) report_group = parser.add_argument_group( title='report generation', description='Generate a report in the specified format.') for report_type in sorted(reporter_classes): report_group.add_argument('--%s-report' % report_type.replace('_', '-'), metavar='DIR', dest='special-opts:%s_report' % report_type) code_group = parser.add_argument_group(title='How to specify the code to type check') code_group.add_argument('-m', '--module', action='append', metavar='MODULE', dest='special-opts:modules', help="type-check module; can repeat for more modules") # TODO: `mypy -p A -p B` currently silently ignores A # (last option wins). Perhaps -c, -m and -p could just be # command-line flags that modify how we interpret self.files? code_group.add_argument('-c', '--command', action='append', metavar='PROGRAM_TEXT', dest='special-opts:command', help="type-check program passed in as string") code_group.add_argument('-p', '--package', metavar='PACKAGE', dest='special-opts:package', help="type-check all files in a directory") code_group.add_argument(metavar='files', nargs='*', dest='special-opts:files', help="type-check given files or directories") # Parse arguments once into a dummy namespace so we can get the # filename for the config file and know if the user requested all strict options. dummy = argparse.Namespace() parser.parse_args(args, dummy) config_file = dummy.config_file if config_file is not None and not os.path.exists(config_file): parser.error("Cannot find config file '%s'" % config_file) # Parse config file first, so command line can override. options = Options() parse_config_file(options, config_file) # Set strict flags before parsing (if strict mode enabled), so other command # line options can override. if getattr(dummy, 'special-opts:strict'): for dest, value in strict_flag_assignments: setattr(options, dest, value) # Parse command line for real, using a split namespace. special_opts = argparse.Namespace() parser.parse_args(args, SplitNamespace(options, special_opts, 'special-opts:')) # --use-python-path is no longer supported; explain why. 
if special_opts.use_python_path: parser.error("Sorry, --use-python-path is no longer supported.\n" "If you are trying this because your code depends on a library module,\n" "you should really investigate how to obtain stubs for that module.\n" "See https://github.com/python/mypy/issues/1411 for more discussion." ) # Process deprecated options if special_opts.disallow_any: print("--disallow-any option was split up into multiple flags. " "See http://mypy.readthedocs.io/en/latest/command_line.html#disallow-any-flags") if options.strict_boolean: print("Warning: --strict-boolean is deprecated; " "see https://github.com/python/mypy/issues/3195", file=sys.stderr) if special_opts.almost_silent: print("Warning: --almost-silent has been replaced by " "--follow-imports=errors", file=sys.stderr) if options.follow_imports == 'normal': options.follow_imports = 'errors' elif special_opts.silent_imports: print("Warning: --silent-imports has been replaced by " "--ignore-missing-imports --follow-imports=skip", file=sys.stderr) options.ignore_missing_imports = True if options.follow_imports == 'normal': options.follow_imports = 'skip' if special_opts.dirty_stubs: print("Warning: -f/--dirty-stubs is deprecated and no longer necessary. Mypy no longer " "checks the git status of stubs.", file=sys.stderr) if special_opts.fast_parser: print("Warning: --fast-parser is now the default (and only) parser.") if special_opts.no_fast_parser: print("Warning: --no-fast-parser no longer has any effect. The fast parser " "is now mypy's default and only parser.") # Check for invalid argument combinations. if require_targets: code_methods = sum(bool(c) for c in [special_opts.modules, special_opts.command, special_opts.package, special_opts.files]) if code_methods == 0: parser.error("Missing target module, package, files, or command.") elif code_methods > 1: parser.error("May only specify one of: module, package, files, or command.") # Set build flags. if options.strict_optional_whitelist is not None: # TODO: Deprecate, then kill this flag options.strict_optional = True if options.strict_optional: experiments.STRICT_OPTIONAL = True if special_opts.find_occurrences: experiments.find_occurrences = special_opts.find_occurrences.split('.') assert experiments.find_occurrences is not None if len(experiments.find_occurrences) < 2: parser.error("Can only find occurrences of class members.") if len(experiments.find_occurrences) != 2: parser.error("Can only find occurrences of non-nested class members.") # Set reports. for flag, val in vars(special_opts).items(): if flag.endswith('_report') and val is not None: report_type = flag[:-7].replace('_', '-') report_dir = val options.report_dirs[report_type] = report_dir # Let quick_and_dirty imply incremental. if options.quick_and_dirty: options.incremental = True # Set target. if special_opts.modules: options.build_type = BuildType.MODULE targets = [BuildSource(None, m, None) for m in special_opts.modules] return targets, options elif special_opts.package: if os.sep in special_opts.package or os.altsep and os.altsep in special_opts.package: fail("Package name '{}' cannot have a slash in it." 
.format(special_opts.package)) options.build_type = BuildType.MODULE lib_path = [os.getcwd()] + build.mypy_path() targets = build.find_modules_recursive(special_opts.package, lib_path) if not targets: fail("Can't find package '{}'".format(special_opts.package)) return targets, options elif special_opts.command: options.build_type = BuildType.PROGRAM_TEXT targets = [BuildSource(None, None, '\n'.join(special_opts.command))] return targets, options else: targets = create_source_list(special_opts.files, options) return targets, options def create_source_list(files: Sequence[str], options: Options) -> List[BuildSource]: targets = [] for f in files: if f.endswith(PY_EXTENSIONS): try: targets.append(BuildSource(f, crawl_up(f)[1], None)) except InvalidPackageName as e: fail(str(e)) elif os.path.isdir(f): try: sub_targets = expand_dir(f) except InvalidPackageName as e: fail(str(e)) if not sub_targets: fail("There are no .py[i] files in directory '{}'" .format(f)) targets.extend(sub_targets) else: mod = os.path.basename(f) if options.scripts_are_modules else None targets.append(BuildSource(f, mod, None)) return targets def keyfunc(name: str) -> Tuple[int, str]: """Determines sort order for directory listing. The desirable property is foo < foo.pyi < foo.py. """ base, suffix = os.path.splitext(name) for i, ext in enumerate(PY_EXTENSIONS): if suffix == ext: return (i, base) return (-1, name) def expand_dir(arg: str, mod_prefix: str = '') -> List[BuildSource]: """Convert a directory name to a list of sources to build.""" f = get_init_file(arg) if mod_prefix and not f: return [] seen = set() # type: Set[str] sources = [] if f and not mod_prefix: top_dir, top_mod = crawl_up(f) mod_prefix = top_mod + '.' if mod_prefix: sources.append(BuildSource(f, mod_prefix.rstrip('.'), None)) names = os.listdir(arg) names.sort(key=keyfunc) for name in names: path = os.path.join(arg, name) if os.path.isdir(path): sub_sources = expand_dir(path, mod_prefix + name + '.') if sub_sources: seen.add(name) sources.extend(sub_sources) else: base, suffix = os.path.splitext(name) if base == '__init__': continue if base not in seen and '.' not in base and suffix in PY_EXTENSIONS: seen.add(base) src = BuildSource(path, mod_prefix + base, None) sources.append(src) return sources def crawl_up(arg: str) -> Tuple[str, str]: """Given a .py[i] filename, return (root directory, module). We crawl up the path until we find a directory without __init__.py[i], or until we run out of path components. """ dir, mod = os.path.split(arg) mod = strip_py(mod) or mod while dir and get_init_file(dir): dir, base = os.path.split(dir) if not base: break # Ensure that base is a valid python module name if not base.isidentifier(): raise InvalidPackageName('{} is not a valid Python package name'.format(base)) if mod == '__init__' or not mod: mod = base else: mod = base + '.' + mod return dir, mod def strip_py(arg: str) -> Optional[str]: """Strip a trailing .py or .pyi suffix. Return None if no such suffix is found. """ for ext in PY_EXTENSIONS: if arg.endswith(ext): return arg[:-len(ext)] return None def get_init_file(dir: str) -> Optional[str]: """Check whether a directory contains a file named __init__.py[i]. If so, return the file's name (with dir prefixed). If not, return None. This prefers .pyi over .py (because of the ordering of PY_EXTENSIONS). 
""" for ext in PY_EXTENSIONS: f = os.path.join(dir, '__init__' + ext) if os.path.isfile(f): return f return None # For most options, the type of the default value set in options.py is # sufficient, and we don't have to do anything here. This table # exists to specify types for values initialized to None or container # types. config_types = { 'python_version': parse_version, 'strict_optional_whitelist': lambda s: s.split(), 'custom_typing_module': str, 'custom_typeshed_dir': str, 'mypy_path': lambda s: [p.strip() for p in re.split('[,:]', s)], 'junit_xml': str, # These two are for backwards compatibility 'silent_imports': bool, 'almost_silent': bool, 'plugins': lambda s: [p.strip() for p in s.split(',')], } SHARED_CONFIG_FILES = ('setup.cfg',) def parse_config_file(options: Options, filename: Optional[str]) -> None: """Parse a config file into an Options object. Errors are written to stderr but are not fatal. If filename is None, fall back to default config file and then to setup.cfg. """ if filename is not None: config_files = (filename,) # type: Tuple[str, ...] else: config_files = (defaults.CONFIG_FILE,) + SHARED_CONFIG_FILES parser = configparser.RawConfigParser() for config_file in config_files: if not os.path.exists(config_file): continue try: parser.read(config_file) except configparser.Error as err: print("%s: %s" % (config_file, err), file=sys.stderr) else: file_read = config_file options.config_file = file_read break else: return if 'mypy' not in parser: if filename or file_read not in SHARED_CONFIG_FILES: print("%s: No [mypy] section in config file" % file_read, file=sys.stderr) else: section = parser['mypy'] prefix = '%s: [%s]' % (file_read, 'mypy') updates, report_dirs = parse_section(prefix, options, section) for k, v in updates.items(): setattr(options, k, v) options.report_dirs.update(report_dirs) for name, section in parser.items(): if name.startswith('mypy-'): prefix = '%s: [%s]' % (file_read, name) updates, report_dirs = parse_section(prefix, options, section) if report_dirs: print("%s: Per-module sections should not specify reports (%s)" % (prefix, ', '.join(s + '_report' for s in sorted(report_dirs))), file=sys.stderr) if set(updates) - Options.PER_MODULE_OPTIONS: print("%s: Per-module sections should only specify per-module flags (%s)" % (prefix, ', '.join(sorted(set(updates) - Options.PER_MODULE_OPTIONS))), file=sys.stderr) updates = {k: v for k, v in updates.items() if k in Options.PER_MODULE_OPTIONS} globs = name[5:] for glob in globs.split(','): # For backwards compatibility, replace (back)slashes with dots. glob = glob.replace(os.sep, '.') if os.altsep: glob = glob.replace(os.altsep, '.') pattern = re.compile(fnmatch.translate(glob)) options.per_module_options[pattern] = updates options.unused_configs[pattern] = glob def parse_section(prefix: str, template: Options, section: Mapping[str, str]) -> Tuple[Dict[str, object], Dict[str, str]]: """Parse one section of a config file. Returns a dict of option values encountered, and a dict of report directories. 
""" results = {} # type: Dict[str, object] report_dirs = {} # type: Dict[str, str] for key in section: orig_key = key key = key.replace('-', '_') if key in config_types: ct = config_types[key] else: dv = getattr(template, key, None) if dv is None: if key.endswith('_report'): report_type = key[:-7].replace('_', '-') if report_type in reporter_classes: report_dirs[report_type] = section[orig_key] else: print("%s: Unrecognized report type: %s" % (prefix, orig_key), file=sys.stderr) continue print("%s: Unrecognized option: %s = %s" % (prefix, key, section[orig_key]), file=sys.stderr) continue ct = type(dv) v = None # type: Any try: if ct is bool: v = section.getboolean(key) # type: ignore # Until better stub elif callable(ct): try: v = ct(section.get(key)) except argparse.ArgumentTypeError as err: print("%s: %s: %s" % (prefix, key, err), file=sys.stderr) continue else: print("%s: Don't know what type %s should have" % (prefix, key), file=sys.stderr) continue except ValueError as err: print("%s: %s: %s" % (prefix, key, err), file=sys.stderr) continue if key == 'silent_imports': print("%s: silent_imports has been replaced by " "ignore_missing_imports=True; follow_imports=skip" % prefix, file=sys.stderr) if v: if 'ignore_missing_imports' not in results: results['ignore_missing_imports'] = True if 'follow_imports' not in results: results['follow_imports'] = 'skip' if key == 'almost_silent': print("%s: almost_silent has been replaced by " "follow_imports=error" % prefix, file=sys.stderr) if v: if 'follow_imports' not in results: results['follow_imports'] = 'error' results[key] = v return results, report_dirs def fail(msg: str) -> None: sys.stderr.write('%s\n' % msg) sys.exit(1) mypy-0.560/mypy/maptype.py0000644€tŠÔÚ€2›s®0000000764213215007205021727 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, List from mypy.expandtype import expand_type from mypy.nodes import TypeInfo from mypy.types import Type, TypeVarId, Instance, AnyType, TypeOfAny def map_instance_to_supertype(instance: Instance, superclass: TypeInfo) -> Instance: """Produce a supertype of `instance` that is an Instance of `superclass`, mapping type arguments up the chain of bases. If `superclass` is not a nominal superclass of `instance.type`, then all type arguments are mapped to 'Any'. """ if instance.type == superclass: # Fast path: `instance` already belongs to `superclass`. return instance if not superclass.type_vars: # Fast path: `superclass` has no type variables to map to. return Instance(superclass, []) return map_instance_to_supertypes(instance, superclass)[0] def map_instance_to_supertypes(instance: Instance, supertype: TypeInfo) -> List[Instance]: # FIX: Currently we should only have one supertype per interface, so no # need to return an array result = [] # type: List[Instance] for path in class_derivation_paths(instance.type, supertype): types = [instance] for sup in path: a = [] # type: List[Instance] for t in types: a.extend(map_instance_to_direct_supertypes(t, sup)) types = a result.extend(types) if result: return result else: # Nothing. Presumably due to an error. Construct a dummy using Any. any_type = AnyType(TypeOfAny.from_error) return [Instance(supertype, [any_type] * len(supertype.type_vars))] def class_derivation_paths(typ: TypeInfo, supertype: TypeInfo) -> List[List[TypeInfo]]: """Return an array of non-empty paths of direct base classes from type to supertype. Return [] if no such path could be found. 
InterfaceImplementationPaths(A, B) == [[B]] if A inherits B InterfaceImplementationPaths(A, C) == [[B, C]] if A inherits B and B inherits C """ # FIX: Currently we might only ever have a single path, so this could be # simplified result = [] # type: List[List[TypeInfo]] for base in typ.bases: btype = base.type if btype == supertype: result.append([btype]) else: # Try constructing a longer path via the base class. for path in class_derivation_paths(btype, supertype): result.append([btype] + path) return result def map_instance_to_direct_supertypes(instance: Instance, supertype: TypeInfo) -> List[Instance]: # FIX: There should only be one supertypes, always. typ = instance.type result = [] # type: List[Instance] for b in typ.bases: if b.type == supertype: env = instance_to_type_environment(instance) t = expand_type(b, env) assert isinstance(t, Instance) result.append(t) if result: return result else: # Relationship with the supertype not specified explicitly. Use dynamic # type arguments implicitly. any_type = AnyType(TypeOfAny.unannotated) return [Instance(supertype, [any_type] * len(supertype.type_vars))] def instance_to_type_environment(instance: Instance) -> Dict[TypeVarId, Type]: """Given an Instance, produce the resulting type environment for type variables bound by the Instance's class definition. An Instance is a type application of a class (a TypeInfo) to its required number of type arguments. So this environment consists of the class's type variables mapped to the Instance's actual arguments. The type variables are mapped by their `id`. """ return {binder.id: arg for binder, arg in zip(instance.type.defn.type_vars, instance.args)} mypy-0.560/mypy/meet.py0000644€tŠÔÚ€2›s®0000003616613215007205021205 0ustar jukkaDROPBOX\Domain Users00000000000000from collections import OrderedDict from typing import List, Optional, cast, Tuple from mypy.join import is_similar_callables, combine_similar_callables, join_type_list from mypy.types import ( Type, AnyType, TypeVisitor, UnboundType, NoneTyp, TypeVarType, Instance, CallableType, TupleType, TypedDictType, ErasedType, TypeList, UnionType, PartialType, DeletedType, UninhabitedType, TypeType, TypeOfAny ) from mypy.subtypes import is_equivalent, is_subtype, is_protocol_implementation from mypy import experiments # TODO Describe this module. 
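# Roughly, this module computes greatest lower bounds ("meets") of types,
# the dual of the joins computed in join.py.  For example, the meet of 'int'
# and 'object' is 'int', and the meet of two unrelated instance types is
# UninhabitedType under strict optional checking (NoneTyp otherwise).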
def meet_types(s: Type, t: Type) -> Type: """Return the greatest lower bound of two types.""" if isinstance(s, ErasedType): return s if isinstance(s, AnyType): return t if isinstance(s, UnionType) and not isinstance(t, UnionType): s, t = t, s return t.accept(TypeMeetVisitor(s)) def narrow_declared_type(declared: Type, narrowed: Type) -> Type: """Return the declared type narrowed down to another type.""" if declared == narrowed: return declared if isinstance(declared, UnionType): return UnionType.make_simplified_union([narrow_declared_type(x, narrowed) for x in declared.relevant_items()]) elif not is_overlapping_types(declared, narrowed, use_promotions=True): if experiments.STRICT_OPTIONAL: return UninhabitedType() else: return NoneTyp() elif isinstance(narrowed, UnionType): return UnionType.make_simplified_union([narrow_declared_type(declared, x) for x in narrowed.relevant_items()]) elif isinstance(narrowed, AnyType): return narrowed elif isinstance(declared, (Instance, TupleType)): return meet_types(declared, narrowed) elif isinstance(declared, TypeType) and isinstance(narrowed, TypeType): return TypeType.make_normalized(narrow_declared_type(declared.item, narrowed.item)) return narrowed def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool: """Can a value of type t be a value of type s, or vice versa? Note that this effectively checks against erased types, since type variables are erased at runtime and the overlapping check is based on runtime behavior. The exception is protocol types, it is not safe, but convenient and is an opt-in behavior. If use_promotions is True, also consider type promotions (int and float would only be overlapping if it's True). This does not consider multiple inheritance. For example, A and B in the following example are not considered overlapping, even though via C they can be overlapping: class A: ... class B: ... class C(A, B): ... The rationale is that this case is usually very unlikely as multiple inheritance is rare. Also, we can't reliably determine whether multiple inheritance actually occurs somewhere in a program, due to stub files hiding implementation details, dynamic loading etc. TODO: Don't consider callables always overlapping. TODO: Don't consider type variables with values always overlapping. """ # Any overlaps with everything if isinstance(t, AnyType) or isinstance(s, AnyType): return True # object overlaps with everything if (isinstance(t, Instance) and t.type.fullname() == 'builtins.object' or isinstance(s, Instance) and s.type.fullname() == 'builtins.object'): return True # Since we are effectively working with the erased types, we only # need to handle occurrences of TypeVarType at the top level. if isinstance(t, TypeVarType): t = t.erase_to_union_or_bound() if isinstance(s, TypeVarType): s = s.erase_to_union_or_bound() if isinstance(t, TypedDictType): t = t.as_anonymous().fallback if isinstance(s, TypedDictType): s = s.as_anonymous().fallback if isinstance(t, UnionType): return any(is_overlapping_types(item, s) for item in t.relevant_items()) if isinstance(s, UnionType): return any(is_overlapping_types(t, item) for item in s.relevant_items()) # We must check for TupleTypes before Instances, since Tuple[A, ...] # is an Instance tup_overlap = is_overlapping_tuples(t, s, use_promotions) if tup_overlap is not None: return tup_overlap if isinstance(t, Instance): if isinstance(s, Instance): # Consider two classes non-disjoint if one is included in the mro # of another. 
if use_promotions: # Consider cases like int vs float to be overlapping where # there is only a type promotion relationship but not proper # subclassing. if t.type._promote and is_overlapping_types(t.type._promote, s): return True if s.type._promote and is_overlapping_types(s.type._promote, t): return True if t.type in s.type.mro or s.type in t.type.mro: return True if t.type.is_protocol and is_protocol_implementation(s, t): return True if s.type.is_protocol and is_protocol_implementation(t, s): return True return False if isinstance(t, TypeType) and isinstance(s, TypeType): # If both types are TypeType, compare their inner types. return is_overlapping_types(t.item, s.item, use_promotions) elif isinstance(t, TypeType) or isinstance(s, TypeType): # If exactly only one of t or s is a TypeType, check if one of them # is an `object` or a `type` and otherwise assume no overlap. one = t if isinstance(t, TypeType) else s other = s if isinstance(t, TypeType) else t if isinstance(other, Instance): return other.type.fullname() in {'builtins.object', 'builtins.type'} else: return isinstance(other, CallableType) and is_subtype(other, one) if experiments.STRICT_OPTIONAL: if isinstance(t, NoneTyp) != isinstance(s, NoneTyp): # NoneTyp does not overlap with other non-Union types under strict Optional checking return False # We conservatively assume that non-instance, non-union, non-TupleType and non-TypeType types # can overlap any other types. return True def is_overlapping_tuples(t: Type, s: Type, use_promotions: bool) -> Optional[bool]: """Part of is_overlapping_types(), for tuples only""" t = adjust_tuple(t, s) or t s = adjust_tuple(s, t) or s if isinstance(t, TupleType) or isinstance(s, TupleType): if isinstance(t, TupleType) and isinstance(s, TupleType): if t.length() == s.length(): if all(is_overlapping_types(ti, si, use_promotions) for ti, si in zip(t.items, s.items)): return True # TupleType and non-tuples do not overlap return False # No tuples are involved here return None def adjust_tuple(left: Type, r: Type) -> Optional[TupleType]: """Find out if `left` is a Tuple[A, ...], and adjust its length to `right`""" if isinstance(left, Instance) and left.type.fullname() == 'builtins.tuple': n = r.length() if isinstance(r, TupleType) else 1 return TupleType([left.args[0]] * n, left) return None class TypeMeetVisitor(TypeVisitor[Type]): def __init__(self, s: Type) -> None: self.s = s def visit_unbound_type(self, t: UnboundType) -> Type: if isinstance(self.s, NoneTyp): if experiments.STRICT_OPTIONAL: return AnyType(TypeOfAny.special_form) else: return self.s elif isinstance(self.s, UninhabitedType): return self.s else: return AnyType(TypeOfAny.special_form) def visit_any(self, t: AnyType) -> Type: return self.s def visit_union_type(self, t: UnionType) -> Type: if isinstance(self.s, UnionType): meets = [] # type: List[Type] for x in t.items: for y in self.s.items: meets.append(meet_types(x, y)) else: meets = [meet_types(x, self.s) for x in t.items] return UnionType.make_simplified_union(meets) def visit_none_type(self, t: NoneTyp) -> Type: if experiments.STRICT_OPTIONAL: if isinstance(self.s, NoneTyp) or (isinstance(self.s, Instance) and self.s.type.fullname() == 'builtins.object'): return t else: return UninhabitedType() else: return t def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return t def visit_deleted_type(self, t: DeletedType) -> Type: if isinstance(self.s, NoneTyp): if experiments.STRICT_OPTIONAL: return t else: return self.s elif isinstance(self.s, UninhabitedType): return 
self.s else: return t def visit_erased_type(self, t: ErasedType) -> Type: return self.s def visit_type_var(self, t: TypeVarType) -> Type: if isinstance(self.s, TypeVarType) and self.s.id == t.id: return self.s else: return self.default(self.s) def visit_instance(self, t: Instance) -> Type: if isinstance(self.s, Instance): si = self.s if t.type == si.type: if is_subtype(t, self.s) or is_subtype(self.s, t): # Combine type arguments. We could have used join below # equivalently. args = [] # type: List[Type] for i in range(len(t.args)): args.append(self.meet(t.args[i], si.args[i])) return Instance(t.type, args) else: if experiments.STRICT_OPTIONAL: return UninhabitedType() else: return NoneTyp() else: if is_subtype(t, self.s): return t elif is_subtype(self.s, t): # See also above comment. return self.s else: if experiments.STRICT_OPTIONAL: return UninhabitedType() else: return NoneTyp() elif isinstance(self.s, TypeType): return meet_types(t, self.s) elif isinstance(self.s, TupleType): return meet_types(t, self.s) else: return self.default(self.s) def visit_callable_type(self, t: CallableType) -> Type: if isinstance(self.s, CallableType) and is_similar_callables(t, self.s): if is_equivalent(t, self.s): return combine_similar_callables(t, self.s) result = meet_similar_callables(t, self.s) if isinstance(result.ret_type, UninhabitedType): # Return a plain None or instead of a weird function. return self.default(self.s) return result else: return self.default(self.s) def visit_tuple_type(self, t: TupleType) -> Type: if isinstance(self.s, TupleType) and self.s.length() == t.length(): items = [] # type: List[Type] for i in range(t.length()): items.append(self.meet(t.items[i], self.s.items[i])) # TODO: What if the fallbacks are different? return TupleType(items, t.fallback) # meet(Tuple[t1, t2, <...>], Tuple[s, ...]) == Tuple[meet(t1, s), meet(t2, s), <...>]. elif (isinstance(self.s, Instance) and self.s.type.fullname() == 'builtins.tuple' and self.s.args): return t.copy_modified(items=[meet_types(it, self.s.args[0]) for it in t.items]) elif (isinstance(self.s, Instance) and t.fallback.type == self.s.type): # Uh oh, a broken named tuple type (https://github.com/python/mypy/issues/3016). # Do something reasonable until that bug is fixed. return t else: return self.default(self.s) def visit_typeddict_type(self, t: TypedDictType) -> Type: if isinstance(self.s, TypedDictType): for (name, l, r) in self.s.zip(t): if (not is_equivalent(l, r) or (name in t.required_keys) != (name in self.s.required_keys)): return self.default(self.s) item_list = [] # type: List[Tuple[str, Type]] for (item_name, s_item_type, t_item_type) in self.s.zipall(t): if s_item_type is not None: item_list.append((item_name, s_item_type)) else: # at least one of s_item_type and t_item_type is not None assert t_item_type is not None item_list.append((item_name, t_item_type)) items = OrderedDict(item_list) mapping_value_type = join_type_list(list(items.values())) fallback = self.s.create_anonymous_fallback(value_type=mapping_value_type) required_keys = t.required_keys | self.s.required_keys return TypedDictType(items, required_keys, fallback) else: return self.default(self.s) def visit_partial_type(self, t: PartialType) -> Type: # We can't determine the meet of partial types. We should never get here. 
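# Added explanatory comment (not part of the original source): PartialType is
# the checker's placeholder for a not-yet-complete inferred type, e.g. the type
# of `x` right after `x = []` before any element has been appended. Such
# placeholders are expected to be resolved before meets are computed, so
# reaching this visitor method indicates an internal error.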
assert False, 'Internal error' def visit_type_type(self, t: TypeType) -> Type: if isinstance(self.s, TypeType): typ = self.meet(t.item, self.s.item) if not isinstance(typ, NoneTyp): typ = TypeType.make_normalized(typ, line=t.line) return typ elif isinstance(self.s, Instance) and self.s.type.fullname() == 'builtins.type': return t else: return self.default(self.s) def meet(self, s: Type, t: Type) -> Type: return meet_types(s, t) def default(self, typ: Type) -> Type: if isinstance(typ, UnboundType): return AnyType(TypeOfAny.special_form) else: if experiments.STRICT_OPTIONAL: return UninhabitedType() else: return NoneTyp() def meet_similar_callables(t: CallableType, s: CallableType) -> CallableType: from mypy.join import join_types arg_types = [] # type: List[Type] for i in range(len(t.arg_types)): arg_types.append(join_types(t.arg_types[i], s.arg_types[i])) # TODO in combine_similar_callables also applies here (names and kinds) # The fallback type can be either 'function' or 'type'. The result should have 'function' as # fallback only if both operands have it as 'function'. if t.fallback.type.fullname() != 'builtins.function': fallback = t.fallback else: fallback = s.fallback return t.copy_modified(arg_types=arg_types, ret_type=meet_types(t.ret_type, s.ret_type), fallback=fallback, name=None) mypy-0.560/mypy/messages.py0000644€tŠÔÚ€2›s®0000021104713215007205022053 0ustar jukkaDROPBOX\Domain Users00000000000000"""Facilities and constants for generating error messages during type checking. Don't add any non-trivial message construction logic to the type checker, as it can compromise clarity and make messages less consistent. Add such logic to this module instead. Literal messages used in multiple places should also be defined as constants in this module so they won't get out of sync. Historically we tried to avoid all message string literals in the type checker but we are moving away from this convention. """ import re import difflib from typing import cast, List, Dict, Any, Sequence, Iterable, Tuple, Set, Optional, Union from mypy.erasetype import erase_type from mypy.errors import Errors from mypy.types import ( Type, CallableType, Instance, TypeVarType, TupleType, TypedDictType, UnionType, NoneTyp, AnyType, Overloaded, FunctionLike, DeletedType, TypeType, UninhabitedType, TypeOfAny, ForwardRef, UnboundType ) from mypy.nodes import ( TypeInfo, Context, MypyFile, op_methods, FuncDef, reverse_type_aliases, ARG_POS, ARG_OPT, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2, ReturnStmt, NameExpr, Var, CONTRAVARIANT, COVARIANT, ) # Constants that represent simple type checker error message, i.e. messages # that do not have any parameters. 
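# Added illustrative comment (not part of the original source): the type
# checker typically passes these constants directly to MessageBuilder.fail(),
# e.g. (hypothetical call site, for illustration only)
#     self.msg.fail(messages.MISSING_RETURN_STATEMENT, context)
# so that each literal message text is defined in exactly one place.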
NO_RETURN_VALUE_EXPECTED = 'No return value expected' MISSING_RETURN_STATEMENT = 'Missing return statement' INVALID_IMPLICIT_RETURN = 'Implicit return in function which does not return' INCOMPATIBLE_RETURN_VALUE_TYPE = 'Incompatible return value type' RETURN_VALUE_EXPECTED = 'Return value expected' NO_RETURN_EXPECTED = 'Return statement in function which does not return' INVALID_EXCEPTION = 'Exception must be derived from BaseException' INVALID_EXCEPTION_TYPE = 'Exception type must be derived from BaseException' INVALID_RETURN_TYPE_FOR_GENERATOR = \ 'The return type of a generator function should be "Generator" or one of its supertypes' INVALID_RETURN_TYPE_FOR_ASYNC_GENERATOR = \ 'The return type of an async generator function should be "AsyncGenerator" or one of its ' \ 'supertypes' INVALID_GENERATOR_RETURN_ITEM_TYPE = \ 'The return type of a generator function must be None in its third type parameter in Python 2' YIELD_VALUE_EXPECTED = 'Yield value expected' INCOMPATIBLE_TYPES = 'Incompatible types' INCOMPATIBLE_TYPES_IN_ASSIGNMENT = 'Incompatible types in assignment' INCOMPATIBLE_REDEFINITION = 'Incompatible redefinition' INCOMPATIBLE_TYPES_IN_AWAIT = 'Incompatible types in "await"' INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER = 'Incompatible types in "async with" for "__aenter__"' INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AEXIT = 'Incompatible types in "async with" for "__aexit__"' INCOMPATIBLE_TYPES_IN_ASYNC_FOR = 'Incompatible types in "async for"' INCOMPATIBLE_TYPES_IN_YIELD = 'Incompatible types in "yield"' INCOMPATIBLE_TYPES_IN_YIELD_FROM = 'Incompatible types in "yield from"' INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION = 'Incompatible types in string interpolation' MUST_HAVE_NONE_RETURN_TYPE = 'The return type of "{}" must be None' INVALID_TUPLE_INDEX_TYPE = 'Invalid tuple index type' TUPLE_INDEX_OUT_OF_RANGE = 'Tuple index out of range' NEED_ANNOTATION_FOR_VAR = 'Need type annotation for variable' ITERABLE_EXPECTED = 'Iterable expected' ASYNC_ITERABLE_EXPECTED = 'AsyncIterable expected' INVALID_SLICE_INDEX = 'Slice index must be an integer or None' CANNOT_INFER_LAMBDA_TYPE = 'Cannot infer type of lambda' CANNOT_INFER_ITEM_TYPE = 'Cannot infer iterable item type' CANNOT_ACCESS_INIT = 'Cannot access "__init__" directly' CANNOT_ASSIGN_TO_METHOD = 'Cannot assign to a method' CANNOT_ASSIGN_TO_TYPE = 'Cannot assign to a type' INCONSISTENT_ABSTRACT_OVERLOAD = \ 'Overloaded method has both abstract and non-abstract variants' READ_ONLY_PROPERTY_OVERRIDES_READ_WRITE = \ 'Read-only property cannot override read-write property' FORMAT_REQUIRES_MAPPING = 'Format requires a mapping' RETURN_TYPE_CANNOT_BE_CONTRAVARIANT = "Cannot use a contravariant type variable as return type" FUNCTION_PARAMETER_CANNOT_BE_COVARIANT = "Cannot use a covariant type variable as a parameter" INCOMPATIBLE_IMPORT_OF = "Incompatible import of" FUNCTION_TYPE_EXPECTED = "Function is missing a type annotation" ONLY_CLASS_APPLICATION = "Type application is only supported for generic classes" RETURN_TYPE_EXPECTED = "Function is missing a return type annotation" ARGUMENT_TYPE_EXPECTED = "Function is missing a type annotation for one or more arguments" KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE = \ 'Keyword argument only valid with "str" key type in call to "dict"' ALL_MUST_BE_SEQ_STR = 'Type of __all__ must be {}, not {}' INVALID_TYPEDDICT_ARGS = \ 'Expected keyword arguments, {...}, or dict(...) 
in TypedDict constructor' TYPEDDICT_KEY_MUST_BE_STRING_LITERAL = \ 'Expected TypedDict key to be string literal' MALFORMED_ASSERT = 'Assertion is always true, perhaps remove parentheses?' NON_BOOLEAN_IN_CONDITIONAL = 'Condition must be a boolean' DUPLICATE_TYPE_SIGNATURES = 'Function has duplicate type signatures' GENERIC_INSTANCE_VAR_CLASS_ACCESS = 'Access to generic instance variables via class is ambiguous' CANNOT_ISINSTANCE_TYPEDDICT = 'Cannot use isinstance() with a TypedDict type' CANNOT_ISINSTANCE_NEWTYPE = 'Cannot use isinstance() with a NewType type' BARE_GENERIC = 'Missing type parameters for generic type' IMPLICIT_GENERIC_ANY_BUILTIN = 'Implicit generic "Any". Use \'{}\' and specify generic parameters' INCOMPATIBLE_TYPEVAR_VALUE = 'Value of type variable "{}" of {} cannot be {}' UNSUPPORTED_ARGUMENT_2_FOR_SUPER = 'Unsupported argument 2 for "super"' ARG_CONSTRUCTOR_NAMES = { ARG_POS: "Arg", ARG_OPT: "DefaultArg", ARG_NAMED: "NamedArg", ARG_NAMED_OPT: "DefaultNamedArg", ARG_STAR: "VarArg", ARG_STAR2: "KwArg", } class MessageBuilder: """Helper class for reporting type checker error messages with parameters. The methods of this class need to be provided with the context within a file; the errors member manages the wider context. IDEA: Support a 'verbose mode' that includes full information about types in error messages and that may otherwise produce more detailed error messages. """ # Report errors using this instance. It knows about the current file and # import context. errors = None # type: Errors modules = None # type: Dict[str, MypyFile] # Number of times errors have been disabled. disable_count = 0 # Hack to deduplicate error messages from union types disable_type_names = 0 def __init__(self, errors: Errors, modules: Dict[str, MypyFile]) -> None: self.errors = errors self.modules = modules self.disable_count = 0 self.disable_type_names = 0 # # Helpers # def copy(self) -> 'MessageBuilder': new = MessageBuilder(self.errors.copy(), self.modules) new.disable_count = self.disable_count new.disable_type_names = self.disable_type_names return new def add_errors(self, messages: 'MessageBuilder') -> None: """Add errors in messages to this builder.""" if self.disable_count <= 0: for info in messages.errors.error_info: self.errors.add_error_info(info) def disable_errors(self) -> None: self.disable_count += 1 def enable_errors(self) -> None: self.disable_count -= 1 def is_errors(self) -> bool: return self.errors.is_errors() def report(self, msg: str, context: Optional[Context], severity: str, file: Optional[str] = None, origin: Optional[Context] = None, offset: int = 0) -> None: """Report an error or note (unless disabled).""" if self.disable_count <= 0: self.errors.report(context.get_line() if context else -1, context.get_column() if context else -1, msg.strip(), severity=severity, file=file, offset=offset, origin_line=origin.get_line() if origin else None) def fail(self, msg: str, context: Optional[Context], file: Optional[str] = None, origin: Optional[Context] = None) -> None: """Report an error message (unless disabled).""" self.report(msg, context, 'error', file=file, origin=origin) def note(self, msg: str, context: Context, file: Optional[str] = None, origin: Optional[Context] = None, offset: int = 0) -> None: """Report a note (unless disabled).""" self.report(msg, context, 'note', file=file, origin=origin, offset=offset) def warn(self, msg: str, context: Context, file: Optional[str] = None, origin: Optional[Context] = None) -> None: """Report a warning message (unless 
disabled).""" self.report(msg, context, 'warning', file=file, origin=origin) def quote_type_string(self, type_string: str) -> str: """Quotes a type representation for use in messages.""" no_quote_regex = r'^<(tuple|union): \d+ items>$' if (type_string in ['Module', 'overloaded function', '', ''] or re.match(no_quote_regex, type_string) is not None or type_string.endswith('?')): # Messages are easier to read if these aren't quoted. We use a # regex to match strings with variable contents. return type_string return '"{}"'.format(type_string) def format(self, typ: Type, verbosity: int = 0) -> str: """ Convert a type to a relatively short string suitable for error messages. This method returns a string appropriate for unmodified use in error messages; this means that it will be quoted in most cases. If modification of the formatted string is required, callers should use .format_bare. """ return self.quote_type_string(self.format_bare(typ, verbosity)) def format_bare(self, typ: Type, verbosity: int = 0) -> str: """ Convert a type to a relatively short string suitable for error messages. This method will return an unquoted string. If a caller doesn't need to perform post-processing on the string output, .format should be used instead. (The caller may want to use .quote_type_string after processing has happened, to maintain consistent quoting in messages.) """ if isinstance(typ, Instance): itype = typ # Get the short name of the type. if itype.type.fullname() in ('types.ModuleType', '_importlib_modulespec.ModuleType'): # Make some common error messages simpler and tidier. return 'Module' if verbosity >= 2: base_str = itype.type.fullname() else: base_str = itype.type.name() if itype.args == []: # No type arguments, just return the type name return base_str elif itype.type.fullname() == 'builtins.tuple': item_type_str = self.format_bare(itype.args[0]) return 'Tuple[{}, ...]'.format(item_type_str) elif itype.type.fullname() in reverse_type_aliases: alias = reverse_type_aliases[itype.type.fullname()] alias = alias.split('.')[-1] items = [self.format_bare(arg) for arg in itype.args] return '{}[{}]'.format(alias, ', '.join(items)) else: # There are type arguments. Convert the arguments to strings. # If the result is too long, replace arguments with [...]. a = [] # type: List[str] for arg in itype.args: a.append(self.format_bare(arg)) s = ', '.join(a) if len((base_str + s)) < 150: return '{}[{}]'.format(base_str, s) else: return '{}[...]'.format(base_str) elif isinstance(typ, TypeVarType): # This is similar to non-generic instance types. return typ.name elif isinstance(typ, TupleType): # Prefer the name of the fallback class (if not tuple), as it's more informative. if typ.fallback.type.fullname() != 'builtins.tuple': return self.format_bare(typ.fallback) items = [] for t in typ.items: items.append(self.format_bare(t)) s = 'Tuple[{}]'.format(', '.join(items)) if len(s) < 400: return s else: return ''.format(len(items)) elif isinstance(typ, TypedDictType): # If the TypedDictType is named, return the name if not typ.is_anonymous(): return self.format_bare(typ.fallback) items = [] for (item_name, item_type) in typ.items.items(): modifier = '' if item_name in typ.required_keys else '?' 
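# Added illustrative comment (not part of the original source): for an
# anonymous TypedDict with a required key 'x' of type int and a non-required
# key 'y' of type str, this loop yields the string
#     TypedDict({'x': int, 'y'?: str})
# i.e. non-required keys are marked with a trailing '?'.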
items.append('{!r}{}: {}'.format(item_name, modifier, self.format_bare(item_type))) s = 'TypedDict({{{}}})'.format(', '.join(items)) return s elif isinstance(typ, UnionType): # Only print Unions as Optionals if the Optional wouldn't have to contain another Union print_as_optional = (len(typ.items) - sum(isinstance(t, NoneTyp) for t in typ.items) == 1) if print_as_optional: rest = [t for t in typ.items if not isinstance(t, NoneTyp)] return 'Optional[{}]'.format(self.format_bare(rest[0])) else: items = [] for t in typ.items: items.append(self.format_bare(t)) s = 'Union[{}]'.format(', '.join(items)) if len(s) < 400: return s else: return ''.format(len(items)) elif isinstance(typ, NoneTyp): return 'None' elif isinstance(typ, AnyType): return 'Any' elif isinstance(typ, DeletedType): return '' elif isinstance(typ, UninhabitedType): if typ.is_noreturn: return 'NoReturn' else: return '' elif isinstance(typ, TypeType): return 'Type[{}]'.format(self.format_bare(typ.item, verbosity)) elif isinstance(typ, ForwardRef): # may appear in semanal.py if typ.resolved: return self.format_bare(typ.resolved, verbosity) else: return self.format_bare(typ.unbound, verbosity) elif isinstance(typ, FunctionLike): func = typ if func.is_type_obj(): # The type of a type object type can be derived from the # return type (this always works). return self.format_bare( TypeType.make_normalized( erase_type(func.items()[0].ret_type)), verbosity) elif isinstance(func, CallableType): return_type = self.format_bare(func.ret_type) if func.is_ellipsis_args: return 'Callable[..., {}]'.format(return_type) arg_strings = [] for arg_name, arg_type, arg_kind in zip( func.arg_names, func.arg_types, func.arg_kinds): if (arg_kind == ARG_POS and arg_name is None or verbosity == 0 and arg_kind in (ARG_POS, ARG_OPT)): arg_strings.append( self.format_bare( arg_type, verbosity = max(verbosity - 1, 0))) else: constructor = ARG_CONSTRUCTOR_NAMES[arg_kind] if arg_kind in (ARG_STAR, ARG_STAR2) or arg_name is None: arg_strings.append("{}({})".format( constructor, self.format_bare(arg_type))) else: arg_strings.append("{}({}, {})".format( constructor, self.format_bare(arg_type), repr(arg_name))) return 'Callable[[{}], {}]'.format(", ".join(arg_strings), return_type) else: # Use a simple representation for function types; proper # function types may result in long and difficult-to-read # error messages. return 'overloaded function' elif isinstance(typ, UnboundType): return str(typ) elif typ is None: raise RuntimeError('Type is None') else: # Default case; we simply have to return something meaningful here. return 'object' def format_distinctly(self, type1: Type, type2: Type, bare: bool = False) -> Tuple[str, str]: """Jointly format a pair of types to distinct strings. Increase the verbosity of the type strings until they become distinct. By default, the returned strings are created using .format() and will be quoted accordingly. If ``bare`` is True, the returned strings will not be quoted; callers who need to do post-processing of the strings before quoting them (such as prepending * or **) should use this. """ if bare: format_method = self.format_bare else: format_method = self.format verbosity = 0 for verbosity in range(3): str1 = format_method(type1, verbosity=verbosity) str2 = format_method(type2, verbosity=verbosity) if str1 != str2: return (str1, str2) return (str1, str2) # # Specific operations # # The following operations are for generating specific error messages. 
They # get some information as arguments, and they build an error message based # on them. def has_no_attr(self, original_type: Type, typ: Type, member: str, context: Context) -> Type: """Report a missing or non-accessible member. original_type is the top-level type on which the error occurred. typ is the actual type that is missing the member. These can be different, e.g., in a union, original_type will be the union and typ will be the specific item in the union that does not have the member attribute. If member corresponds to an operator, use the corresponding operator name in the messages. Return type Any. """ if (isinstance(original_type, Instance) and original_type.type.has_readable_member(member)): self.fail('Member "{}" is not assignable'.format(member), context) elif member == '__contains__': self.fail('Unsupported right operand type for in ({})'.format( self.format(original_type)), context) elif member in op_methods.values(): # Access to a binary operator member (e.g. _add). This case does # not handle indexing operations. for op, method in op_methods.items(): if method == member: self.unsupported_left_operand(op, original_type, context) break elif member == '__neg__': self.fail('Unsupported operand type for unary - ({})'.format( self.format(original_type)), context) elif member == '__pos__': self.fail('Unsupported operand type for unary + ({})'.format( self.format(original_type)), context) elif member == '__invert__': self.fail('Unsupported operand type for ~ ({})'.format( self.format(original_type)), context) elif member == '__getitem__': # Indexed get. # TODO: Fix this consistently in self.format if isinstance(original_type, CallableType) and original_type.is_type_obj(): self.fail('The type {} is not generic and not indexable'.format( self.format(original_type)), context) else: self.fail('Value of type {} is not indexable'.format( self.format(original_type)), context) elif member == '__setitem__': # Indexed set. self.fail('Unsupported target for indexed assignment', context) elif member == '__call__': if isinstance(original_type, Instance) and \ (original_type.type.fullname() == 'builtins.function'): # "'function' not callable" is a confusing error message. # Explain that the problem is that the type of the function is not known. self.fail('Cannot call function of unknown type', context) else: self.fail('{} not callable'.format(self.format(original_type)), context) else: # The non-special case: a missing ordinary attribute. if not self.disable_type_names: failed = False if isinstance(original_type, Instance) and original_type.type.names: alternatives = set(original_type.type.names.keys()) matches = [m for m in COMMON_MISTAKES.get(member, []) if m in alternatives] matches.extend(best_matches(member, alternatives)[:3]) if matches: self.fail('{} has no attribute "{}"; maybe {}?'.format( self.format(original_type), member, pretty_or(matches)), context) failed = True if not failed: self.fail('{} has no attribute "{}"'.format(self.format(original_type), member), context) elif isinstance(original_type, UnionType): # The checker passes "object" in lieu of "None" for attribute # checks, so we manually convert it back. 
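# Added illustrative comment (not part of the original source): for an
# expression of type Optional[str] this union branch produces a message along
# the lines of
#     Item "None" of "Optional[str]" has no attribute "upper"
# with "object" rewritten back to "None" as noted above.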
typ_format = self.format(typ) if typ_format == '"object"' and \ any(type(item) == NoneTyp for item in original_type.items): typ_format = '"None"' self.fail('Item {} of {} has no attribute "{}"'.format( typ_format, self.format(original_type), member), context) return AnyType(TypeOfAny.from_error) def unsupported_operand_types(self, op: str, left_type: Any, right_type: Any, context: Context) -> None: """Report unsupported operand types for a binary operation. Types can be Type objects or strings. """ left_str = '' if isinstance(left_type, str): left_str = left_type else: left_str = self.format(left_type) right_str = '' if isinstance(right_type, str): right_str = right_type else: right_str = self.format(right_type) if self.disable_type_names: msg = 'Unsupported operand types for {} (likely involving Union)'.format(op) else: msg = 'Unsupported operand types for {} ({} and {})'.format( op, left_str, right_str) self.fail(msg, context) def unsupported_left_operand(self, op: str, typ: Type, context: Context) -> None: if self.disable_type_names: msg = 'Unsupported left operand type for {} (some union)'.format(op) else: msg = 'Unsupported left operand type for {} ({})'.format( op, self.format(typ)) self.fail(msg, context) def not_callable(self, typ: Type, context: Context) -> Type: self.fail('{} not callable'.format(self.format(typ)), context) return AnyType(TypeOfAny.from_error) def untyped_function_call(self, callee: CallableType, context: Context) -> Type: name = callable_name(callee) or '(unknown)' self.fail('Call to untyped function {} in typed context'.format(name), context) return AnyType(TypeOfAny.from_error) def incompatible_argument(self, n: int, m: int, callee: CallableType, arg_type: Type, arg_kind: int, context: Context) -> None: """Report an error about an incompatible argument type. The argument type is arg_type, argument number is n and the callee type is 'callee'. If the callee represents a method that corresponds to an operator, use the corresponding operator name in the messages. """ target = '' callee_name = callable_name(callee) if callee_name is not None: name = callee_name if callee.bound_args and callee.bound_args[0] is not None: base = self.format(callee.bound_args[0]) else: base = extract_type(name) for op, method in op_methods.items(): for variant in method, '__r' + method[2:]: # FIX: do not rely on textual formatting if name.startswith('"{}" of'.format(variant)): if op == 'in' or variant != method: # Reversed order of base/argument. 
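# Added explanatory comment (not part of the original source): `a in b`
# dispatches to b.__contains__(a), and a reflected method such as __radd__
# likewise puts the callee on the right-hand side, so in these cases the
# reported operand order is swapped relative to the ordinary case below.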
self.unsupported_operand_types(op, arg_type, base, context) else: self.unsupported_operand_types(op, base, arg_type, context) return if name.startswith('"__getitem__" of'): self.invalid_index_type(arg_type, callee.arg_types[n - 1], base, context) return if name.startswith('"__setitem__" of'): if n == 1: self.invalid_index_type(arg_type, callee.arg_types[n - 1], base, context) else: msg = '{} (expression has type {}, target has type {})' arg_type_str, callee_type_str = self.format_distinctly(arg_type, callee.arg_types[n - 1]) self.fail(msg.format(INCOMPATIBLE_TYPES_IN_ASSIGNMENT, arg_type_str, callee_type_str), context) return target = 'to {} '.format(name) msg = '' notes = [] # type: List[str] if callee_name == '': name = callee_name[1:-1] n -= 1 actual_type_str, expected_type_str = self.format_distinctly(arg_type, callee.arg_types[0]) msg = '{} item {} has incompatible type {}; expected {}'.format( name.title(), n, actual_type_str, expected_type_str) elif callee_name == '': name = callee_name[1:-1] n -= 1 key_type, value_type = cast(TupleType, arg_type).items expected_key_type, expected_value_type = cast(TupleType, callee.arg_types[0]).items # don't increase verbosity unless there is need to do so from mypy.subtypes import is_subtype if is_subtype(key_type, expected_key_type): key_type_str = self.format(key_type) expected_key_type_str = self.format(expected_key_type) else: key_type_str, expected_key_type_str = self.format_distinctly( key_type, expected_key_type) if is_subtype(value_type, expected_value_type): value_type_str = self.format(value_type) expected_value_type_str = self.format(expected_value_type) else: value_type_str, expected_value_type_str = self.format_distinctly( value_type, expected_value_type) msg = '{} entry {} has incompatible type {}: {}; expected {}: {}'.format( name.title(), n, key_type_str, value_type_str, expected_key_type_str, expected_value_type_str) elif callee_name == '': actual_type_str, expected_type_str = map(strip_quotes, self.format_distinctly(arg_type, callee.arg_types[0])) msg = 'List comprehension has incompatible type List[{}]; expected List[{}]'.format( actual_type_str, expected_type_str) elif callee_name == '': actual_type_str, expected_type_str = map(strip_quotes, self.format_distinctly(arg_type, callee.arg_types[0])) msg = 'Set comprehension has incompatible type Set[{}]; expected Set[{}]'.format( actual_type_str, expected_type_str) elif callee_name == '': actual_type_str, expected_type_str = self.format_distinctly(arg_type, callee.arg_types[n - 1]) msg = ('{} expression in dictionary comprehension has incompatible type {}; ' 'expected type {}').format( 'Key' if n == 1 else 'Value', actual_type_str, expected_type_str) elif callee_name == '': actual_type_str, expected_type_str = self.format_distinctly(arg_type, callee.arg_types[0]) msg = 'Generator has incompatible item type {}; expected {}'.format( actual_type_str, expected_type_str) else: try: expected_type = callee.arg_types[m - 1] except IndexError: # Varargs callees expected_type = callee.arg_types[-1] arg_type_str, expected_type_str = self.format_distinctly( arg_type, expected_type, bare=True) if arg_kind == ARG_STAR: arg_type_str = '*' + arg_type_str elif arg_kind == ARG_STAR2: arg_type_str = '**' + arg_type_str msg = 'Argument {} {}has incompatible type {}; expected {}'.format( n, target, self.quote_type_string(arg_type_str), self.quote_type_string(expected_type_str)) if isinstance(arg_type, Instance) and isinstance(expected_type, Instance): notes = append_invariance_notes(notes, arg_type, 
expected_type) self.fail(msg, context) if notes: for note_msg in notes: self.note(note_msg, context) def invalid_index_type(self, index_type: Type, expected_type: Type, base_str: str, context: Context) -> None: self.fail('Invalid index type {} for {}; expected type {}'.format( self.format(index_type), base_str, self.format(expected_type)), context) def too_few_arguments(self, callee: CallableType, context: Context, argument_names: Optional[Sequence[Optional[str]]]) -> None: if (argument_names is not None and not all(k is None for k in argument_names) and len(argument_names) >= 1): diff = [k for k in callee.arg_names if k not in argument_names] if len(diff) == 1: msg = 'Missing positional argument' else: msg = 'Missing positional arguments' callee_name = callable_name(callee) if callee_name is not None and diff and all(d is not None for d in diff): args = '", "'.join(cast(List[str], diff)) msg += ' "{}" in call to {}'.format(args, callee_name) else: msg = 'Too few arguments' + for_function(callee) self.fail(msg, context) def missing_named_argument(self, callee: CallableType, context: Context, name: str) -> None: msg = 'Missing named argument "{}"'.format(name) + for_function(callee) self.fail(msg, context) def too_many_arguments(self, callee: CallableType, context: Context) -> None: msg = 'Too many arguments' + for_function(callee) self.fail(msg, context) def too_many_positional_arguments(self, callee: CallableType, context: Context) -> None: msg = 'Too many positional arguments' + for_function(callee) self.fail(msg, context) def unexpected_keyword_argument(self, callee: CallableType, name: str, context: Context) -> None: msg = 'Unexpected keyword argument "{}"'.format(name) + for_function(callee) self.fail(msg, context) module = find_defining_module(self.modules, callee) if module: assert callee.definition is not None self.note('{} defined here'.format(callable_name(callee)), callee.definition, file=module.path, origin=context) def duplicate_argument_value(self, callee: CallableType, index: int, context: Context) -> None: self.fail('{} gets multiple values for keyword argument "{}"'. format(callable_name(callee) or 'Function', callee.arg_names[index]), context) def does_not_return_value(self, callee_type: Optional[Type], context: Context) -> None: """Report an error about use of an unusable type.""" name = None # type: Optional[str] if isinstance(callee_type, FunctionLike): name = callable_name(callee_type) if name is not None: self.fail('{} does not return a value'.format(capitalize(name)), context) else: self.fail('Function does not return a value', context) def deleted_as_rvalue(self, typ: DeletedType, context: Context) -> None: """Report an error about using an deleted type as an rvalue.""" if typ.source is None: s = "" else: s = " '{}'".format(typ.source) self.fail('Trying to read deleted variable{}'.format(s), context) def deleted_as_lvalue(self, typ: DeletedType, context: Context) -> None: """Report an error about using an deleted type as an lvalue. Currently, this only occurs when trying to assign to an exception variable outside the local except: blocks. 
""" if typ.source is None: s = "" else: s = " '{}'".format(typ.source) self.fail('Assignment to variable{} outside except: block'.format(s), context) def no_variant_matches_arguments(self, overload: Overloaded, arg_types: List[Type], context: Context) -> None: name = callable_name(overload) if name: self.fail('No overload variant of {} matches argument types {}' .format(name, arg_types), context) else: self.fail('No overload variant matches argument types {}'.format(arg_types), context) def wrong_number_values_to_unpack(self, provided: int, expected: int, context: Context) -> None: if provided < expected: if provided == 1: self.fail('Need more than 1 value to unpack ({} expected)'.format(expected), context) else: self.fail('Need more than {} values to unpack ({} expected)'.format( provided, expected), context) elif provided > expected: self.fail('Too many values to unpack ({} expected, {} provided)'.format( expected, provided), context) def type_not_iterable(self, type: Type, context: Context) -> None: self.fail('\'{}\' object is not iterable'.format(type), context) def incompatible_operator_assignment(self, op: str, context: Context) -> None: self.fail('Result type of {} incompatible in assignment'.format(op), context) def signature_incompatible_with_supertype( self, name: str, name_in_super: str, supertype: str, context: Context) -> None: target = self.override_target(name, name_in_super, supertype) self.fail('Signature of "{}" incompatible with {}'.format( name, target), context) def argument_incompatible_with_supertype( self, arg_num: int, name: str, name_in_supertype: str, supertype: str, context: Context) -> None: target = self.override_target(name, name_in_supertype, supertype) self.fail('Argument {} of "{}" incompatible with {}' .format(arg_num, name, target), context) def return_type_incompatible_with_supertype( self, name: str, name_in_supertype: str, supertype: str, context: Context) -> None: target = self.override_target(name, name_in_supertype, supertype) self.fail('Return type of "{}" incompatible with {}' .format(name, target), context) def override_target(self, name: str, name_in_super: str, supertype: str) -> str: target = 'supertype "{}"'.format(supertype) if name_in_super != name: target = '"{}" of {}'.format(name_in_super, target) return target def incompatible_type_application(self, expected_arg_count: int, actual_arg_count: int, context: Context) -> None: if expected_arg_count == 0: self.fail('Type application targets a non-generic function or class', context) elif actual_arg_count > expected_arg_count: self.fail('Type application has too many types ({} expected)' .format(expected_arg_count), context) else: self.fail('Type application has too few types ({} expected)' .format(expected_arg_count), context) def could_not_infer_type_arguments(self, callee_type: CallableType, n: int, context: Context) -> None: callee_name = callable_name(callee_type) if callee_name is not None and n > 0: self.fail('Cannot infer type argument {} of {}'.format(n, callee_name), context) else: self.fail('Cannot infer function type argument', context) def invalid_var_arg(self, typ: Type, context: Context) -> None: self.fail('List or tuple expected as variable arguments', context) def invalid_keyword_var_arg(self, typ: Type, is_mapping: bool, context: Context) -> None: if isinstance(typ, Instance) and is_mapping: self.fail('Keywords must be strings', context) else: suffix = '' if isinstance(typ, Instance): suffix = ', not {}'.format(self.format(typ)) self.fail( 'Argument after ** must be a 
mapping{}'.format(suffix), context) def undefined_in_superclass(self, member: str, context: Context) -> None: self.fail('"{}" undefined in superclass'.format(member), context) def first_argument_for_super_must_be_type(self, actual: Type, context: Context) -> None: if isinstance(actual, Instance): # Don't include type of instance, because it can look confusingly like a type # object. type_str = 'a non-type instance' else: type_str = self.format(actual) self.fail('Argument 1 for "super" must be a type object; got {}'.format(type_str), context) def too_few_string_formatting_arguments(self, context: Context) -> None: self.fail('Not enough arguments for format string', context) def too_many_string_formatting_arguments(self, context: Context) -> None: self.fail('Not all arguments converted during string formatting', context) def unsupported_placeholder(self, placeholder: str, context: Context) -> None: self.fail('Unsupported format character \'%s\'' % placeholder, context) def string_interpolation_with_star_and_key(self, context: Context) -> None: self.fail('String interpolation contains both stars and mapping keys', context) def requires_int_or_char(self, context: Context) -> None: self.fail('%c requires int or char', context) def key_not_in_mapping(self, key: str, context: Context) -> None: self.fail('Key \'%s\' not found in mapping' % key, context) def string_interpolation_mixing_key_and_non_keys(self, context: Context) -> None: self.fail('String interpolation mixes specifier with and without mapping keys', context) def cannot_determine_type(self, name: str, context: Context) -> None: self.fail("Cannot determine type of '%s'" % name, context) def cannot_determine_type_in_base(self, name: str, base: str, context: Context) -> None: self.fail("Cannot determine type of '%s' in base class '%s'" % (name, base), context) def no_formal_self(self, name: str, item: CallableType, context: Context) -> None: self.fail('Attribute function "%s" with type %s does not accept self argument' % (name, self.format(item)), context) def incompatible_self_argument(self, name: str, arg: Type, sig: CallableType, is_classmethod: bool, context: Context) -> None: kind = 'class attribute function' if is_classmethod else 'attribute function' self.fail('Invalid self argument %s to %s "%s" with type %s' % (self.format(arg), kind, name, self.format(sig)), context) def incompatible_conditional_function_def(self, defn: FuncDef) -> None: self.fail('All conditional function variants must have identical ' 'signatures', defn) def cannot_instantiate_abstract_class(self, class_name: str, abstract_attributes: List[str], context: Context) -> None: attrs = format_string_list("'%s'" % a for a in abstract_attributes) self.fail("Cannot instantiate abstract class '%s' with abstract " "attribute%s %s" % (class_name, plural_s(abstract_attributes), attrs), context) def base_class_definitions_incompatible(self, name: str, base1: TypeInfo, base2: TypeInfo, context: Context) -> None: self.fail('Definition of "{}" in base class "{}" is incompatible ' 'with definition in base class "{}"'.format( name, base1.name(), base2.name()), context) def cant_assign_to_method(self, context: Context) -> None: self.fail(CANNOT_ASSIGN_TO_METHOD, context) def cant_assign_to_classvar(self, name: str, context: Context) -> None: self.fail('Cannot assign to class variable "%s" via instance' % name, context) def read_only_property(self, name: str, type: TypeInfo, context: Context) -> None: self.fail('Property "{}" defined in "{}" is read-only'.format( name, 
type.name()), context) def incompatible_typevar_value(self, callee: CallableType, typ: Type, typevar_name: str, context: Context) -> None: self.fail(INCOMPATIBLE_TYPEVAR_VALUE.format(typevar_name, callable_name(callee) or 'function', self.format(typ)), context) def overloaded_signatures_overlap(self, index1: int, index2: int, context: Context) -> None: self.fail('Overloaded function signatures {} and {} overlap with ' 'incompatible return types'.format(index1, index2), context) def overloaded_signatures_arg_specific(self, index1: int, context: Context) -> None: self.fail('Overloaded function implementation does not accept all possible arguments ' 'of signature {}'.format(index1), context) def overloaded_signatures_ret_specific(self, index1: int, context: Context) -> None: self.fail('Overloaded function implementation cannot produce return type ' 'of signature {}'.format(index1), context) def operator_method_signatures_overlap( self, reverse_class: str, reverse_method: str, forward_class: str, forward_method: str, context: Context) -> None: self.fail('Signatures of "{}" of "{}" and "{}" of "{}" ' 'are unsafely overlapping'.format( reverse_method, reverse_class, forward_method, forward_class), context) def forward_operator_not_callable( self, forward_method: str, context: Context) -> None: self.fail('Forward operator "{}" is not callable'.format( forward_method), context) def signatures_incompatible(self, method: str, other_method: str, context: Context) -> None: self.fail('Signatures of "{}" and "{}" are incompatible'.format( method, other_method), context) def yield_from_invalid_operand_type(self, expr: Type, context: Context) -> Type: text = self.format(expr) if self.format(expr) != 'object' else expr self.fail('"yield from" can\'t be applied to {}'.format(text), context) return AnyType(TypeOfAny.from_error) def invalid_signature(self, func_type: Type, context: Context) -> None: self.fail('Invalid signature "{}"'.format(func_type), context) def reveal_type(self, typ: Type, context: Context) -> None: self.fail('Revealed type is \'{}\''.format(typ), context) def unsupported_type_type(self, item: Type, context: Context) -> None: self.fail('Unsupported type Type[{}]'.format(self.format(item)), context) def redundant_cast(self, typ: Type, context: Context) -> None: self.note('Redundant cast to {}'.format(self.format(typ)), context) def unimported_type_becomes_any(self, prefix: str, typ: Type, ctx: Context) -> None: self.fail("{} becomes {} due to an unfollowed import".format(prefix, self.format(typ)), ctx) def explicit_any(self, ctx: Context) -> None: self.fail('Explicit "Any" is not allowed', ctx) def unexpected_typeddict_keys( self, typ: TypedDictType, expected_keys: List[str], actual_keys: List[str], context: Context) -> None: actual_set = set(actual_keys) expected_set = set(expected_keys) if not typ.is_anonymous(): # Generate simpler messages for some common special cases. if actual_set < expected_set: # Use list comprehension instead of set operations to preserve order. missing = [key for key in expected_keys if key not in actual_set] self.fail('{} missing for TypedDict {}'.format( format_key_list(missing, short=True).capitalize(), self.format(typ)), context) return else: extra = [key for key in actual_keys if key not in expected_set] if extra: # If there are both extra and missing keys, only report extra ones for # simplicity. 
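# Added illustrative comment (not part of the original source): for a named
# TypedDict "Point" with keys 'x' and 'y', the two special-case branches here
# report messages like
#     Key 'y' missing for TypedDict "Point"
#     Extra key 'z' for TypedDict "Point"
# while the generic "Expected ... but found ..." message below handles the
# remaining cases, including anonymous TypedDicts.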
self.fail('Extra {} for TypedDict {}'.format( format_key_list(extra, short=True), self.format(typ)), context) return if not expected_keys: expected = '(no keys)' else: expected = format_key_list(expected_keys) found = format_key_list(actual_keys, short=True) if actual_keys and actual_set < expected_set: found = 'only {}'.format(found) self.fail('Expected {} but found {}'.format(expected, found), context) def typeddict_key_must_be_string_literal( self, typ: TypedDictType, context: Context) -> None: self.fail( 'TypedDict key must be a string literal; expected one of {}'.format( format_item_name_list(typ.items.keys())), context) def typeddict_key_not_found( self, typ: TypedDictType, item_name: str, context: Context) -> None: if typ.is_anonymous(): self.fail('\'{}\' is not a valid TypedDict key; expected one of {}'.format( item_name, format_item_name_list(typ.items.keys())), context) else: self.fail("TypedDict {} has no key '{}'".format(self.format(typ), item_name), context) def type_arguments_not_allowed(self, context: Context) -> None: self.fail('Parameterized generics cannot be used with class or instance checks', context) def disallowed_any_type(self, typ: Type, context: Context) -> None: if isinstance(typ, AnyType): message = 'Expression has type "Any"' else: message = 'Expression type contains "Any" (has type {})'.format(self.format(typ)) self.fail(message, context) def incorrectly_returning_any(self, typ: Type, context: Context) -> None: message = 'Returning Any from function declared to return {}'.format( self.format(typ)) self.warn(message, context) def untyped_decorated_function(self, typ: Type, context: Context) -> None: if isinstance(typ, AnyType): self.fail("Function is untyped after decorator transformation", context) else: self.fail('Type of decorated function contains type "Any" ({})'.format( self.format(typ)), context) def typed_function_untyped_decorator(self, func_name: str, context: Context) -> None: self.fail('Untyped decorator makes function "{}" untyped'.format(func_name), context) def bad_proto_variance(self, actual: int, tvar_name: str, expected: int, context: Context) -> None: msg = capitalize("{} type variable '{}' used in protocol where" " {} one is expected".format(variance_string(actual), tvar_name, variance_string(expected))) self.fail(msg, context) def concrete_only_assign(self, typ: Type, context: Context) -> None: self.fail("Can only assign concrete classes to a variable of type {}" .format(self.format(typ)), context) def concrete_only_call(self, typ: Type, context: Context) -> None: self.fail("Only concrete class can be given where {} is expected" .format(self.format(typ)), context) def report_non_method_protocol(self, tp: TypeInfo, members: List[str], context: Context) -> None: self.fail("Only protocols that don't have non-method members can be" " used with issubclass()", context) if len(members) < 3: attrs = ', '.join(members) self.note('Protocol "{}" has non-method member(s): {}' .format(tp.name(), attrs), context) def note_call(self, subtype: Type, call: Type, context: Context) -> None: self.note('"{}.__call__" has type {}'.format(self.format_bare(subtype), self.format(call, verbosity=1)), context) def report_protocol_problems(self, subtype: Union[Instance, TupleType, TypedDictType], supertype: Instance, context: Context) -> None: """Report possible protocol conflicts between 'subtype' and 'supertype'. This includes missing members, incompatible types, and incompatible attribute flags, such as settable vs read-only or class variable vs instance variable. 
""" from mypy.subtypes import is_subtype, IS_SETTABLE, IS_CLASSVAR, IS_CLASS_OR_STATIC OFFSET = 4 # Four spaces, so that notes will look like this: # note: 'Cls' is missing following 'Proto' members: # note: method, attr MAX_ITEMS = 2 # Maximum number of conflicts, missing members, and overloads shown # List of special situations where we don't want to report additional problems exclusions = {TypedDictType: ['typing.Mapping'], TupleType: ['typing.Iterable', 'typing.Sequence'], Instance: []} # type: Dict[type, List[str]] if supertype.type.fullname() in exclusions[type(subtype)]: return if any(isinstance(tp, UninhabitedType) for tp in supertype.args): # We don't want to add notes for failed inference (e.g. Iterable[]). # This will be only confusing a user even more. return if isinstance(subtype, (TupleType, TypedDictType)): if not isinstance(subtype.fallback, Instance): return subtype = subtype.fallback # Report missing members missing = get_missing_protocol_members(subtype, supertype) if (missing and len(missing) < len(supertype.type.protocol_members) and len(missing) <= MAX_ITEMS): self.note("'{}' is missing following '{}' protocol member{}:" .format(subtype.type.name(), supertype.type.name(), plural_s(missing)), context) self.note(', '.join(missing), context, offset=OFFSET) elif len(missing) > MAX_ITEMS or len(missing) == len(supertype.type.protocol_members): # This is an obviously wrong type: too many missing members return # Report member type conflicts conflict_types = get_conflict_protocol_types(subtype, supertype) if conflict_types and (not is_subtype(subtype, erase_type(supertype)) or not subtype.type.defn.type_vars or not supertype.type.defn.type_vars): self.note('Following member(s) of {} have ' 'conflicts:'.format(self.format(subtype)), context) for name, got, exp in conflict_types[:MAX_ITEMS]: if (not isinstance(exp, (CallableType, Overloaded)) or not isinstance(got, (CallableType, Overloaded))): self.note('{}: expected {}, got {}'.format(name, *self.format_distinctly(exp, got)), context, offset=OFFSET) else: self.note('Expected:', context, offset=OFFSET) if isinstance(exp, CallableType): self.note(self.pretty_callable(exp), context, offset=2 * OFFSET) else: assert isinstance(exp, Overloaded) self.pretty_overload(exp, context, OFFSET, MAX_ITEMS) self.note('Got:', context, offset=OFFSET) if isinstance(got, CallableType): self.note(self.pretty_callable(got), context, offset=2 * OFFSET) else: assert isinstance(got, Overloaded) self.pretty_overload(got, context, OFFSET, MAX_ITEMS) self.print_more(conflict_types, context, OFFSET, MAX_ITEMS) # Report flag conflicts (i.e. settable vs read-only etc.) 
conflict_flags = get_bad_protocol_flags(subtype, supertype) for name, subflags, superflags in conflict_flags[:MAX_ITEMS]: if IS_CLASSVAR in subflags and IS_CLASSVAR not in superflags: self.note('Protocol member {}.{} expected instance variable,' ' got class variable'.format(supertype.type.name(), name), context) if IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags: self.note('Protocol member {}.{} expected class variable,' ' got instance variable'.format(supertype.type.name(), name), context) if IS_SETTABLE in superflags and IS_SETTABLE not in subflags: self.note('Protocol member {}.{} expected settable variable,' ' got read-only attribute'.format(supertype.type.name(), name), context) if IS_CLASS_OR_STATIC in superflags and IS_CLASS_OR_STATIC not in subflags: self.note('Protocol member {}.{} expected class or static method' .format(supertype.type.name(), name), context) self.print_more(conflict_flags, context, OFFSET, MAX_ITEMS) def pretty_overload(self, tp: Overloaded, context: Context, offset: int, max_items: int) -> None: for item in tp.items()[:max_items]: self.note('@overload', context, offset=2 * offset) self.note(self.pretty_callable(item), context, offset=2 * offset) if len(tp.items()) > max_items: self.note('<{} more overload(s) not shown>'.format(len(tp.items()) - max_items), context, offset=2 * offset) def print_more(self, conflicts: Sequence[Any], context: Context, offset: int, max_items: int) -> None: if len(conflicts) > max_items: self.note('<{} more conflict(s) not shown>' .format(len(conflicts) - max_items), context, offset=offset) def pretty_callable(self, tp: CallableType) -> str: """Return a nice easily-readable representation of a callable type. For example: def [T <: int] f(self, x: int, y: T) -> None """ s = '' asterisk = False for i in range(len(tp.arg_types)): if s: s += ', ' if tp.arg_kinds[i] in (ARG_NAMED, ARG_NAMED_OPT) and not asterisk: s += '*, ' asterisk = True if tp.arg_kinds[i] == ARG_STAR: s += '*' asterisk = True if tp.arg_kinds[i] == ARG_STAR2: s += '**' name = tp.arg_names[i] if name: s += name + ': ' s += self.format_bare(tp.arg_types[i]) if tp.arg_kinds[i] in (ARG_OPT, ARG_NAMED_OPT): s += ' = ...' # If we got a "special arg" (i.e: self, cls, etc...), prepend it to the arg list if tp.definition is not None and tp.definition.name() is not None: definition_args = getattr(tp.definition, 'arg_names') if definition_args and tp.arg_names != definition_args \ and len(definition_args) > 0: if s: s = ', ' + s s = definition_args[0] + s s = '{}({})'.format(tp.definition.name(), s) else: s = '({})'.format(s) s += ' -> ' + self.format_bare(tp.ret_type) if tp.variables: tvars = [] for tvar in tp.variables: if (tvar.upper_bound and isinstance(tvar.upper_bound, Instance) and tvar.upper_bound.type.fullname() != 'builtins.object'): tvars.append('{} <: {}'.format(tvar.name, self.format_bare(tvar.upper_bound))) elif tvar.values: tvars.append('{} in ({})' .format(tvar.name, ', '.join([self.format_bare(tp) for tp in tvar.values]))) else: tvars.append(tvar.name) s = '[{}] {}'.format(', '.join(tvars), s) return 'def {}'.format(s) def variance_string(variance: int) -> str: if variance == COVARIANT: return 'covariant' elif variance == CONTRAVARIANT: return 'contravariant' else: return 'invariant' def get_missing_protocol_members(left: Instance, right: Instance) -> List[str]: """Find all protocol members of 'right' that are not implemented (i.e. completely missing) in 'left'. 
""" from mypy.subtypes import find_member assert right.type.is_protocol missing = [] # type: List[str] for member in right.type.protocol_members: if not find_member(member, left, left): missing.append(member) return missing def get_conflict_protocol_types(left: Instance, right: Instance) -> List[Tuple[str, Type, Type]]: """Find members that are defined in 'left' but have incompatible types. Return them as a list of ('member', 'got', 'expected'). """ from mypy.subtypes import find_member, is_subtype, get_member_flags, IS_SETTABLE assert right.type.is_protocol conflicts = [] # type: List[Tuple[str, Type, Type]] for member in right.type.protocol_members: if member in ('__init__', '__new__'): continue supertype = find_member(member, right, left) assert supertype is not None subtype = find_member(member, left, left) if not subtype: continue is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=True) if IS_SETTABLE in get_member_flags(member, right.type): is_compat = is_compat and is_subtype(supertype, subtype) if not is_compat: conflicts.append((member, subtype, supertype)) return conflicts def get_bad_protocol_flags(left: Instance, right: Instance ) -> List[Tuple[str, Set[int], Set[int]]]: """Return all incompatible attribute flags for members that are present in both 'left' and 'right'. """ from mypy.subtypes import (find_member, get_member_flags, IS_SETTABLE, IS_CLASSVAR, IS_CLASS_OR_STATIC) assert right.type.is_protocol all_flags = [] # type: List[Tuple[str, Set[int], Set[int]]] for member in right.type.protocol_members: if find_member(member, left, left): item = (member, get_member_flags(member, left.type), get_member_flags(member, right.type)) all_flags.append(item) bad_flags = [] for name, subflags, superflags in all_flags: if (IS_CLASSVAR in subflags and IS_CLASSVAR not in superflags or IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags or IS_SETTABLE in superflags and IS_SETTABLE not in subflags or IS_CLASS_OR_STATIC in superflags and IS_CLASS_OR_STATIC not in subflags): bad_flags.append((name, subflags, superflags)) return bad_flags def capitalize(s: str) -> str: """Capitalize the first character of a string.""" if s == '': return '' else: return s[0].upper() + s[1:] def extract_type(name: str) -> str: """If the argument is the name of a method (of form C.m), return the type portion in quotes (e.g. "y"). Otherwise, return the string unmodified. """ name = re.sub('^"[a-zA-Z0-9_]+" of ', '', name) return name def strip_quotes(s: str) -> str: """Strip a double quote at the beginning and end of the string, if any.""" s = re.sub('^"', '', s) s = re.sub('"$', '', s) return s def plural_s(s: Sequence[Any]) -> str: if len(s) > 1: return 's' else: return '' def format_string_list(s: Iterable[str]) -> str: lst = list(s) assert len(lst) > 0 if len(lst) == 1: return lst[0] elif len(lst) <= 5: return '%s and %s' % (', '.join(lst[:-1]), lst[-1]) else: return '%s, ... 
and %s (%i methods suppressed)' % ( ', '.join(lst[:2]), lst[-1], len(lst) - 3) def format_item_name_list(s: Iterable[str]) -> str: lst = list(s) if len(lst) <= 5: return '(' + ', '.join(["'%s'" % name for name in lst]) + ')' else: return '(' + ', '.join(["'%s'" % name for name in lst[:5]]) + ', ...)' def callable_name(type: FunctionLike) -> Optional[str]: name = type.get_name() if name is not None and name[0] != '<': return '"{}"'.format(name).replace(' of ', '" of "') return name def for_function(callee: CallableType) -> str: name = callable_name(callee) if name is not None: return ' for {}'.format(name) return '' def find_defining_module(modules: Dict[str, MypyFile], typ: CallableType) -> Optional[MypyFile]: if not typ.definition: return None fullname = typ.definition.fullname() if fullname is not None and '.' in fullname: for i in range(fullname.count('.')): module_name = fullname.rsplit('.', i + 1)[0] try: return modules[module_name] except KeyError: pass assert False, "Couldn't determine module from CallableType" return None def temp_message_builder() -> MessageBuilder: """Return a message builder usable for throwaway errors (which may not format properly).""" return MessageBuilder(Errors(), {}) # For hard-coding suggested missing member alternatives. COMMON_MISTAKES = { 'add': ('append', 'extend'), } # type: Dict[str, Sequence[str]] def best_matches(current: str, options: Iterable[str]) -> List[str]: ratios = {v: difflib.SequenceMatcher(a=current, b=v).ratio() for v in options} return sorted((o for o in options if ratios[o] > 0.75), reverse=True, key=lambda v: (ratios[v], v)) def pretty_or(args: List[str]) -> str: quoted = ['"' + a + '"' for a in args] if len(quoted) == 1: return quoted[0] if len(quoted) == 2: return "{} or {}".format(quoted[0], quoted[1]) return ", ".join(quoted[:-1]) + ", or " + quoted[-1] def append_invariance_notes(notes: List[str], arg_type: Instance, expected_type: Instance) -> List[str]: """Explain that the type is invariant and give notes for how to solve the issue.""" from mypy.subtypes import is_subtype from mypy.sametypes import is_same_type invariant_type = '' covariant_suggestion = '' if (arg_type.type.fullname() == 'builtins.list' and expected_type.type.fullname() == 'builtins.list' and is_subtype(arg_type.args[0], expected_type.args[0])): invariant_type = 'List' covariant_suggestion = 'Consider using "Sequence" instead, which is covariant' elif (arg_type.type.fullname() == 'builtins.dict' and expected_type.type.fullname() == 'builtins.dict' and is_same_type(arg_type.args[0], expected_type.args[0]) and is_subtype(arg_type.args[1], expected_type.args[1])): invariant_type = 'Dict' covariant_suggestion = ('Consider using "Mapping" instead, ' 'which is covariant in the value type') if invariant_type and covariant_suggestion: notes.append( '"{}" is invariant -- see '.format(invariant_type) + 'http://mypy.readthedocs.io/en/latest/common_issues.html#variance') notes.append(covariant_suggestion) return notes def make_inferred_type_note(context: Context, subtype: Type, supertype: Type, supertype_str: str) -> str: """Explain that the user may have forgotten to type a variable. The user does not expect an error if the inferred container type is the same as the return type of a function and the argument type(s) are a subtype of the argument type(s) of the return type. This note suggests that they add a type annotation with the return type instead of relying on the inferred type. 
""" from mypy.subtypes import is_subtype if (isinstance(subtype, Instance) and isinstance(supertype, Instance) and subtype.type.fullname() == supertype.type.fullname() and subtype.args and supertype.args and isinstance(context, ReturnStmt) and isinstance(context.expr, NameExpr) and isinstance(context.expr.node, Var) and context.expr.node.is_inferred): for subtype_arg, supertype_arg in zip(subtype.args, supertype.args): if not is_subtype(subtype_arg, supertype_arg): return '' var_name = context.expr.name return 'Perhaps you need a type annotation for "{}"? Suggestion: {}'.format( var_name, supertype_str) return '' def format_key_list(keys: List[str], *, short: bool = False) -> str: reprs = [repr(key) for key in keys] td = '' if short else 'TypedDict ' if len(keys) == 0: return 'no {}keys'.format(td) elif len(keys) == 1: return '{}key {}'.format(td, reprs[0]) else: return '{}keys ({})'.format(td, ', '.join(reprs)) mypy-0.560/mypy/moduleinfo.py0000644€tŠÔÚ€2›s®0000002310713215007205022403 0ustar jukkaDROPBOX\Domain Users00000000000000"""Collection of names of notable Python library modules. Both standard library and third party modules are included. The selection criteria for third party modules is somewhat arbitrary. For packages we usually just include the top-level package name, but sometimes some or all submodules are enumerated. In the latter case if the top-level name is included we include all possible submodules (this is an implementation limitation). These are used to give more useful error messages when there is no stub for a module. """ from typing import Set third_party_modules = { # From Python 3 Wall of Superpowers (https://python3wos.appspot.com/) 'simplejson', 'requests', 'six', 'pip', 'virtualenv', 'boto', 'setuptools', 'dateutil', 'pytz', 'pyasn1', 'nose', 'yaml', 'lxml', 'jinja2', 'docutils', 'markupsafe', 'paramiko', 'Crypto', 'rsa', 'django', 'coverage', 'pika', 'colorama', 'ecdsa', 'psycopg2', 'httplib2', 'mock', 'pycparser', 'pep8', 'redis', 'pymongo', 'sqlalchemy', 'pygments', 'werkzeug', 'selenium', 'supervisor', 'zc.buildout', 'meld3', 'PIL', 'paste', 'flask', '_mysql', 'MySQLdb', 'greenlet', 'carbon', 'psutil', 'paste.deploy', 'kombu', 'babel', 'anyjson', 'py', 'OpenSSL', 'numpy', 'sphinx', 'tornado', 'zope.interface', 'itsdangerous', 'fabric', 'amqp', 'mako', 'pyflakes', 'concurrent.futures', 'pyparsing', 'celery', 'unittest2', 'setuptools_git', 'decorator', 'ordereddict', 'urllib3', 'iso8601', 'gunicorn', 'prettytable', 'webob', 'flake8', 'pytest', 'billiard', 'backports.ssl_match_hostname', 'south', 'gevent', 'netaddr', 'pylint', 'logilab.common', 'twisted', 'msgpack', 'blessings', 'oauth2client', 'ujson', 'mozrunner', 'googleapiclient', 'markdown', 'pyasn1', 'html5lib', 'isodate', 'tox', 'mozdevice', 'cython', 'raven', 'mozprocess', 'mozprofile', 'mozinfo', 'mozlog', 'pandas', 'lockfile', 'pycurl', 'novaclient', 'ply', 'eventlet', 'suds', 'zmq', 'memcache', 'netifaces', 'repoze.lru', 'testtools', 'cups', 'oauth2', 'scipy', 'thrift', 'statsd', 'BeautifulSoup', 'bs4', 'sklearn', 'cmd2', 'alembic', 'gflags', 'gflags_validators', 'sqlparse', 'debug_toolbar', 'cov_core', 'unidecode', 'websocket', 'webtest', 'django_extensions', 'networkx', 'newrelic', 'pymysql', 'pytest_cov', 'matplotlib', 'djcelery', 'google.protobuf', 'compressor', 'aspen', 'subunit', 'libcloud', 'versiontools', 'chardet', 'blinker', 'pystache', 'passlib', 'xlrd', 'pygeoip', 'configobj', 'cjson', 'testrepository', 'zc.recipe.egg', 'jsonpointer', 'amqplib', 'rdflib', 'SPARQLWrapper', 'jsonpatch', 
'mrjob', 'pexpect', 'google.apputils', 'ansible', 'django_nose', 'd2to1', 'routes', 'tempita', 'testscenarios', 'demjson', 'openid', 'uwsgidecorators', 'wtforms', 'dns', 'alabaster', 'M2Crypto', 'reportlab', 'feedparser', 'ldap', 'hgtools', 'whoosh', 'path', 'nosexcover', 'iptools', 'xlwt', 'keyring', 'termcolor', 'flask_sqlalchemy', 'httpretty', 'magic', 'leaderboard', 'sqlobject', 'nltk', # Skipped (name considered too generic): # - fixtures # - migrate (from sqlalchemy-migrate) # - git (GitPython) # Other 'formencode', 'pkg_resources', 'wx', 'gi.repository', 'pygtk', 'gtk', 'PyQt4', 'PyQt5', 'pylons', # for use in tests '__dummy_third_party1', } # Modules and packages common to Python 2.7 and 3.x. common_std_lib_modules = { 'abc', 'aifc', 'antigravity', 'argparse', 'array', 'ast', 'asynchat', 'asyncore', 'audioop', 'base64', 'bdb', 'binascii', 'binhex', 'bisect', 'bz2', 'cProfile', 'calendar', 'cgi', 'cgitb', 'chunk', 'cmath', 'cmd', 'code', 'codecs', 'codeop', 'collections', 'colorsys', 'compileall', 'contextlib', 'copy', 'crypt', 'csv', 'ctypes', 'curses', 'datetime', 'decimal', 'difflib', 'dis', 'doctest', 'dummy_threading', 'email', 'fcntl', 'filecmp', 'fileinput', 'fnmatch', 'formatter', 'fractions', 'ftplib', 'functools', 'genericpath', 'getopt', 'getpass', 'gettext', 'glob', 'grp', 'gzip', 'hashlib', 'heapq', 'hmac', 'imaplib', 'imghdr', 'importlib', 'inspect', 'io', 'json', 'keyword', 'lib2to3', 'linecache', 'locale', 'logging', 'macpath', 'macurl2path', 'mailbox', 'mailcap', 'math', 'mimetypes', 'mmap', 'modulefinder', 'msilib', 'multiprocessing', 'netrc', 'nis', 'nntplib', 'ntpath', 'nturl2path', 'numbers', 'opcode', 'operator', 'optparse', 'os', 'ossaudiodev', 'parser', 'pdb', 'pickle', 'pickletools', 'pipes', 'pkgutil', 'platform', 'plistlib', 'poplib', 'posixpath', 'pprint', 'profile', 'pstats', 'pty', 'py_compile', 'pyclbr', 'pydoc', 'pydoc_data', 'pyexpat', 'quopri', 'random', 're', 'resource', 'rlcompleter', 'runpy', 'sched', 'select', 'shelve', 'shlex', 'shutil', 'site', 'smtpd', 'smtplib', 'sndhdr', 'socket', 'spwd', 'sqlite3', 'sqlite3.dbapi2', 'sqlite3.dump', 'sre_compile', 'sre_constants', 'sre_parse', 'ssl', 'stat', 'string', 'stringprep', 'struct', 'subprocess', 'sunau', 'symbol', 'symtable', 'sysconfig', 'syslog', 'tabnanny', 'tarfile', 'telnetlib', 'tempfile', 'termios', 'textwrap', 'this', 'threading', 'timeit', 'token', 'tokenize', 'trace', 'traceback', 'tty', 'types', 'unicodedata', 'unittest', 'urllib', 'uu', 'uuid', 'warnings', 'wave', 'weakref', 'webbrowser', 'wsgiref', 'xdrlib', 'xml.dom', 'xml.dom.NodeFilter', 'xml.dom.domreg', 'xml.dom.expatbuilder', 'xml.dom.minicompat', 'xml.dom.minidom', 'xml.dom.pulldom', 'xml.dom.xmlbuilder', 'xml.etree', 'xml.etree.ElementInclude', 'xml.etree.ElementPath', 'xml.etree.ElementTree', 'xml.etree.cElementTree', 'xml.parsers', 'xml.parsers.expat', 'xml.sax', 'xml.sax._exceptions', 'xml.sax.expatreader', 'xml.sax.handler', 'xml.sax.saxutils', 'xml.sax.xmlreader', 'zipfile', 'zlib', # fake names to use in tests '__dummy_stdlib1', '__dummy_stdlib2', } # Python 2 standard library modules. 
python2_std_lib_modules = common_std_lib_modules | { 'BaseHTTPServer', 'Bastion', 'CGIHTTPServer', 'ConfigParser', 'Cookie', 'DocXMLRPCServer', 'HTMLParser', 'MimeWriter', 'Queue', 'SimpleHTTPServer', 'SimpleXMLRPCServer', 'SocketServer', 'StringIO', 'UserDict', 'UserList', 'UserString', 'anydbm', 'atexit', 'audiodev', 'bsddb', 'cPickle', 'cStringIO', 'commands', 'cookielib', 'copy_reg', 'curses.wrapper', 'dbhash', 'dircache', 'dumbdbm', 'dummy_thread', 'fpformat', 'future_builtins', 'hotshot', 'htmlentitydefs', 'htmllib', 'httplib', 'ihooks', 'imputil', 'itertools', 'linuxaudiodev', 'markupbase', 'md5', 'mhlib', 'mimetools', 'mimify', 'multifile', 'multiprocessing.forking', 'mutex', 'new', 'os2emxpath', 'popen2', 'posixfile', 'repr', 'rexec', 'rfc822', 'robotparser', 'sets', 'sgmllib', 'sha', 'sre', 'statvfs', 'stringold', 'strop', 'sunaudio', 'time', 'toaiff', 'urllib2', 'urlparse', 'user', 'whichdb', 'xmllib', 'xmlrpclib', } # Python 3 standard library modules (based on Python 3.5.0). python3_std_lib_modules = common_std_lib_modules | { 'asyncio', 'collections.abc', 'concurrent', 'concurrent.futures', 'configparser', 'copyreg', 'dbm', 'ensurepip', 'enum', 'html', 'http', 'imp', 'ipaddress', 'lzma', 'pathlib', 'queue', 'readline', 'reprlib', 'selectors', 'signal', 'socketserver', 'statistics', 'tkinter', 'tracemalloc', 'turtle', 'turtledemo', 'typing', 'unittest.mock', 'urllib.error', 'urllib.parse', 'urllib.request', 'urllib.response', 'urllib.robotparser', 'venv', 'xmlrpc', 'xxlimited', 'zipapp', } def is_third_party_module(id: str) -> bool: return is_in_module_collection(third_party_modules, id) def is_py2_std_lib_module(id: str) -> bool: return is_in_module_collection(python2_std_lib_modules, id) def is_py3_std_lib_module(id: str) -> bool: return is_in_module_collection(python3_std_lib_modules, id) def is_in_module_collection(collection: Set[str], id: str) -> bool: components = id.split('.') for prefix_length in range(1, len(components) + 1): if '.'.join(components[:prefix_length]) in collection: return True return False mypy-0.560/mypy/myunit/0000755€tŠÔÚ€2›s®0000000000013215007242021213 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/mypy/myunit/__init__.py0000644€tŠÔÚ€2›s®0000002711613215007206023333 0ustar jukkaDROPBOX\Domain Users00000000000000import importlib import os import sys import re import tempfile import time import traceback from typing import List, Tuple, Any, Callable, Union, cast, Optional from types import TracebackType # TODO remove global state is_verbose = False is_quiet = False patterns = [] # type: List[str] times = [] # type: List[Tuple[float, str]] class AssertionFailure(Exception): """Exception used to signal failed test cases.""" def __init__(self, s: Optional[str] = None) -> None: if s: super().__init__(s) else: super().__init__() class SkipTestCaseException(Exception): """Exception used to signal skipped test cases.""" pass def assert_true(b: bool, msg: Optional[str] = None) -> None: if not b: raise AssertionFailure(msg) def assert_false(b: bool, msg: Optional[str] = None) -> None: if b: raise AssertionFailure(msg) def good_repr(obj: object) -> str: if isinstance(obj, str): if obj.count('\n') > 1: bits = ["'''\\"] for line in obj.split('\n'): # force repr to use ' not ", then cut it off bits.append(repr('"' + line)[2:-1]) bits[-1] += "'''" return '\n'.join(bits) return repr(obj) def assert_equal(a: object, b: object, fmt: str = '{} != {}') -> None: if a != b: raise AssertionFailure(fmt.format(good_repr(a), good_repr(b))) def assert_not_equal(a: 
object, b: object, fmt: str = '{} == {}') -> None: if a == b: raise AssertionFailure(fmt.format(good_repr(a), good_repr(b))) def assert_raises(typ: type, *rest: Any) -> None: """Usage: assert_raises(exception class[, message], function[, args]) Call function with the given arguments and expect an exception of the given type. TODO use overloads for better type checking """ # Parse arguments. msg = None # type: Optional[str] if isinstance(rest[0], str) or rest[0] is None: msg = rest[0] rest = rest[1:] f = rest[0] args = [] # type: List[Any] if len(rest) > 1: args = rest[1] assert len(rest) <= 2 # Perform call and verify the exception. try: f(*args) except BaseException as e: if isinstance(e, KeyboardInterrupt): raise assert_type(typ, e) if msg: assert_equal(e.args[0], msg, 'Invalid message {}, expected {}') else: raise AssertionFailure('No exception raised') def assert_type(typ: type, value: object) -> None: if type(value) != typ: raise AssertionFailure('Invalid type {}, expected {}'.format( typename(type(value)), typename(typ))) def fail() -> None: raise AssertionFailure() class TestCase: def __init__(self, name: str, suite: 'Optional[Suite]' = None, func: Optional[Callable[[], None]] = None) -> None: self.func = func self.name = name self.suite = suite self.old_cwd = None # type: Optional[str] self.tmpdir = None # type: Optional[tempfile.TemporaryDirectory[str]] def run(self) -> None: if self.func: self.func() def set_up(self) -> None: self.old_cwd = os.getcwd() self.tmpdir = tempfile.TemporaryDirectory(prefix='mypy-test-') os.chdir(self.tmpdir.name) os.mkdir('tmp') if self.suite: self.suite.set_up() def tear_down(self) -> None: if self.suite: self.suite.tear_down() assert self.old_cwd is not None and self.tmpdir is not None, \ "test was not properly set up" os.chdir(self.old_cwd) try: self.tmpdir.cleanup() except OSError: pass self.old_cwd = None self.tmpdir = None class Suite: def __init__(self) -> None: self.prefix = typename(type(self)) + '.' # Each test case is either a TestCase object or (str, function). self._test_cases = [] # type: List[Any] self.init() def set_up(self) -> None: pass def tear_down(self) -> None: pass def init(self) -> None: for m in dir(self): if m.startswith('test'): t = getattr(self, m) if isinstance(t, Suite): self.add_test((m + '.', t)) else: self.add_test(TestCase(m, self, getattr(self, m))) def add_test(self, test: Union[TestCase, Tuple[str, Callable[[], None]], Tuple[str, 'Suite']]) -> None: self._test_cases.append(test) def cases(self) -> List[Any]: return self._test_cases[:] def skip(self) -> None: raise SkipTestCaseException() def add_suites_from_module(suites: List[Suite], mod_name: str) -> None: mod = importlib.import_module(mod_name) got_suite = False for suite in mod.__dict__.values(): if isinstance(suite, type) and issubclass(suite, Suite) and suite is not Suite: got_suite = True suites.append(cast(Callable[[], Suite], suite)()) if not got_suite: # Sanity check in case e.g. it uses unittest instead of a myunit. # The codecs tests do since they need to be python2-compatible. sys.exit('Test module %s had no test!' 
% mod_name) class ListSuite(Suite): def __init__(self, suites: List[Suite]) -> None: for suite in suites: mod_name = type(suite).__module__.replace('.', '_') mod_name = mod_name.replace('mypy_', '') mod_name = mod_name.replace('test_', '') mod_name = mod_name.strip('_').replace('__', '_') type_name = type(suite).__name__ name = 'test_%s_%s' % (mod_name, type_name) setattr(self, name, suite) super().__init__() def main(args: Optional[List[str]] = None) -> None: global patterns, is_verbose, is_quiet if not args: args = sys.argv[1:] is_verbose = False is_quiet = False suites = [] # type: List[Suite] patterns = [] i = 0 while i < len(args): a = args[i] if a == '-v': is_verbose = True elif a == '-q': is_quiet = True elif a == '-m': i += 1 if i == len(args): sys.exit('-m requires an argument') add_suites_from_module(suites, args[i]) elif not a.startswith('-'): patterns.append(a) else: sys.exit('Usage: python -m mypy.myunit [-v] [-q]' + ' -m mypy.test.module [-m mypy.test.module ...] [filter ...]') i += 1 if len(patterns) == 0: patterns.append('*') if not suites: sys.exit('At least one -m argument is required') t = ListSuite(suites) num_total, num_fail, num_skip = run_test_recursive(t, 0, 0, 0, '', 0) skip_msg = '' if num_skip > 0: skip_msg = ', {} skipped'.format(num_skip) if num_fail == 0: if not is_quiet: print('%d test cases run%s, all passed.' % (num_total, skip_msg)) print('*** OK ***') else: sys.stderr.write('%d/%d test cases failed%s.\n' % (num_fail, num_total, skip_msg)) sys.stderr.write('*** FAILURE ***\n') sys.exit(1) def run_test_recursive(test: Any, num_total: int, num_fail: int, num_skip: int, prefix: str, depth: int) -> Tuple[int, int, int]: """The first argument may be TestCase, Suite or (str, Suite).""" if isinstance(test, TestCase): name = prefix + test.name for pattern in patterns: if match_pattern(name, pattern): match = True break else: match = False if match: is_fail, is_skip = run_single_test(name, test) if is_fail: num_fail += 1 if is_skip: num_skip += 1 num_total += 1 else: suite_prefix = '' if isinstance(test, list) or isinstance(test, tuple): suite = test[1] # type: Suite suite_prefix = test[0] else: suite = test suite_prefix = test.prefix for stest in suite.cases(): new_prefix = prefix if depth > 0: new_prefix = prefix + suite_prefix num_total, num_fail, num_skip = run_test_recursive( stest, num_total, num_fail, num_skip, new_prefix, depth + 1) return num_total, num_fail, num_skip def run_single_test(name: str, test: Any) -> Tuple[bool, bool]: if is_verbose: sys.stderr.write(name) sys.stderr.flush() time0 = time.time() test.set_up() # FIX: check exceptions exc_traceback = None # type: Any try: test.run() except BaseException as e: if isinstance(e, KeyboardInterrupt): raise exc_type, exc_value, exc_traceback = sys.exc_info() finally: test.tear_down() times.append((time.time() - time0, name)) if exc_traceback: if isinstance(exc_value, SkipTestCaseException): if is_verbose: sys.stderr.write(' (skipped)\n') return False, True else: assert exc_type is not None and exc_value is not None handle_failure(name, exc_type, exc_value, exc_traceback) return True, False elif is_verbose: sys.stderr.write('\n') return False, False def handle_failure(name: str, exc_type: type, exc_value: BaseException, exc_traceback: TracebackType, ) -> None: # Report failed test case. 
if is_verbose: sys.stderr.write('\n\n') msg = '' if exc_value.args and exc_value.args[0]: msg = ': ' + str(exc_value) else: msg = '' if not isinstance(exc_value, SystemExit): # We assume that before doing exit() (which raises SystemExit) we've printed # enough context about what happened so that a stack trace is not useful. # In particular, uncaught exceptions during semantic analysis or type checking # call exit() and they already print out a stack trace. sys.stderr.write('Traceback (most recent call last):\n') tb = traceback.format_tb(exc_traceback) tb = clean_traceback(tb) for s in tb: sys.stderr.write(s) else: sys.stderr.write('\n') exception = typename(exc_type) sys.stderr.write('{}{}\n\n'.format(exception, msg)) sys.stderr.write('{} failed\n\n'.format(name)) def typename(t: type) -> str: if '.' in str(t): return str(t).split('.')[-1].rstrip("'>") else: return str(t)[8:-2] def match_pattern(s: str, p: str) -> bool: if len(p) == 0: return len(s) == 0 elif p[0] == '*': if len(p) == 1: return True else: for i in range(len(s) + 1): if match_pattern(s[i:], p[1:]): return True return False elif len(s) == 0: return False else: return s[0] == p[0] and match_pattern(s[1:], p[1:]) def clean_traceback(tb: List[str]) -> List[str]: # Remove clutter from the traceback. start = 0 for i, s in enumerate(tb): if '\n test.run()\n' in s or '\n self.func()\n' in s: start = i + 1 tb = tb[start:] for f in ['assert_equal', 'assert_not_equal', 'assert_type', 'assert_raises', 'assert_true']: if tb != [] and ', in {}\n'.format(f) in tb[-1]: tb = tb[:-1] return tb mypy-0.560/mypy/myunit/__main__.py0000644€tŠÔÚ€2›s®0000000033613215007205023306 0ustar jukkaDROPBOX\Domain Users00000000000000# This is a separate module from mypy.myunit so it doesn't exist twice. """Myunit test runner command line tool. Usually used as a slave by runtests.py, but can be used directly. """ from mypy.myunit import main main() mypy-0.560/mypy/nodes.py0000644€tŠÔÚ€2›s®0000025644113215007206021364 0ustar jukkaDROPBOX\Domain Users00000000000000"""Abstract syntax tree node classes (i.e. parse tree).""" import os from abc import abstractmethod from collections import OrderedDict from typing import ( Any, TypeVar, List, Tuple, cast, Set, Dict, Union, Optional, Callable, Sequence, ) import mypy.strconv from mypy.util import short_type from mypy.visitor import NodeVisitor, StatementVisitor, ExpressionVisitor class Context: """Base type for objects that are valid as error message locations.""" line = -1 column = -1 def __init__(self, line: int = -1, column: int = -1) -> None: self.line = line self.column = column def set_line(self, target: Union['Context', int], column: Optional[int] = None) -> None: """If target is a node, pull line (and column) information into this node. If column is specified, this will override any column information coming from a node. """ if isinstance(target, int): self.line = target else: self.line = target.line self.column = target.column if column is not None: self.column = column def get_line(self) -> int: """Don't use. Use x.line.""" return self.line def get_column(self) -> int: """Don't use. Use x.column.""" return self.column if False: # break import cycle only needed for mypy import mypy.types T = TypeVar('T') JsonDict = Dict[str, Any] # Symbol table node kinds # # TODO rename to use more descriptive names LDEF = 0 # type: int GDEF = 1 # type: int MDEF = 2 # type: int MODULE_REF = 3 # type: int # Type variable declared using TypeVar(...) has kind TVAR. It's not # valid as a type unless bound in a TypeVarScope. 
That happens within: # (1) a generic class that uses the type variable as a type argument or # (2) a generic function that refers to the type variable in its signature. TVAR = 4 # type: int TYPE_ALIAS = 6 # type: int # Placeholder for a name imported via 'from ... import'. Second phase of # semantic will replace this the actual imported reference. This is # needed so that we can detect whether a name has been imported during # XXX what? UNBOUND_IMPORTED = 7 # type: int LITERAL_YES = 2 LITERAL_TYPE = 1 LITERAL_NO = 0 # Hard coded name of Enum baseclass. ENUM_BASECLASS = "enum.Enum" node_kinds = { LDEF: 'Ldef', GDEF: 'Gdef', MDEF: 'Mdef', MODULE_REF: 'ModuleRef', TVAR: 'Tvar', TYPE_ALIAS: 'TypeAlias', UNBOUND_IMPORTED: 'UnboundImported', } inverse_node_kinds = {_kind: _name for _name, _kind in node_kinds.items()} implicit_module_attrs = {'__name__': '__builtins__.str', '__doc__': None, # depends on Python version, see semanal.py '__file__': '__builtins__.str', '__package__': '__builtins__.str'} type_aliases = { 'typing.List': '__builtins__.list', 'typing.Dict': '__builtins__.dict', 'typing.Set': '__builtins__.set', 'typing.FrozenSet': '__builtins__.frozenset', } reverse_type_aliases = dict((name.replace('__builtins__', 'builtins'), alias) for alias, name in type_aliases.items()) # type: Dict[str, str] collections_type_aliases = { 'typing.ChainMap': '__mypy_collections__.ChainMap', 'typing.Counter': '__mypy_collections__.Counter', 'typing.DefaultDict': '__mypy_collections__.defaultdict', 'typing.Deque': '__mypy_collections__.deque', } reverse_collection_aliases = dict((name.replace('__mypy_collections__', 'collections'), alias) for alias, name in collections_type_aliases.items()) # type: Dict[str, str] nongen_builtins = {'builtins.tuple': 'typing.Tuple', 'builtins.enumerate': ''} nongen_builtins.update(reverse_type_aliases) nongen_builtins.update(reverse_collection_aliases) class Node(Context): """Common base class for all non-type parse tree nodes.""" def __str__(self) -> str: ans = self.accept(mypy.strconv.StrConv()) if ans is None: return repr(self) return ans def accept(self, visitor: NodeVisitor[T]) -> T: raise RuntimeError('Not implemented') class Statement(Node): """A statement node.""" def accept(self, visitor: StatementVisitor[T]) -> T: raise RuntimeError('Not implemented') class Expression(Node): """An expression node.""" def accept(self, visitor: ExpressionVisitor[T]) -> T: raise RuntimeError('Not implemented') # TODO: # Lvalue = Union['NameExpr', 'MemberExpr', 'IndexExpr', 'SuperExpr', 'StarExpr' # 'TupleExpr', 'ListExpr']; see #1783. Lvalue = Expression class SymbolNode(Node): # Nodes that can be stored in a symbol table. # TODO do not use methods for these @abstractmethod def name(self) -> str: pass @abstractmethod def fullname(self) -> str: pass @abstractmethod def serialize(self) -> JsonDict: pass @classmethod def deserialize(cls, data: JsonDict) -> 'SymbolNode': classname = data['.class'] method = deserialize_map.get(classname) if method is not None: return method(data) raise NotImplementedError('unexpected .class {}'.format(classname)) class MypyFile(SymbolNode): """The abstract syntax tree of a single source file.""" # Module name ('__main__' for initial file) _name = None # type: str # Fully qualified module name _fullname = None # type: str # Path to the file (None if not known) path = '' # Top-level definitions and statements defs = None # type: List[Statement] # Is there a UTF-8 BOM at the start? 
is_bom = False names = None # type: SymbolTable # All import nodes within the file (also ones within functions etc.) imports = None # type: List[ImportBase] # Lines to ignore when checking ignored_lines = None # type: Set[int] # Is this file represented by a stub file (.pyi)? is_stub = False def __init__(self, defs: List[Statement], imports: List['ImportBase'], is_bom: bool = False, ignored_lines: Optional[Set[int]] = None) -> None: self.defs = defs self.line = 1 # Dummy line number self.imports = imports self.is_bom = is_bom if ignored_lines: self.ignored_lines = ignored_lines else: self.ignored_lines = set() def name(self) -> str: return self._name def fullname(self) -> str: return self._fullname def accept(self, visitor: NodeVisitor[T]) -> T: return visitor.visit_mypy_file(self) def is_package_init_file(self) -> bool: return not (self.path is None) and len(self.path) != 0 \ and os.path.basename(self.path).startswith('__init__.') def serialize(self) -> JsonDict: return {'.class': 'MypyFile', '_name': self._name, '_fullname': self._fullname, 'names': self.names.serialize(self._fullname), 'is_stub': self.is_stub, 'path': self.path, } @classmethod def deserialize(cls, data: JsonDict) -> 'MypyFile': assert data['.class'] == 'MypyFile', data tree = MypyFile([], []) tree._name = data['_name'] tree._fullname = data['_fullname'] tree.names = SymbolTable.deserialize(data['names']) tree.is_stub = data['is_stub'] tree.path = data['path'] return tree class ImportBase(Statement): """Base class for all import statements.""" is_unreachable = False # Set by semanal.SemanticAnalyzerPass1 if inside `if False` etc. is_top_level = False # Ditto if outside any class or def is_mypy_only = False # Ditto if inside `if TYPE_CHECKING` or `if MYPY` # If an import replaces existing definitions, we construct dummy assignment # statements that assign the imported names to the names in the current scope, # for type checking purposes. Example: # # x = 1 # from m import x <-- add assignment representing "x = m.x" assignments = None # type: List[AssignmentStmt] def __init__(self) -> None: self.assignments = [] class Import(ImportBase): """import m [as n]""" ids = None # type: List[Tuple[str, Optional[str]]] # (module id, as id) def __init__(self, ids: List[Tuple[str, Optional[str]]]) -> None: super().__init__() self.ids = ids def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_import(self) class ImportFrom(ImportBase): """from m import x [as y], ...""" id = None # type: str relative = None # type: int names = None # type: List[Tuple[str, Optional[str]]] # Tuples (name, as name) def __init__(self, id: str, relative: int, names: List[Tuple[str, Optional[str]]]) -> None: super().__init__() self.id = id self.names = names self.relative = relative def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_import_from(self) class ImportAll(ImportBase): """from m import *""" id = None # type: str relative = None # type: int def __init__(self, id: str, relative: int) -> None: super().__init__() self.id = id self.relative = relative def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_import_all(self) class FuncBase(Node): """Abstract base class for function-like nodes""" # Type signature. 
This is usually CallableType or Overloaded, but it can be something else for # decorated functions/ type = None # type: Optional[mypy.types.Type] # Original, not semantically analyzed type (used for reprocessing) unanalyzed_type = None # type: Optional[mypy.types.Type] # If method, reference to TypeInfo info = None # type: TypeInfo is_property = False _fullname = None # type: str # Name with module prefix @abstractmethod def name(self) -> str: pass def fullname(self) -> str: return self._fullname OverloadPart = Union['FuncDef', 'Decorator'] class OverloadedFuncDef(FuncBase, SymbolNode, Statement): """A logical node representing all the variants of a multi-declaration function. A multi-declaration function is often an @overload, but can also be a @property with a setter and a/or a deleter. This node has no explicit representation in the source program. Overloaded variants must be consecutive in the source file. """ items = None # type: List[OverloadPart] impl = None # type: Optional[OverloadPart] def __init__(self, items: List['OverloadPart']) -> None: assert len(items) > 0 self.items = items self.impl = None self.set_line(items[0].line) def name(self) -> str: return self.items[0].name() def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_overloaded_func_def(self) def serialize(self) -> JsonDict: return {'.class': 'OverloadedFuncDef', 'items': [i.serialize() for i in self.items], 'type': None if self.type is None else self.type.serialize(), 'fullname': self._fullname, 'is_property': self.is_property, 'impl': None if self.impl is None else self.impl.serialize() } @classmethod def deserialize(cls, data: JsonDict) -> 'OverloadedFuncDef': assert data['.class'] == 'OverloadedFuncDef' res = OverloadedFuncDef([ cast(OverloadPart, SymbolNode.deserialize(d)) for d in data['items']]) if data.get('impl') is not None: res.impl = cast(OverloadPart, SymbolNode.deserialize(data['impl'])) if data.get('type') is not None: res.type = mypy.types.deserialize_type(data['type']) res._fullname = data['fullname'] res.is_property = data['is_property'] # NOTE: res.info will be set in the fixup phase. return res class Argument(Node): """A single argument in a FuncItem.""" variable = None # type: Var type_annotation = None # type: Optional[mypy.types.Type] initializer = None # type: Optional[Expression] kind = None # type: int # must be an ARG_* constant def __init__(self, variable: 'Var', type_annotation: 'Optional[mypy.types.Type]', initializer: Optional[Expression], kind: int) -> None: self.variable = variable self.type_annotation = type_annotation self.initializer = initializer self.kind = kind def set_line(self, target: Union[Context, int], column: Optional[int] = None) -> None: super().set_line(target, column) if self.initializer: self.initializer.set_line(self.line, self.column) self.variable.set_line(self.line, self.column) class FuncItem(FuncBase): arguments = [] # type: List[Argument] arg_names = [] # type: List[str] arg_kinds = [] # type: List[int] # Minimum number of arguments min_args = 0 # Maximum number of positional arguments, -1 if no explicit limit (*args not included) max_pos = 0 body = None # type: Block # Is this an overload variant of function with more than one overload variant? is_overload = False is_generator = False # Contains a yield statement? is_coroutine = False # Defined using 'async def' syntax? is_async_generator = False # Is an async def generator? is_awaitable_coroutine = False # Decorated with '@{typing,asyncio}.coroutine'? 
is_static = False # Uses @staticmethod? is_class = False # Uses @classmethod? # Variants of function with type variables with values expanded expanded = None # type: List[FuncItem] FLAGS = [ 'is_overload', 'is_generator', 'is_coroutine', 'is_async_generator', 'is_awaitable_coroutine', 'is_static', 'is_class', ] def __init__(self, arguments: List[Argument], body: 'Block', typ: 'Optional[mypy.types.FunctionLike]' = None) -> None: self.arguments = arguments self.arg_names = [arg.variable.name() for arg in self.arguments] self.arg_kinds = [arg.kind for arg in self.arguments] self.max_pos = self.arg_kinds.count(ARG_POS) + self.arg_kinds.count(ARG_OPT) self.body = body self.type = typ self.unanalyzed_type = typ self.expanded = [] self.min_args = 0 for i in range(len(self.arguments)): if self.arguments[i] is None and i < self.max_fixed_argc(): self.min_args = i + 1 def max_fixed_argc(self) -> int: return self.max_pos def set_line(self, target: Union[Context, int], column: Optional[int] = None) -> None: super().set_line(target, column) for arg in self.arguments: arg.set_line(self.line, self.column) def is_dynamic(self) -> bool: return self.type is None class FuncDef(FuncItem, SymbolNode, Statement): """Function definition. This is a non-lambda function defined using 'def'. """ is_decorated = False is_conditional = False # Defined conditionally (within block)? is_abstract = False is_property = False # Original conditional definition original_def = None # type: Union[None, FuncDef, Var, Decorator] FLAGS = FuncItem.FLAGS + [ 'is_decorated', 'is_conditional', 'is_abstract', 'is_property' ] def __init__(self, name: str, # Function name arguments: List[Argument], body: 'Block', typ: 'Optional[mypy.types.FunctionLike]' = None) -> None: super().__init__(arguments, body, typ) self._name = name def name(self) -> str: return self._name def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_func_def(self) def serialize(self) -> JsonDict: # We're deliberating omitting arguments and storing only arg_names and # arg_kinds for space-saving reasons (arguments is not used in later # stages of mypy). # TODO: After a FuncDef is deserialized, the only time we use `arg_names` # and `arg_kinds` is when `type` is None and we need to infer a type. Can # we store the inferred type ahead of time? return {'.class': 'FuncDef', 'name': self._name, 'fullname': self._fullname, 'arg_names': self.arg_names, 'arg_kinds': self.arg_kinds, 'type': None if self.type is None else self.type.serialize(), 'flags': get_flags(self, FuncDef.FLAGS), # TODO: Do we need expanded, original_def? } @classmethod def deserialize(cls, data: JsonDict) -> 'FuncDef': assert data['.class'] == 'FuncDef' body = Block([]) ret = FuncDef(data['name'], [], body, (None if data['type'] is None else cast(mypy.types.FunctionLike, mypy.types.deserialize_type(data['type'])))) ret._fullname = data['fullname'] set_flags(ret, data['flags']) # NOTE: ret.info is set in the fixup phase. ret.arg_names = data['arg_names'] ret.arg_kinds = data['arg_kinds'] # Mark these as 'None' so that future uses will trigger an error _dummy = None # type: Any ret.arguments = _dummy ret.max_pos = _dummy ret.min_args = _dummy return ret class Decorator(SymbolNode, Statement): """A decorated function. A single Decorator object can include any number of function decorators. 
""" func = None # type: FuncDef # Decorated function decorators = None # type: List[Expression] # Decorators (may be empty) # TODO: This is mostly used for the type; consider replacing with a 'type' attribute var = None # type: Var # Represents the decorated function obj is_overload = False def __init__(self, func: FuncDef, decorators: List[Expression], var: 'Var') -> None: self.func = func self.decorators = decorators self.var = var self.is_overload = False def name(self) -> str: return self.func.name() def fullname(self) -> str: return self.func.fullname() @property def info(self) -> 'TypeInfo': return self.func.info @property def type(self) -> 'Optional[mypy.types.Type]': return self.var.type def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_decorator(self) def serialize(self) -> JsonDict: return {'.class': 'Decorator', 'func': self.func.serialize(), 'var': self.var.serialize(), 'is_overload': self.is_overload, } @classmethod def deserialize(cls, data: JsonDict) -> 'Decorator': assert data['.class'] == 'Decorator' dec = Decorator(FuncDef.deserialize(data['func']), [], Var.deserialize(data['var'])) dec.is_overload = data['is_overload'] return dec class Var(SymbolNode): """A variable. It can refer to global/local variable or a data attribute. """ _name = None # type: str # Name without module prefix _fullname = None # type: str # Name with module prefix info = None # type: TypeInfo # Defining class (for member variables) type = None # type: Optional[mypy.types.Type] # Declared or inferred type, or None # Is this the first argument to an ordinary method (usually "self")? is_self = False is_ready = False # If inferred, is the inferred type available? # Is this initialized explicitly to a non-None value in class body? is_inferred = False is_initialized_in_class = False is_staticmethod = False is_classmethod = False is_property = False is_settable_property = False is_classvar = False is_abstract_var = False # Set to true when this variable refers to a module we were unable to # parse for some reason (eg a silenced module) is_suppressed_import = False FLAGS = [ 'is_self', 'is_ready', 'is_initialized_in_class', 'is_staticmethod', 'is_classmethod', 'is_property', 'is_settable_property', 'is_suppressed_import', 'is_classvar', 'is_abstract_var' ] def __init__(self, name: str, type: 'Optional[mypy.types.Type]' = None) -> None: self._name = name self.type = type if self.type is None: self.is_inferred = True self.is_self = False self.is_ready = True self.is_initialized_in_class = False def name(self) -> str: return self._name def fullname(self) -> str: return self._fullname def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_var(self) def serialize(self) -> JsonDict: # TODO: Leave default values out? # NOTE: Sometimes self.is_ready is False here, but we don't care. 
data = {'.class': 'Var', 'name': self._name, 'fullname': self._fullname, 'type': None if self.type is None else self.type.serialize(), 'flags': get_flags(self, Var.FLAGS), } # type: JsonDict return data @classmethod def deserialize(cls, data: JsonDict) -> 'Var': assert data['.class'] == 'Var' name = data['name'] type = None if data['type'] is None else mypy.types.deserialize_type(data['type']) v = Var(name, type) v._fullname = data['fullname'] set_flags(v, data['flags']) return v class ClassDef(Statement): """Class definition""" name = None # type: str # Name of the class without module prefix fullname = None # type: str # Fully qualified name of the class defs = None # type: Block type_vars = None # type: List[mypy.types.TypeVarDef] # Base class expressions (not semantically analyzed -- can be arbitrary expressions) base_type_exprs = None # type: List[Expression] info = None # type: TypeInfo # Related TypeInfo metaclass = None # type: Optional[Expression] decorators = None # type: List[Expression] keywords = None # type: OrderedDict[str, Expression] analyzed = None # type: Optional[Expression] has_incompatible_baseclass = False def __init__(self, name: str, defs: 'Block', type_vars: Optional[List['mypy.types.TypeVarDef']] = None, base_type_exprs: Optional[List[Expression]] = None, metaclass: Optional[Expression] = None, keywords: Optional[List[Tuple[str, Expression]]] = None) -> None: self.name = name self.defs = defs self.type_vars = type_vars or [] self.base_type_exprs = base_type_exprs or [] self.metaclass = metaclass self.decorators = [] self.keywords = OrderedDict(keywords or []) def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_class_def(self) def is_generic(self) -> bool: return self.info.is_generic() def serialize(self) -> JsonDict: # Not serialized: defs, base_type_exprs, metaclass, decorators, # analyzed (for named tuples etc.) return {'.class': 'ClassDef', 'name': self.name, 'fullname': self.fullname, 'type_vars': [v.serialize() for v in self.type_vars], } @classmethod def deserialize(self, data: JsonDict) -> 'ClassDef': assert data['.class'] == 'ClassDef' res = ClassDef(data['name'], Block([]), [mypy.types.TypeVarDef.deserialize(v) for v in data['type_vars']], ) res.fullname = data['fullname'] return res class GlobalDecl(Statement): """Declaration global x, y, ...""" names = None # type: List[str] def __init__(self, names: List[str]) -> None: self.names = names def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_global_decl(self) class NonlocalDecl(Statement): """Declaration nonlocal x, y, ...""" names = None # type: List[str] def __init__(self, names: List[str]) -> None: self.names = names def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_nonlocal_decl(self) class Block(Statement): body = None # type: List[Statement] # True if we can determine that this block is not executed. For example, # this applies to blocks that are protected by something like "if PY3:" # when using Python 2. 
is_unreachable = False def __init__(self, body: List[Statement]) -> None: self.body = body def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_block(self) # Statements class ExpressionStmt(Statement): """An expression as a statement, such as print(s).""" expr = None # type: Expression def __init__(self, expr: Expression) -> None: self.expr = expr def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_expression_stmt(self) class AssignmentStmt(Statement): """Assignment statement The same node class is used for single assignment, multiple assignment (e.g. x, y = z) and chained assignment (e.g. x = y = z), assignments that define new names, and assignments with explicit types (# type). An lvalue can be NameExpr, TupleExpr, ListExpr, MemberExpr, IndexExpr. """ lvalues = None # type: List[Lvalue] rvalue = None # type: Expression # Declared type in a comment, may be None. type = None # type: Optional[mypy.types.Type] # Original, not semantically analyzed type in annotation (used for reprocessing) unanalyzed_type = None # type: Optional[mypy.types.Type] # This indicates usage of PEP 526 type annotation syntax in assignment. new_syntax = False # type: bool def __init__(self, lvalues: List[Lvalue], rvalue: Expression, type: 'Optional[mypy.types.Type]' = None, new_syntax: bool = False) -> None: self.lvalues = lvalues self.rvalue = rvalue self.type = type self.unanalyzed_type = type self.new_syntax = new_syntax def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_assignment_stmt(self) class OperatorAssignmentStmt(Statement): """Operator assignment statement such as x += 1""" op = '' lvalue = None # type: Lvalue rvalue = None # type: Expression def __init__(self, op: str, lvalue: Lvalue, rvalue: Expression) -> None: self.op = op self.lvalue = lvalue self.rvalue = rvalue def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_operator_assignment_stmt(self) class WhileStmt(Statement): expr = None # type: Expression body = None # type: Block else_body = None # type: Optional[Block] def __init__(self, expr: Expression, body: Block, else_body: Optional[Block]) -> None: self.expr = expr self.body = body self.else_body = else_body def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_while_stmt(self) class ForStmt(Statement): # Index variables index = None # type: Lvalue # Type given by type comments for index, can be None index_type = None # type: Optional[mypy.types.Type] # Inferred iterable item type inferred_item_type = None # type: Optional[mypy.types.Type] # Expression to iterate expr = None # type: Expression body = None # type: Block else_body = None # type: Optional[Block] is_async = False # True if `async for ...` (PEP 492, Python 3.5) def __init__(self, index: Lvalue, expr: Expression, body: Block, else_body: Optional[Block], index_type: 'Optional[mypy.types.Type]' = None) -> None: self.index = index self.index_type = index_type self.expr = expr self.body = body self.else_body = else_body def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_for_stmt(self) class ReturnStmt(Statement): expr = None # type: Optional[Expression] def __init__(self, expr: Optional[Expression]) -> None: self.expr = expr def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_return_stmt(self) class AssertStmt(Statement): expr = None # type: Expression msg = None # type: Optional[Expression] def __init__(self, expr: Expression, msg: Optional[Expression] = None) -> None: self.expr = expr 
self.msg = msg def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_assert_stmt(self) class DelStmt(Statement): expr = None # type: Lvalue def __init__(self, expr: Lvalue) -> None: self.expr = expr def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_del_stmt(self) class BreakStmt(Statement): def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_break_stmt(self) class ContinueStmt(Statement): def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_continue_stmt(self) class PassStmt(Statement): def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_pass_stmt(self) class IfStmt(Statement): expr = None # type: List[Expression] body = None # type: List[Block] else_body = None # type: Optional[Block] def __init__(self, expr: List[Expression], body: List[Block], else_body: Optional[Block]) -> None: self.expr = expr self.body = body self.else_body = else_body def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_if_stmt(self) class RaiseStmt(Statement): # Plain 'raise' is a valid statement. expr = None # type: Optional[Expression] from_expr = None # type: Optional[Expression] def __init__(self, expr: Optional[Expression], from_expr: Optional[Expression]) -> None: self.expr = expr self.from_expr = from_expr def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_raise_stmt(self) class TryStmt(Statement): body = None # type: Block # Try body # Plain 'except:' also possible types = None # type: List[Optional[Expression]] # Except type expressions vars = None # type: List[Optional[NameExpr]] # Except variable names handlers = None # type: List[Block] # Except bodies else_body = None # type: Optional[Block] finally_body = None # type: Optional[Block] def __init__(self, body: Block, vars: List['Optional[NameExpr]'], types: List[Optional[Expression]], handlers: List[Block], else_body: Optional[Block], finally_body: Optional[Block]) -> None: self.body = body self.vars = vars self.types = types self.handlers = handlers self.else_body = else_body self.finally_body = finally_body def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_try_stmt(self) class WithStmt(Statement): expr = None # type: List[Expression] target = None # type: List[Optional[Lvalue]] # Type given by type comments for target, can be None target_type = None # type: Optional[mypy.types.Type] body = None # type: Block is_async = False # True if `async with ...` (PEP 492, Python 3.5) def __init__(self, expr: List[Expression], target: List[Optional[Lvalue]], body: Block, target_type: 'Optional[mypy.types.Type]' = None) -> None: self.expr = expr self.target = target self.target_type = target_type self.body = body def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_with_stmt(self) class PrintStmt(Statement): """Python 2 print statement""" args = None # type: List[Expression] newline = False # The file-like target object (given using >>). 
target = None # type: Optional[Expression] def __init__(self, args: List[Expression], newline: bool, target: Optional[Expression] = None) -> None: self.args = args self.newline = newline self.target = target def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_print_stmt(self) class ExecStmt(Statement): """Python 2 exec statement""" expr = None # type: Expression globals = None # type: Optional[Expression] locals = None # type: Optional[Expression] def __init__(self, expr: Expression, globals: Optional[Expression], locals: Optional[Expression]) -> None: self.expr = expr self.globals = globals self.locals = locals def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_exec_stmt(self) # Expressions class IntExpr(Expression): """Integer literal""" value = 0 def __init__(self, value: int) -> None: self.value = value def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_int_expr(self) # How mypy uses StrExpr, BytesExpr, and UnicodeExpr: # In Python 2 mode: # b'x', 'x' -> StrExpr # u'x' -> UnicodeExpr # BytesExpr is unused # # In Python 3 mode: # b'x' -> BytesExpr # 'x', u'x' -> StrExpr # UnicodeExpr is unused class StrExpr(Expression): """String literal""" value = '' def __init__(self, value: str) -> None: self.value = value def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_str_expr(self) class BytesExpr(Expression): """Bytes literal""" value = '' # TODO use bytes def __init__(self, value: str) -> None: self.value = value def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_bytes_expr(self) class UnicodeExpr(Expression): """Unicode literal (Python 2.x)""" value = '' # TODO use bytes def __init__(self, value: str) -> None: self.value = value def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_unicode_expr(self) class FloatExpr(Expression): """Float literal""" value = 0.0 def __init__(self, value: float) -> None: self.value = value def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_float_expr(self) class ComplexExpr(Expression): """Complex literal""" value = 0.0j def __init__(self, value: complex) -> None: self.value = value def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_complex_expr(self) class EllipsisExpr(Expression): """Ellipsis (...)""" def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_ellipsis(self) class StarExpr(Expression): """Star expression""" expr = None # type: Expression def __init__(self, expr: Expression) -> None: self.expr = expr # Whether this starred expression is used in a tuple/list and as lvalue self.valid = False def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_star_expr(self) class RefExpr(Expression): """Abstract base class for name-like constructs""" kind = None # type: Optional[int] # LDEF/GDEF/MDEF/... (None if not available) node = None # type: Optional[SymbolNode] # Var, FuncDef or TypeInfo that describes this fullname = None # type: Optional[str] # Fully qualified name (or name if not global) # Does this define a new name? is_new_def = False # Does this define a new name with inferred type? # # For members, after semantic analysis, this does not take base # classes into consideration at all; the type checker deals with these. is_inferred_def = False class NameExpr(RefExpr): """Name expression This refers to a local name, global name or a module. 
""" name = None # type: str # Name referred to (may be qualified) def __init__(self, name: str) -> None: self.name = name def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_name_expr(self) def serialize(self) -> JsonDict: # TODO: Find out where and why NameExpr is being serialized (if at all). assert False, "Serializing NameExpr: %s" % (self,) return {'.class': 'NameExpr', 'kind': self.kind, 'node': None if self.node is None else self.node.serialize(), 'fullname': self.fullname, 'is_new_def': self.is_new_def, 'is_inferred_def': self.is_inferred_def, 'name': self.name, } @classmethod def deserialize(cls, data: JsonDict) -> 'NameExpr': assert data['.class'] == 'NameExpr' ret = NameExpr(data['name']) ret.kind = data['kind'] ret.node = None if data['node'] is None else SymbolNode.deserialize(data['node']) ret.fullname = data['fullname'] ret.is_new_def = data['is_new_def'] ret.is_inferred_def = data['is_inferred_def'] return ret class MemberExpr(RefExpr): """Member access expression x.y""" expr = None # type: Expression name = None # type: str # The variable node related to a definition through 'self.x = '. # The nodes of other kinds of member expressions are resolved during type checking. def_var = None # type: Optional[Var] def __init__(self, expr: Expression, name: str) -> None: self.expr = expr self.name = name def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_member_expr(self) # Kinds of arguments # Positional argument ARG_POS = 0 # type: int # Positional, optional argument (functions only, not calls) ARG_OPT = 1 # type: int # *arg argument ARG_STAR = 2 # type: int # Keyword argument x=y in call, or keyword-only function arg ARG_NAMED = 3 # type: int # **arg argument ARG_STAR2 = 4 # type: int # In an argument list, keyword-only and also optional ARG_NAMED_OPT = 5 class CallExpr(Expression): """Call expression. This can also represent several special forms that are syntactically calls such as cast(...) and None # type: .... """ callee = None # type: Expression args = None # type: List[Expression] arg_kinds = None # type: List[int] # ARG_ constants # Each name can be None if not a keyword argument. arg_names = None # type: List[Optional[str]] # If not None, the node that represents the meaning of the CallExpr. For # cast(...) this is a CastExpr. analyzed = None # type: Optional[Expression] def __init__(self, callee: Expression, args: List[Expression], arg_kinds: List[int], arg_names: List[Optional[str]], analyzed: Optional[Expression] = None) -> None: if not arg_names: arg_names = [None] * len(args) self.callee = callee self.args = args self.arg_kinds = arg_kinds self.arg_names = arg_names self.analyzed = analyzed def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_call_expr(self) class YieldFromExpr(Expression): expr = None # type: Expression def __init__(self, expr: Expression) -> None: self.expr = expr def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_yield_from_expr(self) class YieldExpr(Expression): expr = None # type: Optional[Expression] def __init__(self, expr: Optional[Expression]) -> None: self.expr = expr def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_yield_expr(self) class IndexExpr(Expression): """Index expression x[y]. Also wraps type application such as List[int] as a special form. 
""" base = None # type: Expression index = None # type: Expression # Inferred __getitem__ method type method_type = None # type: mypy.types.Type # If not None, this is actually semantically a type application # Class[type, ...] or a type alias initializer. analyzed = None # type: Union[TypeApplication, TypeAliasExpr, None] def __init__(self, base: Expression, index: Expression) -> None: self.base = base self.index = index self.analyzed = None def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_index_expr(self) class UnaryExpr(Expression): """Unary operation""" op = '' expr = None # type: Expression # Inferred operator method type method_type = None # type: Optional[mypy.types.Type] def __init__(self, op: str, expr: Expression) -> None: self.op = op self.expr = expr def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_unary_expr(self) # Map from binary operator id to related method name (in Python 3). op_methods = { '+': '__add__', '-': '__sub__', '*': '__mul__', '/': '__truediv__', '%': '__mod__', '//': '__floordiv__', '**': '__pow__', '@': '__matmul__', '&': '__and__', '|': '__or__', '^': '__xor__', '<<': '__lshift__', '>>': '__rshift__', '==': '__eq__', '!=': '__ne__', '<': '__lt__', '>=': '__ge__', '>': '__gt__', '<=': '__le__', 'in': '__contains__', } # type: Dict[str, str] comparison_fallback_method = '__cmp__' ops_falling_back_to_cmp = {'__ne__', '__eq__', '__lt__', '__le__', '__gt__', '__ge__'} ops_with_inplace_method = { '+', '-', '*', '/', '%', '//', '**', '@', '&', '|', '^', '<<', '>>'} inplace_operator_methods = set( '__i' + op_methods[op][2:] for op in ops_with_inplace_method) reverse_op_methods = { '__add__': '__radd__', '__sub__': '__rsub__', '__mul__': '__rmul__', '__truediv__': '__rtruediv__', '__mod__': '__rmod__', '__floordiv__': '__rfloordiv__', '__pow__': '__rpow__', '__matmul__': '__rmatmul__', '__and__': '__rand__', '__or__': '__ror__', '__xor__': '__rxor__', '__lshift__': '__rlshift__', '__rshift__': '__rrshift__', '__eq__': '__eq__', '__ne__': '__ne__', '__lt__': '__gt__', '__ge__': '__le__', '__gt__': '__lt__', '__le__': '__ge__', } normal_from_reverse_op = dict((m, n) for n, m in reverse_op_methods.items()) reverse_op_method_set = set(reverse_op_methods.values()) unary_op_methods = { '-': '__neg__', '+': '__pos__', '~': '__invert__', } class OpExpr(Expression): """Binary operation (other than . or [] or comparison operators, which have specific nodes).""" op = '' left = None # type: Expression right = None # type: Expression # Inferred type for the operator method type (when relevant). method_type = None # type: Optional[mypy.types.Type] # Is the right side going to be evaluated every time? right_always = False # Is the right side unreachable? right_unreachable = False def __init__(self, op: str, left: Expression, right: Expression) -> None: self.op = op self.left = left self.right = right def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_op_expr(self) class ComparisonExpr(Expression): """Comparison expression (e.g. a < b > c < d).""" operators = None # type: List[str] operands = None # type: List[Expression] # Inferred type for the operator methods (when relevant; None for 'is'). 
method_types = None # type: List[Optional[mypy.types.Type]] def __init__(self, operators: List[str], operands: List[Expression]) -> None: self.operators = operators self.operands = operands self.method_types = [] def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_comparison_expr(self) class SliceExpr(Expression): """Slice expression (e.g. 'x:y', 'x:', '::2' or ':'). This is only valid as index in index expressions. """ begin_index = None # type: Optional[Expression] end_index = None # type: Optional[Expression] stride = None # type: Optional[Expression] def __init__(self, begin_index: Optional[Expression], end_index: Optional[Expression], stride: Optional[Expression]) -> None: self.begin_index = begin_index self.end_index = end_index self.stride = stride def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_slice_expr(self) class CastExpr(Expression): """Cast expression cast(type, expr).""" expr = None # type: Expression type = None # type: mypy.types.Type def __init__(self, expr: Expression, typ: 'mypy.types.Type') -> None: self.expr = expr self.type = typ def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_cast_expr(self) class RevealTypeExpr(Expression): """Reveal type expression reveal_type(expr).""" expr = None # type: Expression def __init__(self, expr: Expression) -> None: self.expr = expr def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_reveal_type_expr(self) class SuperExpr(Expression): """Expression super().name""" name = '' info = None # type: TypeInfo # Type that contains this super expression call = None # type: CallExpr # The expression super(...) def __init__(self, name: str, call: CallExpr) -> None: self.name = name self.call = call def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_super_expr(self) class LambdaExpr(FuncItem, Expression): """Lambda expression""" def name(self) -> str: return '' def expr(self) -> Expression: """Return the expression (the body) of the lambda.""" ret = cast(ReturnStmt, self.body.body[-1]) expr = ret.expr assert expr is not None # lambda can't have empty body return expr def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_lambda_expr(self) def is_dynamic(self) -> bool: return False class ListExpr(Expression): """List literal expression [...].""" items = None # type: List[Expression] def __init__(self, items: List[Expression]) -> None: self.items = items def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_list_expr(self) class DictExpr(Expression): """Dictionary literal expression {key: value, ...}.""" items = None # type: List[Tuple[Expression, Expression]] def __init__(self, items: List[Tuple[Expression, Expression]]) -> None: self.items = items def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_dict_expr(self) class TupleExpr(Expression): """Tuple literal expression (..., ...)""" items = None # type: List[Expression] def __init__(self, items: List[Expression]) -> None: self.items = items def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_tuple_expr(self) class SetExpr(Expression): """Set literal expression {value, ...}.""" items = None # type: List[Expression] def __init__(self, items: List[Expression]) -> None: self.items = items def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_set_expr(self) class GeneratorExpr(Expression): """Generator expression ... for ... in ... [ for ... in ... ] [ if ... 
].""" left_expr = None # type: Expression sequences = None # type: List[Expression] condlists = None # type: List[List[Expression]] is_async = None # type: List[bool] indices = None # type: List[Lvalue] def __init__(self, left_expr: Expression, indices: List[Lvalue], sequences: List[Expression], condlists: List[List[Expression]], is_async: List[bool]) -> None: self.left_expr = left_expr self.sequences = sequences self.condlists = condlists self.indices = indices self.is_async = is_async def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_generator_expr(self) class ListComprehension(Expression): """List comprehension (e.g. [x + 1 for x in a])""" generator = None # type: GeneratorExpr def __init__(self, generator: GeneratorExpr) -> None: self.generator = generator def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_list_comprehension(self) class SetComprehension(Expression): """Set comprehension (e.g. {x + 1 for x in a})""" generator = None # type: GeneratorExpr def __init__(self, generator: GeneratorExpr) -> None: self.generator = generator def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_set_comprehension(self) class DictionaryComprehension(Expression): """Dictionary comprehension (e.g. {k: v for k, v in a}""" key = None # type: Expression value = None # type: Expression sequences = None # type: List[Expression] condlists = None # type: List[List[Expression]] is_async = None # type: List[bool] indices = None # type: List[Lvalue] def __init__(self, key: Expression, value: Expression, indices: List[Lvalue], sequences: List[Expression], condlists: List[List[Expression]], is_async: List[bool]) -> None: self.key = key self.value = value self.sequences = sequences self.condlists = condlists self.indices = indices self.is_async = is_async def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_dictionary_comprehension(self) class ConditionalExpr(Expression): """Conditional expression (e.g. x if y else z)""" cond = None # type: Expression if_expr = None # type: Expression else_expr = None # type: Expression def __init__(self, cond: Expression, if_expr: Expression, else_expr: Expression) -> None: self.cond = cond self.if_expr = if_expr self.else_expr = else_expr def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_conditional_expr(self) class BackquoteExpr(Expression): """Python 2 expression `...`.""" expr = None # type: Expression def __init__(self, expr: Expression) -> None: self.expr = expr def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_backquote_expr(self) class TypeApplication(Expression): """Type application expr[type, ...]""" expr = None # type: Expression types = None # type: List[mypy.types.Type] def __init__(self, expr: Expression, types: List['mypy.types.Type']) -> None: self.expr = expr self.types = types def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_type_application(self) # Variance of a type variable. For example, T in the definition of # List[T] is invariant, so List[int] is not a subtype of List[object], # and also List[object] is not a subtype of List[int]. # # The T in Iterable[T] is covariant, so Iterable[int] is a subtype of # Iterable[object], but not vice versa. # # If T is contravariant in Foo[T], Foo[object] is a subtype of # Foo[int], but not vice versa. 
INVARIANT = 0 # type: int COVARIANT = 1 # type: int CONTRAVARIANT = 2 # type: int class TypeVarExpr(SymbolNode, Expression): """Type variable expression TypeVar(...).""" _name = '' _fullname = '' # Value restriction: only types in the list are valid as values. If the # list is empty, there is no restriction. values = None # type: List[mypy.types.Type] # Upper bound: only subtypes of upper_bound are valid as values. By default # this is 'object', meaning no restriction. upper_bound = None # type: mypy.types.Type # Variance of the type variable. Invariant is the default. # TypeVar(..., covariant=True) defines a covariant type variable. # TypeVar(..., contravariant=True) defines a contravariant type # variable. variance = INVARIANT def __init__(self, name: str, fullname: str, values: List['mypy.types.Type'], upper_bound: 'mypy.types.Type', variance: int=INVARIANT) -> None: self._name = name self._fullname = fullname self.values = values self.upper_bound = upper_bound self.variance = variance def name(self) -> str: return self._name def fullname(self) -> str: return self._fullname def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_type_var_expr(self) def serialize(self) -> JsonDict: return {'.class': 'TypeVarExpr', 'name': self._name, 'fullname': self._fullname, 'values': [t.serialize() for t in self.values], 'upper_bound': self.upper_bound.serialize(), 'variance': self.variance, } @classmethod def deserialize(cls, data: JsonDict) -> 'TypeVarExpr': assert data['.class'] == 'TypeVarExpr' return TypeVarExpr(data['name'], data['fullname'], [mypy.types.deserialize_type(v) for v in data['values']], mypy.types.deserialize_type(data['upper_bound']), data['variance']) class TypeAliasExpr(Expression): """Type alias expression (rvalue).""" type = None # type: mypy.types.Type # Simple fallback type for aliases that are invalid in runtime expressions # (for example Union, Tuple, Callable). fallback = None # type: mypy.types.Type # This type alias is subscripted in a runtime expression like Alias[int](42) # (not in a type context like type annotation or base class). in_runtime = False # type: bool def __init__(self, type: 'mypy.types.Type', tvars: List[str], fallback: 'mypy.types.Type', in_runtime: bool = False) -> None: self.type = type self.fallback = fallback self.in_runtime = in_runtime self.tvars = tvars def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_type_alias_expr(self) class NamedTupleExpr(Expression): """Named tuple expression namedtuple(...) 
or NamedTuple(...).""" # The class representation of this named tuple (its tuple_type attribute contains # the tuple item types) info = None # type: TypeInfo def __init__(self, info: 'TypeInfo') -> None: self.info = info def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_namedtuple_expr(self) class TypedDictExpr(Expression): """Typed dict expression TypedDict(...).""" # The class representation of this typed dict info = None # type: TypeInfo def __init__(self, info: 'TypeInfo') -> None: self.info = info def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_typeddict_expr(self) class EnumCallExpr(Expression): """Named tuple expression Enum('name', 'val1 val2 ...').""" # The class representation of this enumerated type info = None # type: TypeInfo # The item names (for debugging) items = None # type: List[str] values = None # type: List[Optional[Expression]] def __init__(self, info: 'TypeInfo', items: List[str], values: List[Optional[Expression]]) -> None: self.info = info self.items = items self.values = values def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_enum_call_expr(self) class PromoteExpr(Expression): """Ducktype class decorator expression _promote(...).""" type = None # type: mypy.types.Type def __init__(self, type: 'mypy.types.Type') -> None: self.type = type def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit__promote_expr(self) class NewTypeExpr(Expression): """NewType expression NewType(...).""" name = None # type: str # The base type (the second argument to NewType) old_type = None # type: Optional[mypy.types.Type] # The synthesized class representing the new type (inherits old_type) info = None # type: Optional[TypeInfo] def __init__(self, name: str, old_type: 'Optional[mypy.types.Type]', line: int) -> None: self.name = name self.old_type = old_type self.line = line def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_newtype_expr(self) class AwaitExpr(Expression): """Await expression (await ...).""" expr = None # type: Expression def __init__(self, expr: Expression) -> None: self.expr = expr def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_await_expr(self) # Constants class TempNode(Expression): """Temporary dummy node used during type checking. This node is not present in the original program; it is just an artifact of the type checker implementation. It only represents an opaque node with some fixed type. """ type = None # type: mypy.types.Type # Is this TempNode used to indicate absence of a right hand side in an annotated assignment? # (e.g. for 'x: int' the rvalue is TempNode(AnyType(TypeOfAny.special_form), no_rhs=True)) no_rhs = False # type: bool def __init__(self, typ: 'mypy.types.Type', no_rhs: bool = False) -> None: self.type = typ self.no_rhs = no_rhs def __repr__(self) -> str: return 'TempNode(%s)' % str(self.type) def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_temp_node(self) class TypeInfo(SymbolNode): """The type structure of a single class. Each TypeInfo corresponds one-to-one to a ClassDef, which represents the AST of the class. In type-theory terms, this is a "type constructor", and if the class is generic then it will be a type constructor of higher kind. Where the class is used in an actual type, it's in the form of an Instance, which amounts to a type application of the tycon to the appropriate number of arguments. 
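    For example (illustrative): for a class defined as 'class Dict(Generic[KT, VT]): ...'
    there is a single TypeInfo describing the Dict class itself, while each use such as
    Dict[str, int] is represented as a separate Instance that refers to that TypeInfo
    together with the concrete type arguments.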
""" _fullname = None # type: str # Fully qualified name # Fully qualified name for the module this type was defined in. This # information is also in the fullname, but is harder to extract in the # case of nested class definitions. module_name = None # type: str defn = None # type: ClassDef # Corresponding ClassDef # Method Resolution Order: the order of looking up attributes. The first # value always to refers to this class. mro = None # type: List[TypeInfo] declared_metaclass = None # type: Optional[mypy.types.Instance] metaclass_type = None # type: Optional[mypy.types.Instance] names = None # type: SymbolTable # Names defined directly in this type is_abstract = False # Does the class have any abstract attributes? is_protocol = False # Is this a protocol class? runtime_protocol = False # Does this protocol support isinstance checks? abstract_attributes = None # type: List[str] # Protocol members are names of all attributes/methods defined in a protocol # and in all its supertypes (except for 'object'). protocol_members = None # type: List[str] # The attributes 'assuming' and 'assuming_proper' represent structural subtype matrices. # # In languages with structural subtyping, one can keep a global subtype matrix like this: # . A B C . # A 1 0 0 # B 1 1 1 # C 1 0 1 # . # where 1 indicates that the type in corresponding row is a subtype of the type # in corresponding column. This matrix typically starts filled with all 1's and # a typechecker tries to "disprove" every subtyping relation using atomic (or nominal) types. # However, we don't want to keep this huge global state. Instead, we keep the subtype # information in the form of list of pairs (subtype, supertype) shared by all 'Instance's # with given supertype's TypeInfo. When we enter a subtype check we push a pair in this list # thus assuming that we started with 1 in corresponding matrix element. Such algorithm allows # to treat recursive and mutually recursive protocols and other kinds of complex situations. # # If concurrent/parallel type checking will be added in future, # then there should be one matrix per thread/process to avoid false negatives # during the type checking phase. assuming = None # type: List[Tuple[mypy.types.Instance, mypy.types.Instance]] assuming_proper = None # type: List[Tuple[mypy.types.Instance, mypy.types.Instance]] # Ditto for temporary 'inferring' stack of recursive constraint inference. # It contains Instance's of protocol types that appeared as an argument to # constraints.infer_constraints(). We need 'inferring' to avoid infinite recursion for # recursive and mutually recursive protocols. # # We make 'assuming' and 'inferring' attributes here instead of passing they as kwargs, # since this would require to pass them in many dozens of calls. In particular, # there is a dependency infer_constraint -> is_subtype -> is_callable_subtype -> # -> infer_constraints. inferring = None # type: List[mypy.types.Instance] # '_cache' and '_cache_proper' are subtype caches, implemented as sets of pairs # of (subtype, supertype), where supertypes are instances of given TypeInfo. # We need the caches, since subtype checks for structural types are very slow. _cache = None # type: Set[Tuple[mypy.types.Type, mypy.types.Type]] _cache_proper = None # type: Set[Tuple[mypy.types.Type, mypy.types.Type]] # 'inferring' and 'assuming' can't be also made sets, since we need to use # is_same_type to correctly treat unions. 
# Classes inheriting from Enum shadow their true members with a __getattr__, so we # have to treat them as a special case. is_enum = False # If true, any unknown attributes should have type 'Any' instead # of generating a type error. This would be true if there is a # base class with type 'Any', but other use cases may be # possible. This is similar to having __getattr__ that returns Any # (and __setattr__), but without the __getattr__ method. fallback_to_any = False # Information related to type annotations. # Generic type variable names (full names) type_vars = None # type: List[str] # Direct base classes. bases = None # type: List[mypy.types.Instance] # Another type which this type will be treated as a subtype of, # even though it's not a subclass in Python. The non-standard # `@_promote` decorator introduces this, and there are also # several builtin examples, in particular `int` -> `float`. _promote = None # type: Optional[mypy.types.Type] # Representation of a Tuple[...] base class, if the class has any # (e.g., for named tuples). If this is not None, the actual Type # object used for this class is not an Instance but a TupleType; # the corresponding Instance is set as the fallback type of the # tuple type. tuple_type = None # type: Optional[mypy.types.TupleType] # Is this a named tuple type? is_named_tuple = False # If this class is defined by the TypedDict type constructor, # then this is not None. typeddict_type = None # type: Optional[mypy.types.TypedDictType] # Is this a newtype type? is_newtype = False # If during analysis of ClassDef associated with this TypeInfo a syntethic # type (NamedTuple or TypedDict) was generated, store the corresponding # TypeInfo here. (This attribute does not need to be serialized, it is only # needed during the semantic passes.) replaced = None # type: TypeInfo FLAGS = [ 'is_abstract', 'is_enum', 'fallback_to_any', 'is_named_tuple', 'is_newtype', 'is_protocol', 'runtime_protocol' ] def __init__(self, names: 'SymbolTable', defn: ClassDef, module_name: str) -> None: """Initialize a TypeInfo.""" self.names = names self.defn = defn self.module_name = module_name self.type_vars = [] self.bases = [] # Leave self.mro uninitialized until we compute it for real, # so we don't accidentally try to use it prematurely. self._fullname = defn.fullname self.is_abstract = False self.abstract_attributes = [] self.assuming = [] self.assuming_proper = [] self.inferring = [] self._cache = set() self._cache_proper = set() self.add_type_vars() def add_type_vars(self) -> None: if self.defn.type_vars: for vd in self.defn.type_vars: self.type_vars.append(vd.fullname) def name(self) -> str: """Short name.""" return self.defn.name def fullname(self) -> str: return self._fullname def is_generic(self) -> bool: """Is the type generic (i.e. does it have type variables)?""" return len(self.type_vars) > 0 def get(self, name: str) -> 'Optional[SymbolTableNode]': if self.mro is None: # Might be because of a previous error. 
return None for cls in self.mro: n = cls.names.get(name) if n: return n return None def get_containing_type_info(self, name: str) -> 'Optional[TypeInfo]': for cls in self.mro: if name in cls.names: return cls return None def record_subtype_cache_entry(self, left: 'mypy.types.Instance', right: 'mypy.types.Instance', proper_subtype: bool = False) -> None: if proper_subtype: self._cache_proper.add((left, right)) else: self._cache.add((left, right)) def is_cached_subtype_check(self, left: 'mypy.types.Instance', right: 'mypy.types.Instance', proper_subtype: bool = False) -> bool: if not proper_subtype: return (left, right) in self._cache return (left, right) in self._cache_proper def __getitem__(self, name: str) -> 'SymbolTableNode': n = self.get(name) if n: return n else: raise KeyError(name) def __repr__(self) -> str: return '<TypeInfo %s>' % self.fullname() # IDEA: Refactor the has* methods to be more consistent and document # them. def has_readable_member(self, name: str) -> bool: return self.get(name) is not None def has_method(self, name: str) -> bool: return self.get_method(name) is not None def get_method(self, name: str) -> Optional[FuncBase]: if self.mro is None: # Might be because of a previous error. return None for cls in self.mro: if name in cls.names: node = cls.names[name].node if isinstance(node, FuncBase): return node else: return None return None def calculate_mro(self) -> None: """Calculate and set mro (method resolution order). Raise MroError if the MRO cannot be determined. """ mro = linearize_hierarchy(self) assert mro, "Could not produce a MRO at all for %s" % (self,) self.mro = mro self.is_enum = self._calculate_is_enum() def calculate_metaclass_type(self) -> 'Optional[mypy.types.Instance]': declared = self.declared_metaclass if declared is not None and not declared.type.has_base('builtins.type'): return declared if self._fullname == 'builtins.type': return mypy.types.Instance(self, []) candidates = [s.declared_metaclass for s in self.mro if s.declared_metaclass is not None and s.declared_metaclass.type is not None] for c in candidates: if c.type.mro is None: continue if all(other.type in c.type.mro for other in candidates): return c return None def is_metaclass(self) -> bool: return (self.has_base('builtins.type') or self.fullname() == 'abc.ABCMeta' or self.fallback_to_any) def _calculate_is_enum(self) -> bool: """ If this is "enum.Enum" itself, then yes, it's an enum. If the flag .is_enum has been set on anything in the MRO, it's an enum. """ if self.fullname() == ENUM_BASECLASS: return True if self.mro: return any(type_info.is_enum for type_info in self.mro) return False def has_base(self, fullname: str) -> bool: """Return True if type has a base type with the specified name. This can be either via extension or via implementation. """ if self.mro: for cls in self.mro: if cls.fullname() == fullname: return True return False def direct_base_classes(self) -> 'List[TypeInfo]': """Return the direct base classes. Omit base classes of other base classes. """ return [base.type for base in self.bases] def __str__(self) -> str: """Return a string representation of the type. This includes the most important information about the type.
""" return self.dump() def dump(self, str_conv: 'Optional[mypy.strconv.StrConv]' = None, type_str_conv: 'Optional[mypy.types.TypeStrVisitor]' = None) -> str: """Return a string dump of the contents of the TypeInfo.""" if not str_conv: str_conv = mypy.strconv.StrConv() base = '' # type: str def type_str(typ: 'mypy.types.Type') -> str: if type_str_conv: return typ.accept(type_str_conv) return str(typ) head = 'TypeInfo' + str_conv.format_id(self) if self.bases: base = 'Bases({})'.format(', '.join(type_str(base) for base in self.bases)) mro = 'Mro({})'.format(', '.join(item.fullname() + str_conv.format_id(item) for item in self.mro)) names = [] for name in sorted(self.names): description = name + str_conv.format_id(self.names[name].node) node = self.names[name].node if isinstance(node, Var) and node.type: description += ' ({})'.format(type_str(node.type)) names.append(description) return mypy.strconv.dump_tagged( ['Name({})'.format(self.fullname()), base, mro, ('Names', names)], head, str_conv=str_conv) def serialize(self) -> JsonDict: # NOTE: This is where all ClassDefs originate, so there shouldn't be duplicates. data = {'.class': 'TypeInfo', 'module_name': self.module_name, 'fullname': self.fullname(), 'names': self.names.serialize(self.fullname()), 'defn': self.defn.serialize(), 'abstract_attributes': self.abstract_attributes, 'protocol_members': self.protocol_members, 'type_vars': self.type_vars, 'bases': [b.serialize() for b in self.bases], '_promote': None if self._promote is None else self._promote.serialize(), 'declared_metaclass': (None if self.declared_metaclass is None else self.declared_metaclass.serialize()), 'metaclass_type': None if self.metaclass_type is None else self.metaclass_type.serialize(), 'tuple_type': None if self.tuple_type is None else self.tuple_type.serialize(), 'typeddict_type': None if self.typeddict_type is None else self.typeddict_type.serialize(), 'flags': get_flags(self, TypeInfo.FLAGS), } return data @classmethod def deserialize(cls, data: JsonDict) -> 'TypeInfo': names = SymbolTable.deserialize(data['names']) defn = ClassDef.deserialize(data['defn']) module_name = data['module_name'] ti = TypeInfo(names, defn, module_name) ti._fullname = data['fullname'] # TODO: Is there a reason to reconstruct ti.subtypes? ti.abstract_attributes = data['abstract_attributes'] ti.protocol_members = data['protocol_members'] ti.type_vars = data['type_vars'] ti.bases = [mypy.types.Instance.deserialize(b) for b in data['bases']] ti._promote = (None if data['_promote'] is None else mypy.types.deserialize_type(data['_promote'])) ti.declared_metaclass = (None if data['declared_metaclass'] is None else mypy.types.Instance.deserialize(data['declared_metaclass'])) ti.metaclass_type = (None if data['metaclass_type'] is None else mypy.types.Instance.deserialize(data['metaclass_type'])) # NOTE: ti.mro will be set in the fixup phase. ti.tuple_type = (None if data['tuple_type'] is None else mypy.types.TupleType.deserialize(data['tuple_type'])) ti.typeddict_type = (None if data['typeddict_type'] is None else mypy.types.TypedDictType.deserialize(data['typeddict_type'])) set_flags(ti, data['flags']) return ti class FakeInfo(TypeInfo): # types.py defines a single instance of this class, called types.NOT_READY. # This instance is used as a temporary placeholder in the process of de-serialization # of 'Instance' types. The de-serialization happens in two steps: In the first step, # Instance.type is set to NOT_READY. In the second step (in fixup.py) it is replaced by # an actual TypeInfo. 
If you see the assertion error below, then most probably something # went wrong during the second step and an 'Instance' that raised this error was not fixed. # Note: # 'None' is not used as a dummy value for two reasons: # 1. This will require around 80-100 asserts to make 'mypy --strict-optional mypy' # pass cleanly. # 2. If NOT_READY value is accidentally used somewhere, it will be obvious where the value # is from, whereas a 'None' value could come from anywhere. def __init__(self, *args: Any, **kwargs: Any) -> None: pass def __getattribute__(self, attr: str) -> None: raise AssertionError('De-serialization failure: TypeInfo not fixed') class SymbolTableNode: """Description of a name binding in a symbol table. These are only used as values in module (global), function (local) and class symbol tables (see SymbolTable). The name that is bound is the key in SymbolTable. Symbol tables don't contain direct references to AST nodes primarily because there can be multiple symbol table references to a single AST node (due to imports and aliases), and different references can behave differently. This class describes the unique properties of each reference. The most fundamental attributes are 'kind' and 'node'. The 'node' attribute defines the AST node that the name refers to. For many bindings, including those targeting variables, functions and classes, the kind is one of LDEF, GDEF or MDEF, depending on the scope of the definition. These three kinds can usually be used interchangeably and the difference between local, global and class scopes is mostly descriptive, with no semantic significance. However, some tools that consume mypy ASTs may care about these so they should be correct. A few definitions get special kinds, including type variables (TVAR), imported modules and module aliases (MODULE_REF), and type aliases (TYPE_ALIAS). Type aliases are very special and have additional attributes that are only used for them ('type_override', 'alias_tvars' at least). """ # TODO: This is a mess. Refactor! # TODO: Describe how type aliases work. # Kind of node. Possible values: # - LDEF: local definition # - GDEF: global (module-level) definition # - MDEF: class member definition # - TVAR: TypeVar(...) definition in any scope # - MODULE_REF: reference to a module # - TYPE_ALIAS: type alias # - UNBOUND_IMPORTED: temporary kind for imported names (we don't know the final kind yet) kind = None # type: int # AST node of definition (among others, this can be FuncDef/Var/TypeInfo/TypeVarExpr/MypyFile, # or None for a bound type variable). node = None # type: Optional[SymbolNode] # If this not None, override the type of the 'node' attribute. This is only used for # type aliases. type_override = None # type: Optional[mypy.types.Type] # For generic aliases this stores the (qualified) names of type variables. # (For example see testGenericAliasWithTypeVarsFromDifferentModules.) alias_tvars = None # type: Optional[List[str]] # If False, this name won't be imported via 'from import *'. # This has no effect on names within classes. module_public = True # If True, the name will be never exported (needed for stub files) module_hidden = False # For deserialized MODULE_REF nodes, the referenced module name; # for other nodes, optionally the name of the referenced object. cross_ref = None # type: Optional[str] # Used to distinguish between 'typing.List' and 'builtins.list'. This is # True when the former has been normalized to the latter, and it allow us # to reject 'list[str]' and similar. 
normalized = False # type: bool # Was this defined by assignment to self attribute? implicit = False # type: bool def __init__(self, kind: int, node: Optional[SymbolNode], typ: 'Optional[mypy.types.Type]' = None, module_public: bool = True, normalized: bool = False, alias_tvars: Optional[List[str]] = None, implicit: bool = False, module_hidden: bool = False) -> None: self.kind = kind self.node = node self.type_override = typ self.module_hidden = module_hidden self.module_public = module_public self.normalized = normalized self.alias_tvars = alias_tvars self.implicit = implicit @property def fullname(self) -> Optional[str]: if self.node is not None: return self.node.fullname() else: return None @property def type(self) -> 'Optional[mypy.types.Type]': # IDEA: Get rid of the Any type. node = self.node # type: Any if self.type_override is not None: return self.type_override elif ((isinstance(node, Var) or isinstance(node, FuncDef)) and node.type is not None): return node.type elif isinstance(node, Decorator): return node.var.type else: return None def __str__(self) -> str: s = '{}/{}'.format(node_kinds[self.kind], short_type(self.node)) if isinstance(self.node, SymbolNode): s += ' ({})'.format(self.node.fullname()) # Include declared type of variables and functions. if self.type is not None: s += ' : {}'.format(self.type) return s def serialize(self, prefix: str, name: str) -> JsonDict: """Serialize a SymbolTableNode. Args: prefix: full name of the containing module or class; or None name: name of this object relative to the containing object """ data = {'.class': 'SymbolTableNode', 'kind': node_kinds[self.kind], } # type: JsonDict if self.module_hidden: data['module_hidden'] = True if not self.module_public: data['module_public'] = False if self.normalized: data['normalized'] = True if self.implicit: data['implicit'] = True if self.kind == MODULE_REF: assert self.node is not None, "Missing module cross ref in %s for %s" % (prefix, name) data['cross_ref'] = self.node.fullname() else: if self.node is not None: if prefix is not None: fullname = self.node.fullname() if (fullname is not None and '.' in fullname and fullname != prefix + '.' + name): data['cross_ref'] = fullname return data data['node'] = self.node.serialize() if self.type_override is not None: data['type_override'] = self.type_override.serialize() data['alias_tvars'] = self.alias_tvars return data @classmethod def deserialize(cls, data: JsonDict) -> 'SymbolTableNode': assert data['.class'] == 'SymbolTableNode' kind = inverse_node_kinds[data['kind']] if 'cross_ref' in data: # This will be fixed up later. stnode = SymbolTableNode(kind, None) stnode.cross_ref = data['cross_ref'] else: node = None if 'node' in data: node = SymbolNode.deserialize(data['node']) typ = None if 'type_override' in data: typ = mypy.types.deserialize_type(data['type_override']) stnode = SymbolTableNode(kind, node, typ=typ) if 'alias_tvars' in data: stnode.alias_tvars = data['alias_tvars'] if 'module_hidden' in data: stnode.module_hidden = data['module_hidden'] if 'module_public' in data: stnode.module_public = data['module_public'] if 'normalized' in data: stnode.normalized = data['normalized'] if 'implicit' in data: stnode.implicit = data['implicit'] return stnode class SymbolTable(Dict[str, SymbolTableNode]): def __str__(self) -> str: a = [] # type: List[str] for key, value in self.items(): # Filter out the implicit import of builtins. 
if isinstance(value, SymbolTableNode): if (value.fullname != 'builtins' and (value.fullname or '').split('.')[-1] not in implicit_module_attrs): a.append(' ' + str(key) + ' : ' + str(value)) else: a.append(' ') a = sorted(a) a.insert(0, 'SymbolTable(') a[-1] += ')' return '\n'.join(a) def serialize(self, fullname: str) -> JsonDict: data = {'.class': 'SymbolTable'} # type: JsonDict for key, value in self.items(): # Skip __builtins__: it's a reference to the builtins # module that gets added to every module by # SemanticAnalyzerPass2.visit_file(), but it shouldn't be # accessed by users of the module. if key == '__builtins__': continue data[key] = value.serialize(fullname, key) return data @classmethod def deserialize(cls, data: JsonDict) -> 'SymbolTable': assert data['.class'] == 'SymbolTable' st = SymbolTable() for key, value in data.items(): if key != '.class': st[key] = SymbolTableNode.deserialize(value) return st class MroError(Exception): """Raised if a consistent mro cannot be determined for a class.""" def linearize_hierarchy(info: TypeInfo) -> List[TypeInfo]: # TODO describe if info.mro: return info.mro bases = info.direct_base_classes() lin_bases = [] for base in bases: assert base is not None, "Cannot linearize bases for %s %s" % (info.fullname(), bases) lin_bases.append(linearize_hierarchy(base)) lin_bases.append(bases) return [info] + merge(lin_bases) def merge(seqs: List[List[TypeInfo]]) -> List[TypeInfo]: seqs = [s[:] for s in seqs] result = [] # type: List[TypeInfo] while True: seqs = [s for s in seqs if s] if not seqs: return result for seq in seqs: head = seq[0] if not [s for s in seqs if head in s[1:]]: break else: raise MroError() result.append(head) for s in seqs: if s[0] is head: del s[0] def get_flags(node: Node, names: List[str]) -> List[str]: return [name for name in names if getattr(node, name)] def set_flags(node: Node, flags: List[str]) -> None: for name in flags: setattr(node, name, True) def get_member_expr_fullname(expr: MemberExpr) -> Optional[str]: """Return the qualified name representation of a member expression. Return a string of form foo.bar, foo.bar.baz, or similar, or None if the argument cannot be represented in this form. 
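    For example, for the expression a.b.c (a MemberExpr whose .expr is itself a
    MemberExpr) this returns 'a.b.c', while for (a + b).c it returns None.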
""" initial = None # type: Optional[str] if isinstance(expr.expr, NameExpr): initial = expr.expr.name elif isinstance(expr.expr, MemberExpr): initial = get_member_expr_fullname(expr.expr) else: return None return '{}.{}'.format(initial, expr.name) deserialize_map = { key: obj.deserialize # type: ignore for key, obj in globals().items() if isinstance(obj, type) and issubclass(obj, SymbolNode) and obj is not SymbolNode } def check_arg_kinds(arg_kinds: List[int], nodes: List[T], fail: Callable[[str, T], None]) -> None: is_var_arg = False is_kw_arg = False seen_named = False seen_opt = False for kind, node in zip(arg_kinds, nodes): if kind == ARG_POS: if is_var_arg or is_kw_arg or seen_named or seen_opt: fail("Required positional args may not appear " "after default, named or var args", node) break elif kind == ARG_OPT: if is_var_arg or is_kw_arg or seen_named: fail("Positional default args may not appear after named or var args", node) break seen_opt = True elif kind == ARG_STAR: if is_var_arg or is_kw_arg or seen_named: fail("Var args may not appear after named or var args", node) break is_var_arg = True elif kind == ARG_NAMED or kind == ARG_NAMED_OPT: seen_named = True if is_kw_arg: fail("A **kwargs argument must be the last argument", node) break elif kind == ARG_STAR2: if is_kw_arg: fail("You may only have one **kwargs argument", node) break is_kw_arg = True def check_arg_names(names: Sequence[Optional[str]], nodes: List[T], fail: Callable[[str, T], None], description: str = 'function definition') -> None: seen_names = set() # type: Set[Optional[str]] for name, node in zip(names, nodes): if name is not None and name in seen_names: fail("Duplicate argument '{}' in {}".format(name, description), node) break seen_names.add(name) mypy-0.560/mypy/options.py0000644€tŠÔÚ€2›s®0000001634113215007205021737 0ustar jukkaDROPBOX\Domain Users00000000000000from collections import OrderedDict import fnmatch import pprint import sys from typing import Dict, List, Mapping, MutableMapping, Optional, Pattern, Set, Tuple from mypy import defaults class BuildType: STANDARD = 0 MODULE = 1 PROGRAM_TEXT = 2 class Options: """Options collected from flags.""" PER_MODULE_OPTIONS = { "ignore_missing_imports", "follow_imports", "disallow_any_generics", "disallow_any_unimported", "disallow_any_expr", "disallow_any_decorated", "disallow_any_explicit", "disallow_subclassing_any", "disallow_untyped_calls", "disallow_untyped_defs", "check_untyped_defs", "debug_cache", "strict_optional_whitelist", "show_none_errors", "warn_no_return", "warn_return_any", "ignore_errors", "strict_boolean", "no_implicit_optional", "strict_optional", "disallow_untyped_decorators", } OPTIONS_AFFECTING_CACHE = ((PER_MODULE_OPTIONS | {"quick_and_dirty", "platform"}) - {"debug_cache"}) def __init__(self) -> None: # Cache for clone_for_module() self.clone_cache = {} # type: Dict[str, Options] # -- build options -- self.build_type = BuildType.STANDARD self.python_version = defaults.PYTHON3_VERSION self.platform = sys.platform self.custom_typing_module = None # type: Optional[str] self.custom_typeshed_dir = None # type: Optional[str] self.mypy_path = [] # type: List[str] self.report_dirs = {} # type: Dict[str, str] self.ignore_missing_imports = False self.follow_imports = 'normal' # normal|silent|skip|error # disallow_any options self.disallow_any_generics = False self.disallow_any_unimported = False self.disallow_any_expr = False self.disallow_any_decorated = False self.disallow_any_explicit = False # Disallow calling untyped functions from typed 
ones self.disallow_untyped_calls = False # Disallow defining untyped (or incompletely typed) functions self.disallow_untyped_defs = False # Disallow defining incompletely typed functions self.disallow_incomplete_defs = False # Type check unannotated functions self.check_untyped_defs = False # Disallow decorating typed functions with untyped decorators self.disallow_untyped_decorators = False # Disallow subclassing values of type 'Any' self.disallow_subclassing_any = False # Also check typeshed for missing annotations self.warn_incomplete_stub = False # Warn about casting an expression to its inferred type self.warn_redundant_casts = False # Warn about falling off the end of a function returning non-None self.warn_no_return = True # Warn about returning objects of type Any when the function is # declared with a precise type self.warn_return_any = False # Warn about unused '# type: ignore' comments self.warn_unused_ignores = False # Warn about unused '[mypy-] config sections self.warn_unused_configs = False # Files in which to ignore all non-fatal errors self.ignore_errors = False # Only allow booleans in conditions self.strict_boolean = False # Apply strict None checking self.strict_optional = False # Show "note: In function "foo":" messages. self.show_error_context = False # Files in which to allow strict-Optional related errors # TODO: Kill this in favor of show_none_errors self.strict_optional_whitelist = None # type: Optional[List[str]] # Alternate way to show/hide strict-None-checking related errors self.show_none_errors = True # Don't assume arguments with default values of None are Optional self.no_implicit_optional = False # Use script name instead of __main__ self.scripts_are_modules = False # Config file name self.config_file = None # type: Optional[str] # Write junit.xml to given file self.junit_xml = None # type: Optional[str] # Caching options self.incremental = False self.cache_dir = defaults.CACHE_DIR self.debug_cache = False self.quick_and_dirty = False self.skip_version_check = False # Paths of user plugins self.plugins = [] # type: List[str] # Per-module options (raw) pm_opts = OrderedDict() # type: OrderedDict[Pattern[str], Dict[str, object]] self.per_module_options = pm_opts # Map pattern back to glob self.unused_configs = OrderedDict() # type: OrderedDict[Pattern[str], str] # -- development options -- self.verbosity = 0 # More verbose messages (for troubleshooting) self.pdb = False self.show_traceback = False self.dump_type_stats = False self.dump_inference_stats = False # -- test options -- # Stop after the semantic analysis phase self.semantic_analysis_only = False # Use stub builtins fixtures to speed up tests self.use_builtins_fixtures = False # -- experimental options -- self.shadow_file = None # type: Optional[Tuple[str, str]] self.show_column_numbers = False # type: bool self.dump_graph = False self.dump_deps = False def __eq__(self, other: object) -> bool: return self.__class__ == other.__class__ and self.__dict__ == other.__dict__ def __ne__(self, other: object) -> bool: return not self == other def __repr__(self) -> str: return 'Options({})'.format(pprint.pformat(self.__dict__)) def clone_for_module(self, module: str) -> 'Options': """Create an Options object that incorporates per-module options. NOTE: Once this method is called all Options objects should be considered read-only, else the caching might be incorrect. 
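        For example (hypothetical configuration): given a per-module config section such
        as [mypy-foo.*] with ignore_missing_imports = True, clone_for_module('foo.bar')
        returns a copy of these options with that flag enabled, while
        clone_for_module('unrelated') simply returns self.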
""" res = self.clone_cache.get(module) if res is not None: return res updates = {} for pattern in self.per_module_options: if self.module_matches_pattern(module, pattern): if pattern in self.unused_configs: del self.unused_configs[pattern] updates.update(self.per_module_options[pattern]) if not updates: self.clone_cache[module] = self return self new_options = Options() new_options.__dict__.update(self.__dict__) new_options.__dict__.update(updates) self.clone_cache[module] = new_options return new_options def module_matches_pattern(self, module: str, pattern: Pattern[str]) -> bool: # If the pattern is 'mod.*', we want 'mod' to match that too. # (That's so that a pattern specifying a package also matches # that package's __init__.) return pattern.match(module) is not None or pattern.match(module + '.') is not None def select_options_affecting_cache(self) -> Mapping[str, bool]: return {opt: getattr(self, opt) for opt in self.OPTIONS_AFFECTING_CACHE} mypy-0.560/mypy/parse.py0000644€tŠÔÚ€2›s®0000000241413215007205021352 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Tuple, Set, cast, Union, Optional from mypy.errors import Errors from mypy.options import Options from mypy.nodes import MypyFile def parse(source: Union[str, bytes], fnam: str, module: Optional[str], errors: Optional[Errors], options: Options) -> MypyFile: """Parse a source file, without doing any semantic analysis. Return the parse tree. If errors is not provided, raise ParseError on failure. Otherwise, use the errors object to report parse errors. The python_version (major, minor) option determines the Python syntax variant. """ is_stub_file = fnam.endswith('.pyi') if options.python_version[0] >= 3 or is_stub_file: import mypy.fastparse return mypy.fastparse.parse(source, fnam=fnam, module=module, errors=errors, options=options) else: import mypy.fastparse2 return mypy.fastparse2.parse(source, fnam=fnam, module=module, errors=errors, options=options) mypy-0.560/mypy/plugin.py0000644€tŠÔÚ€2›s®0000003203613215007206021542 0ustar jukkaDROPBOX\Domain Users00000000000000"""Plugin system for extending mypy.""" from collections import OrderedDict from abc import abstractmethod from typing import Callable, List, Tuple, Optional, NamedTuple, TypeVar from mypy.nodes import Expression, StrExpr, IntExpr, UnaryExpr, Context, DictExpr from mypy.types import ( Type, Instance, CallableType, TypedDictType, UnionType, NoneTyp, FunctionLike, TypeVarType, AnyType, TypeList, UnboundType, TypeOfAny ) from mypy.messages import MessageBuilder from mypy.options import Options class AnalyzerPluginInterface: """Interface for accessing semantic analyzer functionality in plugins.""" @abstractmethod def fail(self, msg: str, ctx: Context) -> None: raise NotImplementedError @abstractmethod def named_type(self, name: str, args: List[Type]) -> Instance: raise NotImplementedError @abstractmethod def analyze_type(self, typ: Type) -> Type: raise NotImplementedError @abstractmethod def analyze_callable_args(self, arglist: TypeList) -> Optional[Tuple[List[Type], List[int], List[Optional[str]]]]: raise NotImplementedError # A context for a hook that semantically analyzes an unbound type. 
AnalyzeTypeContext = NamedTuple( 'AnalyzeTypeContext', [ ('type', UnboundType), # Type to analyze ('context', Context), ('api', AnalyzerPluginInterface)]) class CheckerPluginInterface: """Interface for accessing type checker functionality in plugins.""" msg = None # type: MessageBuilder @abstractmethod def named_generic_type(self, name: str, args: List[Type]) -> Instance: raise NotImplementedError # A context for a function hook that infers the return type of a function with # a special signature. # # A no-op callback would just return the inferred return type, but a useful # callback at least sometimes can infer a more precise type. FunctionContext = NamedTuple( 'FunctionContext', [ ('arg_types', List[List[Type]]), # List of actual caller types for each formal argument ('default_return_type', Type), # Return type inferred from signature ('args', List[List[Expression]]), # Actual expressions for each formal argument ('context', Context), ('api', CheckerPluginInterface)]) # A context for a method signature hook that infers a better signature for a # method. Note that argument types aren't available yet. If you need them, # you have to use a method hook instead. MethodSigContext = NamedTuple( 'MethodSigContext', [ ('type', Type), # Base object type for method call ('args', List[List[Expression]]), # Actual expressions for each formal argument ('default_signature', CallableType), # Original signature of the method ('context', Context), ('api', CheckerPluginInterface)]) # A context for a method hook that infers the return type of a method with a # special signature. # # This is very similar to FunctionContext (only differences are documented). MethodContext = NamedTuple( 'MethodContext', [ ('type', Type), # Base object type for method call ('arg_types', List[List[Type]]), ('default_return_type', Type), ('args', List[List[Expression]]), ('context', Context), ('api', CheckerPluginInterface)]) # A context for an attribute type hook that infers the type of an attribute. AttributeContext = NamedTuple( 'AttributeContext', [ ('type', Type), # Type of object with attribute ('default_attr_type', Type), # Original attribute type ('context', Context), ('api', CheckerPluginInterface)]) class Plugin: """Base class of all type checker plugins. This defines a no-op plugin. Subclasses can override some methods to provide some actual functionality. All get_ methods are treated as pure functions (you should assume that results might be cached). Look at the comments of various *Context objects for descriptions of various hooks. """ def __init__(self, options: Options) -> None: self.options = options self.python_version = options.python_version def get_type_analyze_hook(self, fullname: str ) -> Optional[Callable[[AnalyzeTypeContext], Type]]: return None def get_function_hook(self, fullname: str ) -> Optional[Callable[[FunctionContext], Type]]: return None def get_method_signature_hook(self, fullname: str ) -> Optional[Callable[[MethodSigContext], CallableType]]: return None def get_method_hook(self, fullname: str ) -> Optional[Callable[[MethodContext], Type]]: return None def get_attribute_hook(self, fullname: str ) -> Optional[Callable[[AttributeContext], Type]]: return None # TODO: metaclass / class decorator hook T = TypeVar('T') class ChainedPlugin(Plugin): """A plugin that represents a sequence of chained plugins. Each lookup method returns the hook for the first plugin that reports a match. This class should not be subclassed -- use Plugin as the base class for all plugins. 
""" # TODO: Support caching of lookup results (through a LRU cache, for example). def __init__(self, options: Options, plugins: List[Plugin]) -> None: """Initialize chained plugin. Assume that the child plugins aren't mutated (results may be cached). """ super().__init__(options) self._plugins = plugins def get_type_analyze_hook(self, fullname: str ) -> Optional[Callable[[AnalyzeTypeContext], Type]]: return self._find_hook(lambda plugin: plugin.get_type_analyze_hook(fullname)) def get_function_hook(self, fullname: str ) -> Optional[Callable[[FunctionContext], Type]]: return self._find_hook(lambda plugin: plugin.get_function_hook(fullname)) def get_method_signature_hook(self, fullname: str ) -> Optional[Callable[[MethodSigContext], CallableType]]: return self._find_hook(lambda plugin: plugin.get_method_signature_hook(fullname)) def get_method_hook(self, fullname: str ) -> Optional[Callable[[MethodContext], Type]]: return self._find_hook(lambda plugin: plugin.get_method_hook(fullname)) def get_attribute_hook(self, fullname: str ) -> Optional[Callable[[AttributeContext], Type]]: return self._find_hook(lambda plugin: plugin.get_attribute_hook(fullname)) def _find_hook(self, lookup: Callable[[Plugin], T]) -> Optional[T]: for plugin in self._plugins: hook = lookup(plugin) if hook: return hook return None class DefaultPlugin(Plugin): """Type checker plugin that is enabled by default.""" def get_function_hook(self, fullname: str ) -> Optional[Callable[[FunctionContext], Type]]: if fullname == 'contextlib.contextmanager': return contextmanager_callback elif fullname == 'builtins.open' and self.python_version[0] == 3: return open_callback return None def get_method_signature_hook(self, fullname: str ) -> Optional[Callable[[MethodSigContext], CallableType]]: if fullname == 'typing.Mapping.get': return typed_dict_get_signature_callback return None def get_method_hook(self, fullname: str ) -> Optional[Callable[[MethodContext], Type]]: if fullname == 'typing.Mapping.get': return typed_dict_get_callback elif fullname == 'builtins.int.__pow__': return int_pow_callback return None def open_callback(ctx: FunctionContext) -> Type: """Infer a better return type for 'open'. Infer TextIO or BinaryIO as the return value if the mode argument is not given or is a literal. """ mode = None if not ctx.arg_types or len(ctx.arg_types[1]) != 1: mode = 'r' elif isinstance(ctx.args[1][0], StrExpr): mode = ctx.args[1][0].value if mode is not None: assert isinstance(ctx.default_return_type, Instance) if 'b' in mode: return ctx.api.named_generic_type('typing.BinaryIO', []) else: return ctx.api.named_generic_type('typing.TextIO', []) return ctx.default_return_type def contextmanager_callback(ctx: FunctionContext) -> Type: """Infer a better return type for 'contextlib.contextmanager'.""" # Be defensive, just in case. if ctx.arg_types and len(ctx.arg_types[0]) == 1: arg_type = ctx.arg_types[0][0] if (isinstance(arg_type, CallableType) and isinstance(ctx.default_return_type, CallableType)): # The stub signature doesn't preserve information about arguments so # add them back here. return ctx.default_return_type.copy_modified( arg_types=arg_type.arg_types, arg_kinds=arg_type.arg_kinds, arg_names=arg_type.arg_names, variables=arg_type.variables, is_ellipsis_args=arg_type.is_ellipsis_args) return ctx.default_return_type def typed_dict_get_signature_callback(ctx: MethodSigContext) -> CallableType: """Try to infer a better signature type for TypedDict.get. 
This is used to get better type context for the second argument that depends on a TypedDict value type. """ signature = ctx.default_signature if (isinstance(ctx.type, TypedDictType) and len(ctx.args) == 2 and len(ctx.args[0]) == 1 and isinstance(ctx.args[0][0], StrExpr) and len(signature.arg_types) == 2 and len(signature.variables) == 1 and len(ctx.args[1]) == 1): key = ctx.args[0][0].value value_type = ctx.type.items.get(key) ret_type = signature.ret_type if value_type: default_arg = ctx.args[1][0] if (isinstance(value_type, TypedDictType) and isinstance(default_arg, DictExpr) and len(default_arg.items) == 0): # Caller has empty dict {} as default for typed dict. value_type = value_type.copy_modified(required_keys=set()) # Tweak the signature to include the value type as context. It's # only needed for type inference since there's a union with a type # variable that accepts everything. tv = TypeVarType(signature.variables[0]) return signature.copy_modified( arg_types=[signature.arg_types[0], UnionType.make_simplified_union([value_type, tv])], ret_type=ret_type) return signature def typed_dict_get_callback(ctx: MethodContext) -> Type: """Infer a precise return type for TypedDict.get with literal first argument.""" if (isinstance(ctx.type, TypedDictType) and len(ctx.arg_types) >= 1 and len(ctx.arg_types[0]) == 1): if isinstance(ctx.args[0][0], StrExpr): key = ctx.args[0][0].value value_type = ctx.type.items.get(key) if value_type: if len(ctx.arg_types) == 1: return UnionType.make_simplified_union([value_type, NoneTyp()]) elif (len(ctx.arg_types) == 2 and len(ctx.arg_types[1]) == 1 and len(ctx.args[1]) == 1): default_arg = ctx.args[1][0] if (isinstance(default_arg, DictExpr) and len(default_arg.items) == 0 and isinstance(value_type, TypedDictType)): # Special case '{}' as the default for a typed dict type. return value_type.copy_modified(required_keys=set()) else: return UnionType.make_simplified_union([value_type, ctx.arg_types[1][0]]) else: ctx.api.msg.typeddict_key_not_found(ctx.type, key, ctx.context) return AnyType(TypeOfAny.from_error) return ctx.default_return_type def int_pow_callback(ctx: MethodContext) -> Type: """Infer a more precise return type for int.__pow__.""" if (len(ctx.arg_types) == 1 and len(ctx.arg_types[0]) == 1): arg = ctx.args[0][0] if isinstance(arg, IntExpr): exponent = arg.value elif isinstance(arg, UnaryExpr) and arg.op == '-' and isinstance(arg.expr, IntExpr): exponent = -arg.expr.value else: # Right operand not an int literal or a negated literal -- give up. 
return ctx.default_return_type if exponent >= 0: return ctx.api.named_generic_type('builtins.int', []) else: return ctx.api.named_generic_type('builtins.float', []) return ctx.default_return_type mypy-0.560/mypy/report.py0000644€tŠÔÚ€2›s®0000007201713215007205021561 0ustar jukkaDROPBOX\Domain Users00000000000000"""Classes for producing HTML reports about imprecision.""" from abc import ABCMeta, abstractmethod import collections import json import os import shutil import tokenize import typing from operator import attrgetter from urllib.request import pathname2url from typing import Any, Callable, Dict, List, Optional, Tuple, cast import time import sys import itertools from mypy.nodes import MypyFile, Expression, FuncDef from mypy import stats from mypy.options import Options from mypy.traverser import TraverserVisitor from mypy.types import Type, TypeOfAny from mypy.version import __version__ try: import lxml.etree as etree # type: ignore LXML_INSTALLED = True except ImportError: LXML_INSTALLED = False type_of_any_name_map = collections.OrderedDict([ (TypeOfAny.unannotated, "Unannotated"), (TypeOfAny.explicit, "Explicit"), (TypeOfAny.from_unimported_type, "Unimported"), (TypeOfAny.from_omitted_generics, "Omitted Generics"), (TypeOfAny.from_error, "Error"), (TypeOfAny.special_form, "Special Form"), ]) # type: collections.OrderedDict[TypeOfAny, str] reporter_classes = {} # type: Dict[str, Tuple[Callable[[Reports, str], AbstractReporter], bool]] class Reports: def __init__(self, data_dir: str, report_dirs: Dict[str, str]) -> None: self.data_dir = data_dir self.reporters = [] # type: List[AbstractReporter] self.named_reporters = {} # type: Dict[str, AbstractReporter] for report_type, report_dir in sorted(report_dirs.items()): self.add_report(report_type, report_dir) def add_report(self, report_type: str, report_dir: str) -> 'AbstractReporter': try: return self.named_reporters[report_type] except KeyError: pass reporter_cls, needs_lxml = reporter_classes[report_type] if needs_lxml and not LXML_INSTALLED: print(('You must install the lxml package before you can run mypy' ' with `--{}-report`.\n' 'You can do this with `python3 -m pip install lxml`.').format(report_type), file=sys.stderr) raise ImportError reporter = reporter_cls(self, report_dir) self.reporters.append(reporter) self.named_reporters[report_type] = reporter return reporter def file(self, tree: MypyFile, type_map: Dict[Expression, Type], options: Options) -> None: for reporter in self.reporters: reporter.on_file(tree, type_map, options) def finish(self) -> None: for reporter in self.reporters: reporter.on_finish() class AbstractReporter(metaclass=ABCMeta): def __init__(self, reports: Reports, output_dir: str) -> None: self.output_dir = output_dir @abstractmethod def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type], options: Options) -> None: pass @abstractmethod def on_finish(self) -> None: pass def register_reporter(report_name: str, reporter: Callable[[Reports, str], AbstractReporter], needs_lxml: bool = False) -> None: reporter_classes[report_name] = (reporter, needs_lxml) def alias_reporter(source_reporter: str, target_reporter: str) -> None: reporter_classes[target_reporter] = reporter_classes[source_reporter] class FuncCounterVisitor(TraverserVisitor): def __init__(self) -> None: super().__init__() self.counts = [0, 0] def visit_func_def(self, defn: FuncDef) -> None: self.counts[defn.type is not None] += 1 class LineCountReporter(AbstractReporter): def __init__(self, reports: Reports, output_dir: str) -> None: 
super().__init__(reports, output_dir) self.counts = {} # type: Dict[str, Tuple[int, int, int, int]] stats.ensure_dir_exists(output_dir) def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type], options: Options) -> None: # Count physical lines. This assumes the file's encoding is a # superset of ASCII (or at least uses \n in its line endings). with open(tree.path, 'rb') as f: physical_lines = len(f.readlines()) func_counter = FuncCounterVisitor() tree.accept(func_counter) unannotated_funcs, annotated_funcs = func_counter.counts total_funcs = annotated_funcs + unannotated_funcs # Don't count lines or functions as annotated if they have their errors ignored. if options.ignore_errors: annotated_funcs = 0 imputed_annotated_lines = (physical_lines * annotated_funcs // total_funcs if total_funcs else physical_lines) self.counts[tree._fullname] = (imputed_annotated_lines, physical_lines, annotated_funcs, total_funcs) def on_finish(self) -> None: counts = sorted(((c, p) for p, c in self.counts.items()), reverse=True) # type: List[Tuple[Tuple[int, int, int, int], str]] total_counts = tuple(sum(c[i] for c, p in counts) for i in range(4)) with open(os.path.join(self.output_dir, 'linecount.txt'), 'w') as f: f.write('{:7} {:7} {:6} {:6} total\n'.format(*total_counts)) for c, p in counts: f.write('{:7} {:7} {:6} {:6} {}\n'.format( c[0], c[1], c[2], c[3], p)) register_reporter('linecount', LineCountReporter) class AnyExpressionsReporter(AbstractReporter): def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) self.counts = {} # type: Dict[str, Tuple[int, int]] self.any_types_counter = {} # type: Dict[str, typing.Counter[TypeOfAny]] stats.ensure_dir_exists(output_dir) def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type], options: Options) -> None: visitor = stats.StatisticsVisitor(inferred=True, filename=tree.fullname(), typemap=type_map, all_nodes=True, visit_untyped_defs=False) tree.accept(visitor) self.any_types_counter[tree.fullname()] = visitor.type_of_any_counter num_unanalyzed_lines = list(visitor.line_map.values()).count(stats.TYPE_UNANALYZED) # count each line of dead code as one expression of type "Any" num_any = visitor.num_any_exprs + num_unanalyzed_lines num_total = visitor.num_imprecise_exprs + visitor.num_precise_exprs + num_any if num_total > 0: self.counts[tree.fullname()] = (num_any, num_total) def on_finish(self) -> None: self._report_any_exprs() self._report_types_of_anys() def _write_out_report(self, filename: str, header: List[str], rows: List[List[str]], footer: List[str], ) -> None: row_len = len(header) assert all(len(row) == row_len for row in rows + [header, footer]) min_column_distance = 3 # minimum distance between numbers in two columns widths = [-1] * row_len for row in rows + [header, footer]: for i, value in enumerate(row): widths[i] = max(widths[i], len(value)) for i, w in enumerate(widths): # Do not add min_column_distance to the first column. 
if i > 0: widths[i] = w + min_column_distance with open(os.path.join(self.output_dir, filename), 'w') as f: header_str = ("{:>{}}" * len(widths)).format(*itertools.chain(*zip(header, widths))) separator = '-' * len(header_str) f.write(header_str + '\n') f.write(separator + '\n') for row_values in rows: r = ("{:>{}}" * len(widths)).format(*itertools.chain(*zip(row_values, widths))) f.writelines(r + '\n') f.write(separator + '\n') footer_str = ("{:>{}}" * len(widths)).format(*itertools.chain(*zip(footer, widths))) f.writelines(footer_str + '\n') def _report_any_exprs(self) -> None: total_any = sum(num_any for num_any, _ in self.counts.values()) total_expr = sum(total for _, total in self.counts.values()) total_coverage = 100.0 if total_expr > 0: total_coverage = (float(total_expr - total_any) / float(total_expr)) * 100 column_names = ["Name", "Anys", "Exprs", "Coverage"] rows = [] # type: List[List[str]] for filename in sorted(self.counts): (num_any, num_total) = self.counts[filename] coverage = (float(num_total - num_any) / float(num_total)) * 100 coverage_str = '{:.2f}%'.format(coverage) rows.append([filename, str(num_any), str(num_total), coverage_str]) total_row = ["Total", str(total_any), str(total_expr), '{:.2f}%'.format(total_coverage)] self._write_out_report('any-exprs.txt', column_names, rows, total_row) def _report_types_of_anys(self) -> None: total_counter = collections.Counter() # type: typing.Counter[TypeOfAny] for counter in self.any_types_counter.values(): for any_type, value in counter.items(): total_counter[any_type] += value file_column_name = "Name" total_row_name = "Total" column_names = [file_column_name] + list(type_of_any_name_map.values()) rows = [] # type: List[List[str]] for filename, counter in self.any_types_counter.items(): rows.append([filename] + [str(counter[typ]) for typ in type_of_any_name_map]) total_row = [total_row_name] + [str(total_counter[typ]) for typ in type_of_any_name_map] self._write_out_report('types-of-anys.txt', column_names, rows, total_row) register_reporter('any-exprs', AnyExpressionsReporter) class LineCoverageVisitor(TraverserVisitor): def __init__(self, source: List[str]) -> None: self.source = source # For each line of source, we maintain a pair of # * the indentation level of the surrounding function # (-1 if not inside a function), and # * whether the surrounding function is typed. # Initially, everything is covered at indentation level -1. self.lines_covered = [(-1, True) for l in source] # The Python AST has position information for the starts of # elements, but not for their ends. Fortunately the # indentation-based syntax makes it pretty easy to find where a # block ends without doing any real parsing. # TODO: Handle line continuations (explicit and implicit) and # multi-line string literals. (But at least line continuations # are normally more indented than their surrounding block anyways, # by PEP 8.) def indentation_level(self, line_number: int) -> Optional[int]: """Return the indentation of a line of the source (specified by zero-indexed line number). 
Returns None for blank lines or comments.""" line = self.source[line_number] indent = 0 for char in list(line): if char == ' ': indent += 1 elif char == '\t': indent = 8 * ((indent + 8) // 8) elif char == '#': # Line is a comment; ignore it return None elif char == '\n': # Line is entirely whitespace; ignore it return None # TODO line continuation (\) else: # Found a non-whitespace character return indent # Line is entirely whitespace, and at end of file # with no trailing newline; ignore it return None def visit_func_def(self, defn: FuncDef) -> None: start_line = defn.get_line() - 1 start_indent = self.indentation_level(start_line) cur_line = start_line + 1 end_line = cur_line # After this loop, function body will be lines [start_line, end_line) while cur_line < len(self.source): cur_indent = self.indentation_level(cur_line) if cur_indent is None: # Consume the line, but don't mark it as belonging to the function yet. cur_line += 1 elif start_indent is not None and cur_indent > start_indent: # A non-blank line that belongs to the function. cur_line += 1 end_line = cur_line else: # We reached a line outside the function definition. break is_typed = defn.type is not None for line in range(start_line, end_line): old_indent, _ = self.lines_covered[line] assert start_indent is not None and start_indent > old_indent self.lines_covered[line] = (start_indent, is_typed) # Visit the body, in case there are nested functions super().visit_func_def(defn) class LineCoverageReporter(AbstractReporter): """Exact line coverage reporter. This reporter writes a JSON dictionary with one field 'lines' to the file 'coverage.json' in the specified report directory. The value of that field is a dictionary which associates to each source file's absolute pathname the list of line numbers that belong to typed functions in that file. """ def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) self.lines_covered = {} # type: Dict[str, List[int]] stats.ensure_dir_exists(output_dir) def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type], options: Options) -> None: with open(tree.path) as f: tree_source = f.readlines() coverage_visitor = LineCoverageVisitor(tree_source) tree.accept(coverage_visitor) covered_lines = [] for line_number, (_, typed) in enumerate(coverage_visitor.lines_covered): if typed: covered_lines.append(line_number + 1) self.lines_covered[os.path.abspath(tree.path)] = covered_lines def on_finish(self) -> None: with open(os.path.join(self.output_dir, 'coverage.json'), 'w') as f: json.dump({'lines': self.lines_covered}, f) register_reporter('linecoverage', LineCoverageReporter) class FileInfo: def __init__(self, name: str, module: str) -> None: self.name = name self.module = module self.counts = [0] * len(stats.precision_names) def total(self) -> int: return sum(self.counts) def attrib(self) -> Dict[str, str]: return {name: str(val) for name, val in zip(stats.precision_names, self.counts)} class MemoryXmlReporter(AbstractReporter): """Internal reporter that generates XML in memory. This is used by all other XML-based reporters to avoid duplication. 
""" def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) self.xslt_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.xslt') self.xslt_txt_path = os.path.join(reports.data_dir, 'xml', 'mypy-txt.xslt') self.css_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.css') xsd_path = os.path.join(reports.data_dir, 'xml', 'mypy.xsd') self.schema = etree.XMLSchema(etree.parse(xsd_path)) self.last_xml = None # type: Optional[Any] self.files = [] # type: List[FileInfo] def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type], options: Options) -> None: self.last_xml = None path = os.path.relpath(tree.path) if stats.is_special_module(path): return if path.startswith('..'): return if 'stubs' in path.split('/'): return visitor = stats.StatisticsVisitor(inferred=True, filename=tree.fullname(), typemap=type_map, all_nodes=True) tree.accept(visitor) root = etree.Element('mypy-report-file', name=path, module=tree._fullname) doc = etree.ElementTree(root) file_info = FileInfo(path, tree._fullname) with tokenize.open(path) as input_file: for lineno, line_text in enumerate(input_file, 1): status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) file_info.counts[status] += 1 etree.SubElement(root, 'line', number=str(lineno), precision=stats.precision_names[status], content=line_text.rstrip('\n'), any_info=self._get_any_info_for_line(visitor, lineno)) # Assumes a layout similar to what XmlReporter uses. xslt_path = os.path.relpath('mypy-html.xslt', path) transform_pi = etree.ProcessingInstruction('xml-stylesheet', 'type="text/xsl" href="%s"' % pathname2url(xslt_path)) root.addprevious(transform_pi) self.schema.assertValid(doc) self.last_xml = doc self.files.append(file_info) @staticmethod def _get_any_info_for_line(visitor: stats.StatisticsVisitor, lineno: int) -> str: if lineno in visitor.any_line_map: result = "Any Types on this line: " counter = collections.Counter() # type: typing.Counter[TypeOfAny] for typ in visitor.any_line_map[lineno]: counter[typ.type_of_any] += 1 for any_type, occurrences in counter.items(): result += "\n{} (x{})".format(type_of_any_name_map[any_type], occurrences) return result else: return "No Anys on this line!" 
def on_finish(self) -> None: self.last_xml = None # index_path = os.path.join(self.output_dir, 'index.xml') output_files = sorted(self.files, key=lambda x: x.module) root = etree.Element('mypy-report-index', name='index') doc = etree.ElementTree(root) for file_info in output_files: etree.SubElement(root, 'file', file_info.attrib(), total=str(file_info.total()), name=file_info.name, module=file_info.module) xslt_path = os.path.relpath('mypy-html.xslt', '.') transform_pi = etree.ProcessingInstruction('xml-stylesheet', 'type="text/xsl" href="%s"' % pathname2url(xslt_path)) root.addprevious(transform_pi) self.schema.assertValid(doc) self.last_xml = doc register_reporter('memory-xml', MemoryXmlReporter, needs_lxml=True) def get_line_rate(covered_lines: int, total_lines: int) -> str: if total_lines == 0: return str(1.0) else: return '{:.4f}'.format(covered_lines / total_lines) class CoberturaPackage(object): """Container for XML and statistics mapping python modules to Cobertura package """ def __init__(self, name: str) -> None: self.name = name self.classes = {} # type: Dict[str, Any] self.packages = {} # type: Dict[str, CoberturaPackage] self.total_lines = 0 self.covered_lines = 0 def as_xml(self) -> Any: package_element = etree.Element('package', name=self.name, complexity='1.0') package_element.attrib['branch-rate'] = '0' package_element.attrib['line-rate'] = get_line_rate(self.covered_lines, self.total_lines) classes_element = etree.SubElement(package_element, 'classes') for class_name in sorted(self.classes): classes_element.append(self.classes[class_name]) self.add_packages(package_element) return package_element def add_packages(self, parent_element: Any) -> None: if self.packages: packages_element = etree.SubElement(parent_element, 'packages') for package in sorted(self.packages.values(), key=attrgetter('name')): packages_element.append(package.as_xml()) class CoberturaXmlReporter(AbstractReporter): """Reporter for generating Cobertura compliant XML. 
""" def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) self.root = etree.Element('coverage', timestamp=str(int(time.time())), version=__version__) self.doc = etree.ElementTree(self.root) self.root_package = CoberturaPackage('.') def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type], options: Options) -> None: path = os.path.relpath(tree.path) visitor = stats.StatisticsVisitor(inferred=True, filename=tree.fullname(), typemap=type_map, all_nodes=True) tree.accept(visitor) class_name = os.path.basename(path) file_info = FileInfo(path, tree._fullname) class_element = etree.Element('class', filename=path, complexity='1.0', name=class_name) etree.SubElement(class_element, 'methods') lines_element = etree.SubElement(class_element, 'lines') with tokenize.open(path) as input_file: class_lines_covered = 0 class_total_lines = 0 for lineno, _ in enumerate(input_file, 1): status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) hits = 0 branch = False if status == stats.TYPE_EMPTY: continue class_total_lines += 1 if status != stats.TYPE_ANY: class_lines_covered += 1 hits = 1 if status == stats.TYPE_IMPRECISE: branch = True file_info.counts[status] += 1 line_element = etree.SubElement(lines_element, 'line', number=str(lineno), precision=stats.precision_names[status], hits=str(hits), branch=str(branch).lower()) if branch: line_element.attrib['condition-coverage'] = '50% (1/2)' class_element.attrib['branch-rate'] = '0' class_element.attrib['line-rate'] = get_line_rate(class_lines_covered, class_total_lines) # parent_module is set to whichever module contains this file. For most files, we want # to simply strip the last element off of the module. But for __init__.py files, # the module == the parent module. parent_module = file_info.module.rsplit('.', 1)[0] if file_info.name.endswith('__init__.py'): parent_module = file_info.module if parent_module not in self.root_package.packages: self.root_package.packages[parent_module] = CoberturaPackage(parent_module) current_package = self.root_package.packages[parent_module] packages_to_update = [self.root_package, current_package] for package in packages_to_update: package.total_lines += class_total_lines package.covered_lines += class_lines_covered current_package.classes[class_name] = class_element def on_finish(self) -> None: self.root.attrib['line-rate'] = get_line_rate(self.root_package.covered_lines, self.root_package.total_lines) self.root.attrib['branch-rate'] = '0' sources = etree.SubElement(self.root, 'sources') source_element = etree.SubElement(sources, 'source') source_element.text = os.getcwd() self.root_package.add_packages(self.root) out_path = os.path.join(self.output_dir, 'cobertura.xml') self.doc.write(out_path, encoding='utf-8', pretty_print=True) print('Generated Cobertura report:', os.path.abspath(out_path)) register_reporter('cobertura-xml', CoberturaXmlReporter, needs_lxml=True) class AbstractXmlReporter(AbstractReporter): """Internal abstract class for reporters that work via XML.""" def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) memory_reporter = reports.add_report('memory-xml', '') # The dependency will be called first. self.memory_xml = cast(MemoryXmlReporter, memory_reporter) class XmlReporter(AbstractXmlReporter): """Public reporter that exports XML. The produced XML files contain a reference to the absolute path of the html transform, so they will be locally viewable in a browser. 
However, there is a bug in Chrome and all other WebKit-based browsers that makes it fail from file:// URLs but work on http:// URLs. """ def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type], options: Options) -> None: last_xml = self.memory_xml.last_xml if last_xml is None: return path = os.path.relpath(tree.path) if path.startswith('..'): return out_path = os.path.join(self.output_dir, 'xml', path + '.xml') stats.ensure_dir_exists(os.path.dirname(out_path)) last_xml.write(out_path, encoding='utf-8') def on_finish(self) -> None: last_xml = self.memory_xml.last_xml assert last_xml is not None out_path = os.path.join(self.output_dir, 'index.xml') out_xslt = os.path.join(self.output_dir, 'mypy-html.xslt') out_css = os.path.join(self.output_dir, 'mypy-html.css') last_xml.write(out_path, encoding='utf-8') shutil.copyfile(self.memory_xml.xslt_html_path, out_xslt) shutil.copyfile(self.memory_xml.css_html_path, out_css) print('Generated XML report:', os.path.abspath(out_path)) register_reporter('xml', XmlReporter, needs_lxml=True) class XsltHtmlReporter(AbstractXmlReporter): """Public reporter that exports HTML via XSLT. This is slightly different than running `xsltproc` on the .xml files, because it passes a parameter to rewrite the links. """ def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) self.xslt_html = etree.XSLT(etree.parse(self.memory_xml.xslt_html_path)) self.param_html = etree.XSLT.strparam('html') def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type], options: Options) -> None: last_xml = self.memory_xml.last_xml if last_xml is None: return path = os.path.relpath(tree.path) if path.startswith('..'): return out_path = os.path.join(self.output_dir, 'html', path + '.html') stats.ensure_dir_exists(os.path.dirname(out_path)) transformed_html = bytes(self.xslt_html(last_xml, ext=self.param_html)) with open(out_path, 'wb') as out_file: out_file.write(transformed_html) def on_finish(self) -> None: last_xml = self.memory_xml.last_xml assert last_xml is not None out_path = os.path.join(self.output_dir, 'index.html') out_css = os.path.join(self.output_dir, 'mypy-html.css') transformed_html = bytes(self.xslt_html(last_xml, ext=self.param_html)) with open(out_path, 'wb') as out_file: out_file.write(transformed_html) shutil.copyfile(self.memory_xml.css_html_path, out_css) print('Generated HTML report (via XSLT):', os.path.abspath(out_path)) register_reporter('xslt-html', XsltHtmlReporter, needs_lxml=True) class XsltTxtReporter(AbstractXmlReporter): """Public reporter that exports TXT via XSLT. Currently this only does the summary, not the individual reports. 
""" def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) self.xslt_txt = etree.XSLT(etree.parse(self.memory_xml.xslt_txt_path)) def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type], options: Options) -> None: pass def on_finish(self) -> None: last_xml = self.memory_xml.last_xml assert last_xml is not None out_path = os.path.join(self.output_dir, 'index.txt') stats.ensure_dir_exists(os.path.dirname(out_path)) transformed_txt = bytes(self.xslt_txt(last_xml)) with open(out_path, 'wb') as out_file: out_file.write(transformed_txt) print('Generated TXT report (via XSLT):', os.path.abspath(out_path)) register_reporter('xslt-txt', XsltTxtReporter, needs_lxml=True) alias_reporter('xslt-html', 'html') alias_reporter('xslt-txt', 'txt') mypy-0.560/mypy/sametypes.py0000644€tŠÔÚ€2›s®0000001265613215007205022263 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Sequence from mypy.types import ( Type, UnboundType, AnyType, NoneTyp, TupleType, TypedDictType, UnionType, CallableType, TypeVarType, Instance, TypeVisitor, ErasedType, TypeList, Overloaded, PartialType, DeletedType, UninhabitedType, TypeType ) def is_same_type(left: Type, right: Type) -> bool: """Is 'left' the same type as 'right'?""" if isinstance(right, UnboundType): # Make unbound types same as anything else to reduce the number of # generated spurious error messages. return True else: # Simplify types to canonical forms. # # There are multiple possible union types that represent the same type, # such as Union[int, bool, str] and Union[int, str]. Also, some union # types can be simplified to non-union types such as Union[int, bool] # -> int. It would be nice if we always had simplified union types but # this is currently not the case, though it often is. left = simplify_union(left) right = simplify_union(right) return left.accept(SameTypeVisitor(right)) def simplify_union(t: Type) -> Type: if isinstance(t, UnionType): return UnionType.make_simplified_union(t.items) return t def is_same_types(a1: Sequence[Type], a2: Sequence[Type]) -> bool: if len(a1) != len(a2): return False for i in range(len(a1)): if not is_same_type(a1[i], a2[i]): return False return True class SameTypeVisitor(TypeVisitor[bool]): """Visitor for checking whether two types are the 'same' type.""" def __init__(self, right: Type) -> None: self.right = right # visit_x(left) means: is left (which is an instance of X) the same type as # right? def visit_unbound_type(self, left: UnboundType) -> bool: return True def visit_any(self, left: AnyType) -> bool: return isinstance(self.right, AnyType) def visit_none_type(self, left: NoneTyp) -> bool: return isinstance(self.right, NoneTyp) def visit_uninhabited_type(self, t: UninhabitedType) -> bool: return isinstance(self.right, UninhabitedType) def visit_erased_type(self, left: ErasedType) -> bool: # We can get here when isinstance is used inside a lambda # whose type is being inferred. In any event, we have no reason # to think that an ErasedType will end up being the same as # any other type, except another ErasedType (for protocols). 
return isinstance(self.right, ErasedType) def visit_deleted_type(self, left: DeletedType) -> bool: return isinstance(self.right, DeletedType) def visit_instance(self, left: Instance) -> bool: return (isinstance(self.right, Instance) and left.type == self.right.type and is_same_types(left.args, self.right.args)) def visit_type_var(self, left: TypeVarType) -> bool: return (isinstance(self.right, TypeVarType) and left.id == self.right.id) def visit_callable_type(self, left: CallableType) -> bool: # FIX generics if isinstance(self.right, CallableType): cright = self.right return (is_same_type(left.ret_type, cright.ret_type) and is_same_types(left.arg_types, cright.arg_types) and left.arg_names == cright.arg_names and left.arg_kinds == cright.arg_kinds and left.is_type_obj() == cright.is_type_obj() and left.is_ellipsis_args == cright.is_ellipsis_args) else: return False def visit_tuple_type(self, left: TupleType) -> bool: if isinstance(self.right, TupleType): return is_same_types(left.items, self.right.items) else: return False def visit_typeddict_type(self, left: TypedDictType) -> bool: if isinstance(self.right, TypedDictType): if left.items.keys() != self.right.items.keys(): return False for (_, left_item_type, right_item_type) in left.zip(self.right): if not is_same_type(left_item_type, right_item_type): return False return True else: return False def visit_union_type(self, left: UnionType) -> bool: if isinstance(self.right, UnionType): # Check that everything in left is in right for left_item in left.items: if not any(is_same_type(left_item, right_item) for right_item in self.right.items): return False # Check that everything in right is in left for right_item in self.right.items: if not any(is_same_type(right_item, left_item) for left_item in left.items): return False return True else: return False def visit_overloaded(self, left: Overloaded) -> bool: if isinstance(self.right, Overloaded): return is_same_types(left.items(), self.right.items()) else: return False def visit_partial_type(self, left: PartialType) -> bool: # A partial type is not fully defined, so the result is indeterminate. We shouldn't # get here. raise RuntimeError def visit_type_type(self, left: TypeType) -> bool: if isinstance(self.right, TypeType): return is_same_type(left.item, self.right.item) else: return False mypy-0.560/mypy/semanal.py0000644€tŠÔÚ€2›s®0000057022013215007206021666 0ustar jukkaDROPBOX\Domain Users00000000000000"""The semantic analyzer passes 1 and 2. Bind names to definitions and do various other simple consistency checks. For example, consider this program: x = 1 y = x Here semantic analysis would detect that the assignment 'x = 1' defines a new variable, the type of which is to be inferred (in a later pass; type inference or type checking is not part of semantic analysis). Also, it would bind both references to 'x' to the same module-level variable (Var) node. The second assignment would also be analyzed, and the type of 'y' marked as being inferred. Semantic analysis is the first analysis pass after parsing, and it is subdivided into three passes: * SemanticAnalyzerPass1 is defined in mypy.semanal_pass1. * SemanticAnalyzerPass2 is the second pass. It does the bulk of the work. It assumes that dependent modules have been semantically analyzed, up to the second pass, unless there is a import cycle. * SemanticAnalyzerPass3 is the third pass. It's in mypy.semanal_pass3. Semantic analysis of types is implemented in module mypy.typeanal. 
TODO: Check if the third pass slows down type checking significantly. We could probably get rid of it -- for example, we could collect all analyzed types in a collection and check them without having to traverse the entire AST. """ from collections import OrderedDict from contextlib import contextmanager from typing import ( List, Dict, Set, Tuple, cast, TypeVar, Union, Optional, Callable, Iterator, Iterable ) from mypy.nodes import ( MypyFile, TypeInfo, Node, AssignmentStmt, FuncDef, OverloadedFuncDef, ClassDef, Var, GDEF, MODULE_REF, FuncItem, Import, Expression, Lvalue, ImportFrom, ImportAll, Block, LDEF, NameExpr, MemberExpr, IndexExpr, TupleExpr, ListExpr, ExpressionStmt, ReturnStmt, RaiseStmt, AssertStmt, OperatorAssignmentStmt, WhileStmt, ForStmt, BreakStmt, ContinueStmt, IfStmt, TryStmt, WithStmt, DelStmt, PassStmt, GlobalDecl, SuperExpr, DictExpr, CallExpr, RefExpr, OpExpr, UnaryExpr, SliceExpr, CastExpr, RevealTypeExpr, TypeApplication, Context, SymbolTable, SymbolTableNode, TVAR, ListComprehension, GeneratorExpr, LambdaExpr, MDEF, FuncBase, Decorator, SetExpr, TypeVarExpr, NewTypeExpr, StrExpr, BytesExpr, PrintStmt, ConditionalExpr, PromoteExpr, ComparisonExpr, StarExpr, ARG_POS, ARG_NAMED, ARG_NAMED_OPT, MroError, type_aliases, YieldFromExpr, NamedTupleExpr, TypedDictExpr, NonlocalDecl, SymbolNode, SetComprehension, DictionaryComprehension, TYPE_ALIAS, TypeAliasExpr, YieldExpr, ExecStmt, Argument, BackquoteExpr, ImportBase, AwaitExpr, IntExpr, FloatExpr, UnicodeExpr, EllipsisExpr, TempNode, EnumCallExpr, COVARIANT, CONTRAVARIANT, INVARIANT, UNBOUND_IMPORTED, LITERAL_YES, ARG_OPT, nongen_builtins, collections_type_aliases, get_member_expr_fullname, ) from mypy.literals import literal from mypy.tvar_scope import TypeVarScope from mypy.typevars import fill_typevars from mypy.visitor import NodeVisitor from mypy.traverser import TraverserVisitor from mypy.errors import Errors, report_internal_error from mypy.messages import CANNOT_ASSIGN_TO_TYPE, MessageBuilder from mypy.types import ( FunctionLike, UnboundType, TypeVarDef, TypeType, TupleType, UnionType, StarType, function_type, TypedDictType, NoneTyp, CallableType, Overloaded, Instance, Type, TypeVarType, AnyType, TypeTranslator, TypeOfAny, TypeVisitor, UninhabitedType, ErasedType, DeletedType ) from mypy.nodes import implicit_module_attrs from mypy.typeanal import ( TypeAnalyser, analyze_type_alias, no_subscript_builtin_alias, TypeVariableQuery, TypeVarList, remove_dups, has_any_from_unimported_type, check_for_explicit_any ) from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError from mypy.sametypes import is_same_type from mypy.options import Options from mypy import experiments from mypy.plugin import Plugin from mypy import join from mypy.util import get_prefix T = TypeVar('T') # Inferred truth value of an expression. ALWAYS_TRUE = 1 MYPY_TRUE = 2 # True in mypy, False at runtime ALWAYS_FALSE = 3 MYPY_FALSE = 4 # False in mypy, True at runtime TRUTH_VALUE_UNKNOWN = 5 inverted_truth_mapping = { ALWAYS_TRUE: ALWAYS_FALSE, ALWAYS_FALSE: ALWAYS_TRUE, TRUTH_VALUE_UNKNOWN: TRUTH_VALUE_UNKNOWN, MYPY_TRUE: MYPY_FALSE, MYPY_FALSE: MYPY_TRUE, } # Map from obsolete name to the current spelling. obsolete_name_mapping = { 'typing.Function': 'typing.Callable', 'typing.typevar': 'typing.TypeVar', } # Hard coded type promotions (shared between all Python versions). # These add extra ad-hoc edges to the subtyping relation. For example, # int is considered a subtype of float, even though there is no # subclass relationship. 
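# For example, with the int -> float edge below, mypy accepts an int
# argument where a float is expected (illustrative user-level sketch, not
# code from mypy itself):
#
#     def halve(x: float) -> float:
#         return x / 2
#
#     halve(3)      # ok: int is promoted to float
#     halve('3')    # error: str has no promotion to float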
TYPE_PROMOTIONS = { 'builtins.int': 'builtins.float', 'builtins.float': 'builtins.complex', } # Hard coded type promotions for Python 3. # # Note that the bytearray -> bytes promotion is a little unsafe # as some functions only accept bytes objects. Here convenience # trumps safety. TYPE_PROMOTIONS_PYTHON3 = TYPE_PROMOTIONS.copy() TYPE_PROMOTIONS_PYTHON3.update({ 'builtins.bytearray': 'builtins.bytes', }) # Hard coded type promotions for Python 2. # # These promotions are unsafe, but we are doing them anyway # for convenience and also for Python 3 compatibility # (bytearray -> str). TYPE_PROMOTIONS_PYTHON2 = TYPE_PROMOTIONS.copy() TYPE_PROMOTIONS_PYTHON2.update({ 'builtins.str': 'builtins.unicode', 'builtins.bytearray': 'builtins.str', }) # When analyzing a function, should we analyze the whole function in one go, or # should we only perform one phase of the analysis? The latter is used for # nested functions. In the first phase we add the function to the symbol table # but don't process body. In the second phase we process function body. This # way we can have mutually recursive nested functions. FUNCTION_BOTH_PHASES = 0 # Everything in one go FUNCTION_FIRST_PHASE_POSTPONE_SECOND = 1 # Add to symbol table but postpone body FUNCTION_SECOND_PHASE = 2 # Only analyze body # Matches "_prohibited" in typing.py, but adds __annotations__, which works at runtime but can't # easily be supported in a static checker. NAMEDTUPLE_PROHIBITED_NAMES = ('__new__', '__init__', '__slots__', '__getnewargs__', '_fields', '_field_defaults', '_field_types', '_make', '_replace', '_asdict', '_source', '__annotations__') # Map from the full name of a missing definition to the test fixture (under # test-data/unit/fixtures/) that provides the definition. This is used for # generating better error messages when running mypy tests only. SUGGESTED_TEST_FIXTURES = { 'typing.List': 'list.pyi', 'typing.Dict': 'dict.pyi', 'typing.Set': 'set.pyi', 'builtins.bool': 'bool.pyi', 'builtins.Exception': 'exception.pyi', 'builtins.BaseException': 'exception.pyi', 'builtins.isinstance': 'isinstancelist.pyi', 'builtins.property': 'property.pyi', 'builtins.classmethod': 'classmethod.pyi', } class SemanticAnalyzerPass2(NodeVisitor[None]): """Semantically analyze parsed mypy files. The analyzer binds names and does various consistency checks for a parse tree. Note that type checking is performed as a separate pass. This is the second phase of semantic analysis. """ # Library search paths lib_path = None # type: List[str] # Module name space modules = None # type: Dict[str, MypyFile] # Global name space for current module globals = None # type: SymbolTable # Names declared using "global" (separate set for each scope) global_decls = None # type: List[Set[str]] # Names declated using "nonlocal" (separate set for each scope) nonlocal_decls = None # type: List[Set[str]] # Local names of function scopes; None for non-function scopes. locals = None # type: List[Optional[SymbolTable]] # Nested block depths of scopes block_depth = None # type: List[int] # TypeInfo of directly enclosing class (or None) type = None # type: Optional[TypeInfo] # Stack of outer classes (the second tuple item contains tvars). 
type_stack = None # type: List[Optional[TypeInfo]] # Type variables that are bound by the directly enclosing class bound_tvars = None # type: List[SymbolTableNode] # Type variables bound by the current scope, be it class or function tvar_scope = None # type: TypeVarScope # Per-module options options = None # type: Options # Stack of functions being analyzed function_stack = None # type: List[FuncItem] # Status of postponing analysis of nested function bodies. By using this we # can have mutually recursive nested functions. Values are FUNCTION_x # constants. Note that separate phasea are not used for methods. postpone_nested_functions_stack = None # type: List[int] # Postponed functions collected if # postpone_nested_functions_stack[-1] == FUNCTION_FIRST_PHASE_POSTPONE_SECOND. postponed_functions_stack = None # type: List[List[Node]] loop_depth = 0 # Depth of breakable loops cur_mod_id = '' # Current module id (or None) (phase 2) is_stub_file = False # Are we analyzing a stub file? is_typeshed_stub_file = False # Are we analyzing a typeshed stub file? imports = None # type: Set[str] # Imported modules (during phase 2 analysis) errors = None # type: Errors # Keeps track of generated errors plugin = None # type: Plugin # Mypy plugin for special casing of library features def __init__(self, modules: Dict[str, MypyFile], missing_modules: Set[str], lib_path: List[str], errors: Errors, plugin: Plugin) -> None: """Construct semantic analyzer. Use lib_path to search for modules, and report analysis errors using the Errors instance. """ self.locals = [None] self.imports = set() self.type = None self.type_stack = [] self.tvar_scope = TypeVarScope() self.function_stack = [] self.block_depth = [0] self.loop_depth = 0 self.lib_path = lib_path self.errors = errors self.modules = modules self.msg = MessageBuilder(errors, modules) self.missing_modules = missing_modules self.postpone_nested_functions_stack = [FUNCTION_BOTH_PHASES] self.postponed_functions_stack = [] self.all_exports = set() # type: Set[str] self.plugin = plugin def visit_file(self, file_node: MypyFile, fnam: str, options: Options, patches: List[Callable[[], None]]) -> None: """Run semantic analysis phase 2 over a file. Add callbacks by mutating the patches list argument. They will be called after all semantic analysis phases but before type checking. 
""" self.options = options self.errors.set_file(fnam, file_node.fullname()) self.cur_mod_node = file_node self.cur_mod_id = file_node.fullname() self.is_stub_file = fnam.lower().endswith('.pyi') self.is_typeshed_stub_file = self.errors.is_typeshed_file(file_node.path) self.globals = file_node.names self.patches = patches with experiments.strict_optional_set(options.strict_optional): if 'builtins' in self.modules: self.globals['__builtins__'] = SymbolTableNode(MODULE_REF, self.modules['builtins']) for name in implicit_module_attrs: v = self.globals[name].node if isinstance(v, Var): assert v.type is not None, "Type of implicit attribute not set" v.type = self.anal_type(v.type) v.is_ready = True defs = file_node.defs for d in defs: self.accept(d) if self.cur_mod_id == 'builtins': remove_imported_names_from_symtable(self.globals, 'builtins') for alias_name in type_aliases: self.globals.pop(alias_name.split('.')[-1], None) if '__all__' in self.globals: for name, g in self.globals.items(): if name not in self.all_exports: g.module_public = False del self.options del self.patches def refresh_partial(self, node: Union[MypyFile, FuncItem]) -> None: """Refresh a stale target in fine-grained incremental mode.""" if isinstance(node, MypyFile): self.refresh_top_level(node) else: self.accept(node) def refresh_top_level(self, file_node: MypyFile) -> None: """Reanalyze a stale module top-level in fine-grained incremental mode.""" # TODO: Recursion into block statements. for d in file_node.defs: if isinstance(d, ClassDef): self.refresh_class_def(d) elif not isinstance(d, (FuncItem, Decorator)): self.accept(d) def refresh_class_def(self, defn: ClassDef) -> None: # TODO: Recursion into block statements. with self.analyze_class_body(defn) as should_continue: if should_continue: for d in defn.defs.body: # TODO: Make sure refreshing class bodies works. if isinstance(d, ClassDef): self.refresh_class_def(d) elif not isinstance(d, (FuncItem, Decorator)): self.accept(d) @contextmanager def file_context(self, file_node: MypyFile, fnam: str, options: Options, active_type: Optional[TypeInfo]) -> Iterator[None]: # TODO: Use this above in visit_file self.options = options self.errors.set_file(fnam, file_node.fullname()) self.cur_mod_node = file_node self.cur_mod_id = file_node.fullname() self.is_stub_file = fnam.lower().endswith('.pyi') self.is_typeshed_stub_file = self.errors.is_typeshed_file(file_node.path) self.globals = file_node.names if active_type: self.enter_class(active_type.defn.info) # TODO: Bind class type vars yield if active_type: self.leave_class() self.type = None del self.options def visit_func_def(self, defn: FuncDef) -> None: phase_info = self.postpone_nested_functions_stack[-1] if phase_info != FUNCTION_SECOND_PHASE: self.function_stack.append(defn) # First phase of analysis for function. self.errors.push_function(defn.name()) if not defn._fullname: defn._fullname = self.qualified_name(defn.name()) if defn.type: assert isinstance(defn.type, CallableType) self.update_function_type_variables(defn.type, defn) self.errors.pop_function() self.function_stack.pop() defn.is_conditional = self.block_depth[-1] > 0 # TODO(jukka): Figure out how to share the various cases. It doesn't # make sense to have (almost) duplicate code (here and elsewhere) for # 3 cases: module-level, class-level and local names. Maybe implement # a common stack of namespaces. As the 3 kinds of namespaces have # different semantics, this wouldn't always work, but it might still # be a win. 
if self.is_class_scope(): # Method definition assert self.type is not None, "Type not set at class scope" defn.info = self.type if not defn.is_decorated and not defn.is_overload: if (defn.name() in self.type.names and self.type.names[defn.name()].node != defn): # Redefinition. Conditional redefinition is okay. n = self.type.names[defn.name()].node if not self.set_original_def(n, defn): self.name_already_defined(defn.name(), defn) self.type.names[defn.name()] = SymbolTableNode(MDEF, defn) self.prepare_method_signature(defn, self.type) elif self.is_func_scope(): # Nested function assert self.locals[-1] is not None, "No locals at function scope" if not defn.is_decorated and not defn.is_overload: if defn.name() in self.locals[-1]: # Redefinition. Conditional redefinition is okay. n = self.locals[-1][defn.name()].node if not self.set_original_def(n, defn): self.name_already_defined(defn.name(), defn) else: self.add_local(defn, defn) else: # Top-level function if not defn.is_decorated and not defn.is_overload: symbol = self.globals[defn.name()] if isinstance(symbol.node, FuncDef) and symbol.node != defn: # This is redefinition. Conditional redefinition is okay. if not self.set_original_def(symbol.node, defn): # Report error. self.check_no_global(defn.name(), defn, True) if phase_info == FUNCTION_FIRST_PHASE_POSTPONE_SECOND: # Postpone this function (for the second phase). self.postponed_functions_stack[-1].append(defn) return if phase_info != FUNCTION_FIRST_PHASE_POSTPONE_SECOND: # Second phase of analysis for function. self.errors.push_function(defn.name()) self.analyze_function(defn) if defn.is_coroutine and isinstance(defn.type, CallableType): if defn.is_async_generator: # Async generator types are handled elsewhere pass else: # A coroutine defined as `async def foo(...) -> T: ...` # has external return type `Awaitable[T]`. ret_type = self.named_type_or_none('typing.Awaitable', [defn.type.ret_type]) assert ret_type is not None, "Internal error: typing.Awaitable not found" defn.type = defn.type.copy_modified(ret_type=ret_type) self.errors.pop_function() def prepare_method_signature(self, func: FuncDef, info: TypeInfo) -> None: """Check basic signature validity and tweak annotation of self/cls argument.""" # Only non-static methods are special. functype = func.type if not func.is_static: if not func.arguments: self.fail('Method must have at least one argument', func) elif isinstance(functype, CallableType): self_type = functype.arg_types[0] if isinstance(self_type, AnyType): if func.is_class or func.name() in ('__new__', '__init_subclass__'): leading_type = self.class_type(info) else: leading_type = fill_typevars(info) func.type = replace_implicit_first_type(functype, leading_type) def set_original_def(self, previous: Optional[Node], new: FuncDef) -> bool: """If 'new' conditionally redefine 'previous', set 'previous' as original We reject straight redefinitions of functions, as they are usually a programming error. For example: . def f(): ... . def f(): ... # Error: 'f' redefined """ if isinstance(previous, (FuncDef, Var, Decorator)) and new.is_conditional: new.original_def = previous return True else: return False def update_function_type_variables(self, fun_type: CallableType, defn: FuncItem) -> None: """Make any type variables in the signature of defn explicit. Update the signature of defn to contain type variable definitions if defn is generic. 
""" with self.tvar_scope_frame(self.tvar_scope.method_frame()): a = self.type_analyzer() fun_type.variables = a.bind_function_type_variables(fun_type, defn) def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: # OverloadedFuncDef refers to any legitimate situation where you have # more than one declaration for the same function in a row. This occurs # with a @property with a setter or a deleter, and for a classic # @overload. # Decide whether to analyze this as a property or an overload. If an # overload, and we're outside a stub, find the impl and set it. Remove # the impl from the item list, it's special. types = [] # type: List[CallableType] non_overload_indexes = [] # See if the first item is a property (and not an overload) first_item = defn.items[0] first_item.is_overload = True first_item.accept(self) if isinstance(first_item, Decorator) and first_item.func.is_property: first_item.func.is_overload = True self.analyze_property_with_multi_part_definition(defn) typ = function_type(first_item.func, self.builtin_type('builtins.function')) assert isinstance(typ, CallableType) types = [typ] else: for i, item in enumerate(defn.items): if i != 0: # The first item was already visited item.is_overload = True item.accept(self) # TODO support decorated overloaded functions properly if isinstance(item, Decorator): callable = function_type(item.func, self.builtin_type('builtins.function')) assert isinstance(callable, CallableType) if not any(refers_to_fullname(dec, 'typing.overload') for dec in item.decorators): if i == len(defn.items) - 1 and not self.is_stub_file: # Last item outside a stub is impl defn.impl = item else: # Oops it wasn't an overload after all. A clear error # will vary based on where in the list it is, record # that. non_overload_indexes.append(i) else: item.func.is_overload = True types.append(callable) elif isinstance(item, FuncDef): if i == len(defn.items) - 1 and not self.is_stub_file: defn.impl = item else: non_overload_indexes.append(i) if non_overload_indexes: if types: # Some of them were overloads, but not all. for idx in non_overload_indexes: if self.is_stub_file: self.fail("An implementation for an overloaded function " "is not allowed in a stub file", defn.items[idx]) else: self.fail("The implementation for an overloaded function " "must come last", defn.items[idx]) else: for idx in non_overload_indexes[1:]: self.name_already_defined(defn.name(), defn.items[idx]) if defn.impl: self.name_already_defined(defn.name(), defn.impl) # Remove the non-overloads for idx in reversed(non_overload_indexes): del defn.items[idx] # If we found an implementation, remove it from the overloads to # consider. if defn.impl is not None: assert defn.impl is defn.items[-1] defn.items = defn.items[:-1] elif not self.is_stub_file and not non_overload_indexes: if not (self.type and not self.is_func_scope() and self.type.is_protocol): self.fail( "An overloaded function outside a stub file must have an implementation", defn) else: for item in defn.items: if isinstance(item, Decorator): item.func.is_abstract = True else: item.is_abstract = True if types: defn.type = Overloaded(types) defn.type.line = defn.line if not defn.items: # It was not any kind of overload def after all. We've visited the # redfinitions already. 
return if self.type and not self.is_func_scope(): self.type.names[defn.name()] = SymbolTableNode(MDEF, defn, typ=defn.type) defn.info = self.type elif self.is_func_scope(): self.add_local(defn, defn) def analyze_property_with_multi_part_definition(self, defn: OverloadedFuncDef) -> None: """Analyze a property defined using multiple methods (e.g., using @x.setter). Assume that the first method (@property) has already been analyzed. """ defn.is_property = True items = defn.items first_item = cast(Decorator, defn.items[0]) for item in items[1:]: if isinstance(item, Decorator) and len(item.decorators) == 1: node = item.decorators[0] if isinstance(node, MemberExpr): if node.name == 'setter': # The first item represents the entire property. first_item.var.is_settable_property = True # Get abstractness from the original definition. item.func.is_abstract = first_item.func.is_abstract else: self.fail("Decorated property not supported", item) if isinstance(item, Decorator): item.func.accept(self) def analyze_function(self, defn: FuncItem) -> None: is_method = self.is_class_scope() with self.tvar_scope_frame(self.tvar_scope.method_frame()): if defn.type: self.check_classvar_in_signature(defn.type) assert isinstance(defn.type, CallableType) # Signature must be analyzed in the surrounding scope so that # class-level imported names and type variables are in scope. defn.type = self.type_analyzer().visit_callable_type(defn.type, nested=False) self.check_function_signature(defn) if isinstance(defn, FuncDef): defn.type = set_callable_name(defn.type, defn) for arg in defn.arguments: if arg.initializer: arg.initializer.accept(self) # Bind the type variables again to visit the body. if defn.type: a = self.type_analyzer() a.bind_function_type_variables(cast(CallableType, defn.type), defn) self.function_stack.append(defn) self.enter() for arg in defn.arguments: self.add_local(arg.variable, defn) # The first argument of a non-static, non-class method is like 'self' # (though the name could be different), having the enclosing class's # instance type. if is_method and not defn.is_static and not defn.is_class and defn.arguments: defn.arguments[0].variable.is_self = True # First analyze body of the function but ignore nested functions. self.postpone_nested_functions_stack.append(FUNCTION_FIRST_PHASE_POSTPONE_SECOND) self.postponed_functions_stack.append([]) defn.body.accept(self) # Analyze nested functions (if any) as a second phase. self.postpone_nested_functions_stack[-1] = FUNCTION_SECOND_PHASE for postponed in self.postponed_functions_stack[-1]: postponed.accept(self) self.postpone_nested_functions_stack.pop() self.postponed_functions_stack.pop() self.leave() self.function_stack.pop() def check_classvar_in_signature(self, typ: Type) -> None: if isinstance(typ, Overloaded): for t in typ.items(): # type: Type self.check_classvar_in_signature(t) return if not isinstance(typ, CallableType): return for t in typ.arg_types + [typ.ret_type]: if self.is_classvar(t): self.fail_invalid_classvar(t) # Show only one error per signature break def check_function_signature(self, fdef: FuncItem) -> None: sig = fdef.type assert isinstance(sig, CallableType) if len(sig.arg_types) < len(fdef.arguments): self.fail('Type signature has too few arguments', fdef) # Add dummy Any arguments to prevent crashes later. 
num_extra_anys = len(fdef.arguments) - len(sig.arg_types) extra_anys = [AnyType(TypeOfAny.from_error)] * num_extra_anys sig.arg_types.extend(extra_anys) elif len(sig.arg_types) > len(fdef.arguments): self.fail('Type signature has too many arguments', fdef, blocker=True) def visit_class_def(self, defn: ClassDef) -> None: with self.analyze_class_body(defn) as should_continue: if should_continue: # Analyze class body. defn.defs.accept(self) @contextmanager def analyze_class_body(self, defn: ClassDef) -> Iterator[bool]: with self.tvar_scope_frame(self.tvar_scope.class_frame()): is_protocol = self.detect_protocol_base(defn) self.update_metaclass(defn) self.clean_up_bases_and_infer_type_variables(defn) self.analyze_class_keywords(defn) if self.analyze_typeddict_classdef(defn): yield False return named_tuple_info = self.analyze_namedtuple_classdef(defn) if named_tuple_info is not None: # Temporarily clear the names dict so we don't get errors about duplicate names # that were already set in build_namedtuple_typeinfo. nt_names = named_tuple_info.names named_tuple_info.names = SymbolTable() # This is needed for the cls argument to classmethods to get bound correctly. named_tuple_info.names['__init__'] = nt_names['__init__'] self.enter_class(named_tuple_info) yield True self.leave_class() # make sure we didn't use illegal names, then reset the names in the typeinfo for prohibited in NAMEDTUPLE_PROHIBITED_NAMES: if prohibited in named_tuple_info.names: if nt_names.get(prohibited) is named_tuple_info.names[prohibited]: continue ctx = named_tuple_info.names[prohibited].node assert ctx is not None self.fail('Cannot overwrite NamedTuple attribute "{}"'.format(prohibited), ctx) # Restore the names in the original symbol table. This ensures that the symbol # table contains the field objects created by build_namedtuple_typeinfo. Exclude # __doc__, which can legally be overwritten by the class. named_tuple_info.names.update({ key: value for key, value in nt_names.items() if key not in named_tuple_info.names or key != '__doc__' }) else: self.setup_class_def_analysis(defn) self.analyze_base_classes(defn) self.analyze_metaclass(defn) defn.info.is_protocol = is_protocol defn.info.runtime_protocol = False for decorator in defn.decorators: self.analyze_class_decorator(defn, decorator) self.enter_class(defn.info) yield True self.calculate_abstract_status(defn.info) self.setup_type_promotion(defn) self.leave_class() def analyze_class_keywords(self, defn: ClassDef) -> None: for value in defn.keywords.values(): value.accept(self) def enter_class(self, info: TypeInfo) -> None: # Remember previous active class self.type_stack.append(self.type) self.locals.append(None) # Add class scope self.block_depth.append(-1) # The class body increments this to 0 self.postpone_nested_functions_stack.append(FUNCTION_BOTH_PHASES) self.type = info def leave_class(self) -> None: """ Restore analyzer state. """ self.postpone_nested_functions_stack.pop() self.block_depth.pop() self.locals.pop() self.type = self.type_stack.pop() def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None: decorator.accept(self) if (isinstance(decorator, RefExpr) and decorator.fullname in ('typing.runtime', 'typing_extensions.runtime')): if defn.info.is_protocol: defn.info.runtime_protocol = True else: self.fail('@runtime can only be used with protocol classes', defn) def calculate_abstract_status(self, typ: TypeInfo) -> None: """Calculate abstract status of a class. 
Set is_abstract of the type to True if the type has an unimplemented abstract attribute. Also compute a list of abstract attributes. """ concrete = set() # type: Set[str] abstract = [] # type: List[str] for base in typ.mro: for name, symnode in base.names.items(): node = symnode.node if isinstance(node, OverloadedFuncDef): # Unwrap an overloaded function definition. We can just # check arbitrarily the first overload item. If the # different items have a different abstract status, there # should be an error reported elsewhere. func = node.items[0] # type: Optional[Node] else: func = node if isinstance(func, Decorator): fdef = func.func if fdef.is_abstract and name not in concrete: typ.is_abstract = True abstract.append(name) elif isinstance(node, Var): if node.is_abstract_var and name not in concrete: typ.is_abstract = True abstract.append(name) concrete.add(name) typ.abstract_attributes = sorted(abstract) def setup_type_promotion(self, defn: ClassDef) -> None: """Setup extra, ad-hoc subtyping relationships between classes (promotion). This includes things like 'int' being compatible with 'float'. """ promote_target = None # type: Optional[Type] for decorator in defn.decorators: if isinstance(decorator, CallExpr): analyzed = decorator.analyzed if isinstance(analyzed, PromoteExpr): # _promote class decorator (undocumented faeture). promote_target = analyzed.type if not promote_target: promotions = (TYPE_PROMOTIONS_PYTHON3 if self.options.python_version[0] >= 3 else TYPE_PROMOTIONS_PYTHON2) if defn.fullname in promotions: promote_target = self.named_type_or_none(promotions[defn.fullname]) defn.info._promote = promote_target def detect_protocol_base(self, defn: ClassDef) -> bool: for base_expr in defn.base_type_exprs: try: base = expr_to_unanalyzed_type(base_expr) except TypeTranslationError: continue # This will be reported later if not isinstance(base, UnboundType): continue sym = self.lookup_qualified(base.name, base) if sym is None or sym.node is None: continue if sym.node.fullname() in ('typing.Protocol', 'typing_extensions.Protocol'): return True return False def clean_up_bases_and_infer_type_variables(self, defn: ClassDef) -> None: """Remove extra base classes such as Generic and infer type vars. For example, consider this class: . class Foo(Bar, Generic[T]): ... Now we will remove Generic[T] from bases of Foo and infer that the type variable 'T' is a type argument of Foo. Note that this is performed *before* semantic analysis. """ removed = [] # type: List[int] declared_tvars = [] # type: TypeVarList for i, base_expr in enumerate(defn.base_type_exprs): try: base = expr_to_unanalyzed_type(base_expr) except TypeTranslationError: # This error will be caught later. continue tvars = self.analyze_typevar_declaration(base) if tvars is not None: if declared_tvars: self.fail('Only single Generic[...] or Protocol[...] can be in bases', defn) removed.append(i) declared_tvars.extend(tvars) if isinstance(base, UnboundType): sym = self.lookup_qualified(base.name, base) if sym is not None and sym.node is not None: if (sym.node.fullname() in ('typing.Protocol', 'typing_extensions.Protocol') and i not in removed): # also remove bare 'Protocol' bases removed.append(i) all_tvars = self.get_all_bases_tvars(defn, removed) if declared_tvars: if len(remove_dups(declared_tvars)) < len(declared_tvars): self.fail("Duplicate type variables in Generic[...] or Protocol[...]", defn) declared_tvars = remove_dups(declared_tvars) if not set(all_tvars).issubset(set(declared_tvars)): self.fail("If Generic[...] 
or Protocol[...] is present" " it should list all type variables", defn) # In case of error, Generic tvars will go first declared_tvars = remove_dups(declared_tvars + all_tvars) else: declared_tvars = all_tvars if declared_tvars: if defn.info: defn.info.type_vars = [name for name, _ in declared_tvars] for i in reversed(removed): del defn.base_type_exprs[i] tvar_defs = [] # type: List[TypeVarDef] for name, tvar_expr in declared_tvars: tvar_defs.append(self.tvar_scope.bind(name, tvar_expr)) defn.type_vars = tvar_defs def analyze_typevar_declaration(self, t: Type) -> Optional[TypeVarList]: if not isinstance(t, UnboundType): return None unbound = t sym = self.lookup_qualified(unbound.name, unbound) if sym is None or sym.node is None: return None if (sym.node.fullname() == 'typing.Generic' or sym.node.fullname() == 'typing.Protocol' and t.args or sym.node.fullname() == 'typing_extensions.Protocol' and t.args): tvars = [] # type: TypeVarList for arg in unbound.args: tvar = self.analyze_unbound_tvar(arg) if tvar: tvars.append(tvar) else: self.fail('Free type variable expected in %s[...]' % sym.node.name(), t) return tvars return None def analyze_unbound_tvar(self, t: Type) -> Optional[Tuple[str, TypeVarExpr]]: if not isinstance(t, UnboundType): return None unbound = t sym = self.lookup_qualified(unbound.name, unbound) if sym is None or sym.kind != TVAR: return None elif sym.fullname and not self.tvar_scope.allow_binding(sym.fullname): # It's bound by our type variable scope return None else: assert isinstance(sym.node, TypeVarExpr) return unbound.name, sym.node def get_all_bases_tvars(self, defn: ClassDef, removed: List[int]) -> TypeVarList: tvars = [] # type: TypeVarList for i, base_expr in enumerate(defn.base_type_exprs): if i not in removed: try: base = expr_to_unanalyzed_type(base_expr) except TypeTranslationError: # This error will be caught later. continue base_tvars = base.accept(TypeVariableQuery(self.lookup_qualified, self.tvar_scope)) tvars.extend(base_tvars) return remove_dups(tvars) def analyze_namedtuple_classdef(self, defn: ClassDef) -> Optional[TypeInfo]: # special case for NamedTuple for base_expr in defn.base_type_exprs: if isinstance(base_expr, RefExpr): base_expr.accept(self) if base_expr.fullname == 'typing.NamedTuple': node = self.lookup(defn.name, defn) if node is not None: node.kind = GDEF # TODO in process_namedtuple_definition also applies here items, types, default_items = self.check_namedtuple_classdef(defn) info = self.build_namedtuple_typeinfo( defn.name, items, types, default_items) node.node = info defn.info.replaced = info defn.info = info defn.analyzed = NamedTupleExpr(info) defn.analyzed.line = defn.line defn.analyzed.column = defn.column return info return None def check_namedtuple_classdef( self, defn: ClassDef) -> Tuple[List[str], List[Type], Dict[str, Expression]]: NAMEDTUP_CLASS_ERROR = ('Invalid statement in NamedTuple definition; ' 'expected "field_name: field_type [= default]"') if self.options.python_version < (3, 6): self.fail('NamedTuple class syntax is only supported in Python 3.6', defn) return [], [], {} if len(defn.base_type_exprs) > 1: self.fail('NamedTuple should be a single base', defn) items = [] # type: List[str] types = [] # type: List[Type] default_items = {} # type: Dict[str, Expression] for stmt in defn.defs.body: if not isinstance(stmt, AssignmentStmt): # Still allow pass or ... (for empty namedtuples). 
if (isinstance(stmt, PassStmt) or (isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, EllipsisExpr))): continue # Also allow methods, including decorated ones. if isinstance(stmt, (Decorator, FuncBase)): continue # And docstrings. if (isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, StrExpr)): continue self.fail(NAMEDTUP_CLASS_ERROR, stmt) elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr): # An assignment, but an invalid one. self.fail(NAMEDTUP_CLASS_ERROR, stmt) else: # Append name and type in this case... name = stmt.lvalues[0].name items.append(name) types.append(AnyType(TypeOfAny.unannotated) if stmt.type is None else self.anal_type(stmt.type)) # ...despite possible minor failures that allow further analyzis. if name.startswith('_'): self.fail('NamedTuple field name cannot start with an underscore: {}' .format(name), stmt) if stmt.type is None or hasattr(stmt, 'new_syntax') and not stmt.new_syntax: self.fail(NAMEDTUP_CLASS_ERROR, stmt) elif isinstance(stmt.rvalue, TempNode): # x: int assigns rvalue to TempNode(AnyType()) if default_items: self.fail('Non-default NamedTuple fields cannot follow default fields', stmt) else: default_items[name] = stmt.rvalue return items, types, default_items def setup_class_def_analysis(self, defn: ClassDef) -> None: """Prepare for the analysis of a class definition.""" if not defn.info: defn.info = TypeInfo(SymbolTable(), defn, self.cur_mod_id) defn.info._fullname = defn.info.name() if self.is_func_scope() or self.type: kind = MDEF if self.is_func_scope(): kind = LDEF node = SymbolTableNode(kind, defn.info) self.add_symbol(defn.name, node, defn) if kind == LDEF: # We need to preserve local classes, let's store them # in globals under mangled unique names local_name = defn.info._fullname + '@' + str(defn.line) defn.info._fullname = self.cur_mod_id + '.' + local_name defn.fullname = defn.info._fullname self.globals[local_name] = node def analyze_base_classes(self, defn: ClassDef) -> None: """Analyze and set up base classes. This computes several attributes on the corresponding TypeInfo defn.info related to the base classes: defn.info.bases, defn.info.mro, and miscellaneous others (at least tuple_type, fallback_to_any, and is_enum.) 
""" base_types = [] # type: List[Instance] info = defn.info for base_expr in defn.base_type_exprs: try: base = self.expr_to_analyzed_type(base_expr) except TypeTranslationError: self.fail('Invalid base class', base_expr) info.fallback_to_any = True continue if isinstance(base, TupleType): if info.tuple_type: self.fail("Class has two incompatible bases derived from tuple", defn) defn.has_incompatible_baseclass = True info.tuple_type = base base_types.append(base.fallback) if isinstance(base_expr, CallExpr): defn.analyzed = NamedTupleExpr(base.fallback.type) defn.analyzed.line = defn.line defn.analyzed.column = defn.column elif isinstance(base, Instance): if base.type.is_newtype: self.fail("Cannot subclass NewType", defn) base_types.append(base) elif isinstance(base, AnyType): if self.options.disallow_subclassing_any: if isinstance(base_expr, (NameExpr, MemberExpr)): msg = "Class cannot subclass '{}' (has type 'Any')".format(base_expr.name) else: msg = "Class cannot subclass value of type 'Any'" self.fail(msg, base_expr) info.fallback_to_any = True else: self.fail('Invalid base class', base_expr) info.fallback_to_any = True if self.options.disallow_any_unimported and has_any_from_unimported_type(base): if isinstance(base_expr, (NameExpr, MemberExpr)): prefix = "Base type {}".format(base_expr.name) else: prefix = "Base type" self.msg.unimported_type_becomes_any(prefix, base, base_expr) check_for_explicit_any(base, self.options, self.is_typeshed_stub_file, self.msg, context=base_expr) # Add 'object' as implicit base if there is no other base class. if (not base_types and defn.fullname != 'builtins.object'): base_types.append(self.object_type()) info.bases = base_types # Calculate the MRO. It might be incomplete at this point if # the bases of defn include classes imported from other # modules in an import loop. We'll recompute it in SemanticAnalyzerPass3. if not self.verify_base_classes(defn): # Give it an MRO consisting of just the class itself and object. defn.info.mro = [defn.info, self.object_type().type] return calculate_class_mro(defn, self.fail_blocker) # If there are cyclic imports, we may be missing 'object' in # the MRO. Fix MRO if needed. if info.mro and info.mro[-1].fullname() != 'builtins.object': info.mro.append(self.object_type().type) if defn.info.is_enum and defn.type_vars: self.fail("Enum class cannot be generic", defn) def update_metaclass(self, defn: ClassDef) -> None: """Lookup for special metaclass declarations, and update defn fields accordingly. * __metaclass__ attribute in Python 2 * six.with_metaclass(M, B1, B2, ...) * @six.add_metaclass(M) """ # Look for "__metaclass__ = " in Python 2 python2_meta_expr = None # type: Optional[Expression] if self.options.python_version[0] == 2: for body_node in defn.defs.body: if isinstance(body_node, ClassDef) and body_node.name == "__metaclass__": self.fail("Metaclasses defined as inner classes are not supported", body_node) break elif isinstance(body_node, AssignmentStmt) and len(body_node.lvalues) == 1: lvalue = body_node.lvalues[0] if isinstance(lvalue, NameExpr) and lvalue.name == "__metaclass__": python2_meta_expr = body_node.rvalue # Look for six.with_metaclass(M, B1, B2, ...) 
with_meta_expr = None # type: Optional[Expression] if len(defn.base_type_exprs) == 1: base_expr = defn.base_type_exprs[0] if isinstance(base_expr, CallExpr) and isinstance(base_expr.callee, RefExpr): base_expr.callee.accept(self) if (base_expr.callee.fullname == 'six.with_metaclass' and len(base_expr.args) >= 1 and all(kind == ARG_POS for kind in base_expr.arg_kinds)): with_meta_expr = base_expr.args[0] defn.base_type_exprs = base_expr.args[1:] # Look for @six.add_metaclass(M) add_meta_expr = None # type: Optional[Expression] for dec_expr in defn.decorators: if isinstance(dec_expr, CallExpr) and isinstance(dec_expr.callee, RefExpr): dec_expr.callee.accept(self) if (dec_expr.callee.fullname == 'six.add_metaclass' and len(dec_expr.args) == 1 and dec_expr.arg_kinds[0] == ARG_POS): add_meta_expr = dec_expr.args[0] break metas = {defn.metaclass, python2_meta_expr, with_meta_expr, add_meta_expr} - {None} if len(metas) == 0: return if len(metas) > 1: self.fail("Multiple metaclass definitions", defn) return defn.metaclass = metas.pop() def expr_to_analyzed_type(self, expr: Expression) -> Type: if isinstance(expr, CallExpr): expr.accept(self) info = self.check_namedtuple(expr) if info is None: # Some form of namedtuple is the only valid type that looks like a call # expression. This isn't a valid type. raise TypeTranslationError() assert info.tuple_type, "NamedTuple without tuple type" fallback = Instance(info, []) return TupleType(info.tuple_type.items, fallback=fallback) typ = expr_to_unanalyzed_type(expr) return self.anal_type(typ) def verify_base_classes(self, defn: ClassDef) -> bool: info = defn.info for base in info.bases: baseinfo = base.type if self.is_base_class(info, baseinfo): self.fail('Cycle in inheritance hierarchy', defn, blocker=True) # Clear bases to forcefully get rid of the cycle. info.bases = [] if baseinfo.fullname() == 'builtins.bool': self.fail("'%s' is not a valid base class" % baseinfo.name(), defn, blocker=True) return False dup = find_duplicate(info.direct_base_classes()) if dup: self.fail('Duplicate base class "%s"' % dup.name(), defn, blocker=True) return False return True def is_base_class(self, t: TypeInfo, s: TypeInfo) -> bool: """Determine if t is a base class of s (but do not use mro).""" # Search the base class graph for t, starting from s. worklist = [s] visited = {s} while worklist: nxt = worklist.pop() if nxt == t: return True for base in nxt.bases: if base.type not in visited: worklist.append(base.type) visited.add(base.type) return False def analyze_metaclass(self, defn: ClassDef) -> None: if defn.metaclass: metaclass_name = None if isinstance(defn.metaclass, NameExpr): metaclass_name = defn.metaclass.name elif isinstance(defn.metaclass, MemberExpr): metaclass_name = get_member_expr_fullname(defn.metaclass) if metaclass_name is None: self.fail("Dynamic metaclass not supported for '%s'" % defn.name, defn.metaclass) return sym = self.lookup_qualified(metaclass_name, defn.metaclass) if sym is None: # Probably a name error - it is already handled elsewhere return if isinstance(sym.node, Var) and isinstance(sym.node.type, AnyType): # 'Any' metaclass -- just ignore it. # # TODO: A better approach would be to record this information # and assume that the type object supports arbitrary # attributes, similar to an 'Any' base class. 
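# ---------------------------------------------------------------------------
# Illustrative aside (not part of this module): the metaclass spellings
# recognised by update_metaclass() above.  The last two forms assume the
# external 'six' package is installed; class names are invented.
import six

class Meta(type):
    pass

class A(six.with_metaclass(Meta, object)):   # analyzer rewrites the bases to (object,)
    pass

@six.add_metaclass(Meta)
class B(object):
    pass

assert type(A) is Meta and type(B) is Meta
# (In Python 2 only, a bare "__metaclass__ = Meta" assignment in the class
# body is recognised as well.)
# ---------------------------------------------------------------------------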
return if not isinstance(sym.node, TypeInfo) or sym.node.tuple_type is not None: self.fail("Invalid metaclass '%s'" % metaclass_name, defn.metaclass) return if not sym.node.is_metaclass(): self.fail("Metaclasses not inheriting from 'type' are not supported", defn.metaclass) return inst = fill_typevars(sym.node) assert isinstance(inst, Instance) defn.info.declared_metaclass = inst defn.info.metaclass_type = defn.info.calculate_metaclass_type() if defn.info.metaclass_type is None: # Inconsistency may happen due to multiple baseclasses even in classes that # do not declare explicit metaclass, but it's harder to catch at this stage if defn.metaclass is not None: self.fail("Inconsistent metaclass structure for '%s'" % defn.name, defn) def object_type(self) -> Instance: return self.named_type('__builtins__.object') def str_type(self) -> Instance: return self.named_type('__builtins__.str') def class_type(self, info: TypeInfo) -> Type: # Construct a function type whose fallback is cls. from mypy import checkmember # To avoid import cycle. leading_type = checkmember.type_object_type(info, self.builtin_type) if isinstance(leading_type, Overloaded): # Overloaded __init__ is too complex to handle. Plus it's stubs only. return AnyType(TypeOfAny.special_form) else: return leading_type def named_type(self, qualified_name: str, args: Optional[List[Type]] = None) -> Instance: sym = self.lookup_qualified(qualified_name, Context()) assert sym, "Internal error: attempted to construct unknown type" node = sym.node assert isinstance(node, TypeInfo) if args: # TODO: assert len(args) == len(node.defn.type_vars) return Instance(node, args) return Instance(node, [AnyType(TypeOfAny.special_form)] * len(node.defn.type_vars)) def named_type_or_none(self, qualified_name: str, args: Optional[List[Type]] = None) -> Optional[Instance]: sym = self.lookup_fully_qualified_or_none(qualified_name) if not sym: return None node = sym.node assert isinstance(node, TypeInfo) if args: # TODO: assert len(args) == len(node.defn.type_vars) return Instance(node, args) return Instance(node, [AnyType(TypeOfAny.unannotated)] * len(node.defn.type_vars)) def is_typeddict(self, expr: Expression) -> bool: return (isinstance(expr, RefExpr) and isinstance(expr.node, TypeInfo) and expr.node.typeddict_type is not None) def analyze_typeddict_classdef(self, defn: ClassDef) -> bool: # special case for TypedDict possible = False for base_expr in defn.base_type_exprs: if isinstance(base_expr, RefExpr): base_expr.accept(self) if (base_expr.fullname == 'mypy_extensions.TypedDict' or self.is_typeddict(base_expr)): possible = True if possible: node = self.lookup(defn.name, defn) if node is not None: node.kind = GDEF # TODO in process_namedtuple_definition also applies here if (len(defn.base_type_exprs) == 1 and isinstance(defn.base_type_exprs[0], RefExpr) and defn.base_type_exprs[0].fullname == 'mypy_extensions.TypedDict'): # Building a new TypedDict fields, types, required_keys = self.check_typeddict_classdef(defn) info = self.build_typeddict_typeinfo(defn.name, fields, types, required_keys) defn.info.replaced = info node.node = info defn.analyzed = TypedDictExpr(info) defn.analyzed.line = defn.line defn.analyzed.column = defn.column return True # Extending/merging existing TypedDicts if any(not isinstance(expr, RefExpr) or expr.fullname != 'mypy_extensions.TypedDict' and not self.is_typeddict(expr) for expr in defn.base_type_exprs): self.fail("All bases of a new TypedDict must be TypedDict types", defn) typeddict_bases = list(filter(self.is_typeddict, 
defn.base_type_exprs)) keys = [] # type: List[str] types = [] required_keys = set() for base in typeddict_bases: assert isinstance(base, RefExpr) assert isinstance(base.node, TypeInfo) assert isinstance(base.node.typeddict_type, TypedDictType) base_typed_dict = base.node.typeddict_type base_items = base_typed_dict.items valid_items = base_items.copy() for key in base_items: if key in keys: self.fail('Cannot overwrite TypedDict field "{}" while merging' .format(key), defn) valid_items.pop(key) keys.extend(valid_items.keys()) types.extend(valid_items.values()) required_keys.update(base_typed_dict.required_keys) new_keys, new_types, new_required_keys = self.check_typeddict_classdef(defn, keys) keys.extend(new_keys) types.extend(new_types) required_keys.update(new_required_keys) info = self.build_typeddict_typeinfo(defn.name, keys, types, required_keys) defn.info.replaced = info node.node = info defn.analyzed = TypedDictExpr(info) defn.analyzed.line = defn.line defn.analyzed.column = defn.column return True return False def check_typeddict_classdef(self, defn: ClassDef, oldfields: Optional[List[str]] = None) -> Tuple[List[str], List[Type], Set[str]]: TPDICT_CLASS_ERROR = ('Invalid statement in TypedDict definition; ' 'expected "field_name: field_type"') if self.options.python_version < (3, 6): self.fail('TypedDict class syntax is only supported in Python 3.6', defn) return [], [], set() fields = [] # type: List[str] types = [] # type: List[Type] for stmt in defn.defs.body: if not isinstance(stmt, AssignmentStmt): # Still allow pass or ... (for empty TypedDict's). if (not isinstance(stmt, PassStmt) and not (isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, (EllipsisExpr, StrExpr)))): self.fail(TPDICT_CLASS_ERROR, stmt) elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr): # An assignment, but an invalid one. self.fail(TPDICT_CLASS_ERROR, stmt) else: name = stmt.lvalues[0].name if name in (oldfields or []): self.fail('Cannot overwrite TypedDict field "{}" while extending' .format(name), stmt) continue if name in fields: self.fail('Duplicate TypedDict field "{}"'.format(name), stmt) continue # Append name and type in this case... fields.append(name) types.append(AnyType(TypeOfAny.unannotated) if stmt.type is None else self.anal_type(stmt.type)) # ...despite possible minor failures that allow further analyzis. if stmt.type is None or hasattr(stmt, 'new_syntax') and not stmt.new_syntax: self.fail(TPDICT_CLASS_ERROR, stmt) elif not isinstance(stmt.rvalue, TempNode): # x: int assigns rvalue to TempNode(AnyType()) self.fail('Right hand side values are not supported in TypedDict', stmt) total = True # type: Optional[bool] if 'total' in defn.keywords: total = self.parse_bool(defn.keywords['total']) if total is None: self.fail('Value of "total" must be True or False', defn) total = True required_keys = set(fields) if total else set() return fields, types, required_keys def visit_import(self, i: Import) -> None: for id, as_id in i.ids: if as_id is not None: self.add_module_symbol(id, as_id, module_public=True, context=i) else: # Modules imported in a stub file without using 'as x' won't get exported module_public = not self.is_stub_file base = id.split('.')[0] self.add_module_symbol(base, base, module_public=module_public, context=i, module_hidden=not module_public) self.add_submodules_to_parent_modules(id, module_public) def add_submodules_to_parent_modules(self, id: str, module_public: bool) -> None: """Recursively adds a reference to a newly loaded submodule to its parent. 
When you import a submodule in any way, Python will add a reference to that submodule to its parent. So, if you do something like `import A.B` or `from A import B` or `from A.B import Foo`, Python will add a reference to module A.B to A's namespace. Note that this "parent patching" process is completely independent from any changes made to the *importer's* namespace. For example, if you have a file named `foo.py` where you do `from A.B import Bar`, then foo's namespace will be modified to contain a reference to only Bar. Independently, A's namespace will be modified to contain a reference to `A.B`. """ while '.' in id: parent, child = id.rsplit('.', 1) parent_mod = self.modules.get(parent) if parent_mod and child not in parent_mod.names: child_mod = self.modules.get(id) if child_mod: sym = SymbolTableNode(MODULE_REF, child_mod, module_public=module_public) parent_mod.names[child] = sym id = parent def add_module_symbol(self, id: str, as_id: str, module_public: bool, context: Context, module_hidden: bool = False) -> None: if id in self.modules: m = self.modules[id] self.add_symbol(as_id, SymbolTableNode(MODULE_REF, m, module_public=module_public, module_hidden=module_hidden), context) else: self.add_unknown_symbol(as_id, context, is_import=True) def visit_import_from(self, imp: ImportFrom) -> None: import_id = self.correct_relative_import(imp) self.add_submodules_to_parent_modules(import_id, True) module = self.modules.get(import_id) for id, as_id in imp.names: node = module.names.get(id) if module else None missing = False possible_module_id = import_id + '.' + id # If the module does not contain a symbol with the name 'id', # try checking if it's a module instead. if not node or node.kind == UNBOUND_IMPORTED: mod = self.modules.get(possible_module_id) if mod is not None: node = SymbolTableNode(MODULE_REF, mod) self.add_submodules_to_parent_modules(possible_module_id, True) elif possible_module_id in self.missing_modules: missing = True # If it is still not resolved, and the module is a stub # check for a module level __getattr__ if module and not node and module.is_stub and '__getattr__' in module.names: getattr_defn = module.names['__getattr__'] if isinstance(getattr_defn.node, FuncDef): if isinstance(getattr_defn.node.type, CallableType): typ = getattr_defn.node.type.ret_type else: typ = AnyType(TypeOfAny.from_error) if as_id: name = as_id else: name = id ast_node = Var(name, type=typ) symbol = SymbolTableNode(GDEF, ast_node) self.add_symbol(name, symbol, imp) return if node and node.kind != UNBOUND_IMPORTED and not node.module_hidden: node = self.normalize_type_alias(node, imp) if not node: return imported_id = as_id or id existing_symbol = self.globals.get(imported_id) if existing_symbol: # Import can redefine a variable. They get special treatment. if self.process_import_over_existing_name( imported_id, existing_symbol, node, imp): continue # 'from m import x as x' exports x in a stub file. module_public = not self.is_stub_file or as_id is not None module_hidden = not module_public and possible_module_id not in self.modules symbol = SymbolTableNode(node.kind, node.node, node.type_override, module_public=module_public, normalized=node.normalized, alias_tvars=node.alias_tvars, module_hidden=module_hidden) self.add_symbol(imported_id, symbol, imp) elif module and not missing: # Missing attribute. 
message = "Module '{}' has no attribute '{}'".format(import_id, id) extra = self.undefined_name_extra_info('{}.{}'.format(import_id, id)) if extra: message += " {}".format(extra) self.fail(message, imp) self.add_unknown_symbol(as_id or id, imp, is_import=True) else: # Missing module. self.add_unknown_symbol(as_id or id, imp, is_import=True) def process_import_over_existing_name(self, imported_id: str, existing_symbol: SymbolTableNode, module_symbol: SymbolTableNode, import_node: ImportBase) -> bool: if (existing_symbol.kind in (LDEF, GDEF, MDEF) and isinstance(existing_symbol.node, (Var, FuncDef, TypeInfo, Decorator))): # This is a valid import over an existing definition in the file. Construct a dummy # assignment that we'll use to type check the import. lvalue = NameExpr(imported_id) lvalue.kind = existing_symbol.kind lvalue.node = existing_symbol.node rvalue = NameExpr(imported_id) rvalue.kind = module_symbol.kind rvalue.node = module_symbol.node assignment = AssignmentStmt([lvalue], rvalue) for node in assignment, lvalue, rvalue: node.set_line(import_node) import_node.assignments.append(assignment) return True return False def normalize_type_alias(self, node: SymbolTableNode, ctx: Context) -> Optional[SymbolTableNode]: normalized = False fullname = node.fullname if fullname in type_aliases: # Node refers to an aliased type such as typing.List; normalize. new_node = self.lookup_qualified(type_aliases[fullname], ctx) if new_node is None: self.add_fixture_note(fullname, ctx) return None normalized = True if fullname in collections_type_aliases: # Similar, but for types from the collections module like typing.DefaultDict self.add_module_symbol('collections', '__mypy_collections__', False, ctx) new_node = self.lookup_qualified(collections_type_aliases[fullname], ctx) normalized = True if normalized: assert new_node is not None, "Collection node not found" node = SymbolTableNode(new_node.kind, new_node.node, new_node.type_override, normalized=True, alias_tvars=new_node.alias_tvars) return node def add_fixture_note(self, fullname: str, ctx: Context) -> None: self.note('Maybe your test fixture does not define "{}"?'.format(fullname), ctx) if fullname in SUGGESTED_TEST_FIXTURES: self.note( 'Consider adding [builtins fixtures/{}] to your test description'.format( SUGGESTED_TEST_FIXTURES[fullname]), ctx) def correct_relative_import(self, node: Union[ImportFrom, ImportAll]) -> str: if node.relative == 0: return node.id parts = self.cur_mod_id.split(".") cur_mod_id = self.cur_mod_id rel = node.relative if self.cur_mod_node.is_package_init_file(): rel -= 1 if len(parts) < rel: self.fail("Relative import climbs too many namespaces", node) if rel != 0: cur_mod_id = ".".join(parts[:-rel]) return cur_mod_id + (("." + node.id) if node.id else "") def visit_import_all(self, i: ImportAll) -> None: i_id = self.correct_relative_import(i) if i_id in self.modules: m = self.modules[i_id] self.add_submodules_to_parent_modules(i_id, True) for name, node in m.names.items(): new_node = self.normalize_type_alias(node, i) # if '__all__' exists, all nodes not included have had module_public set to # False, and we can skip checking '_' because it's been explicitly included. if (new_node and new_node.module_public and (not name.startswith('_') or '__all__' in m.names)): existing_symbol = self.globals.get(name) if existing_symbol: # Import can redefine a variable. They get special treatment. 
if self.process_import_over_existing_name( name, existing_symbol, new_node, i): continue self.add_symbol(name, SymbolTableNode(new_node.kind, new_node.node, new_node.type_override, normalized=new_node.normalized, alias_tvars=new_node.alias_tvars), i) else: # Don't add any dummy symbols for 'from x import *' if 'x' is unknown. pass def add_unknown_symbol(self, name: str, context: Context, is_import: bool = False) -> None: var = Var(name) if self.type: var._fullname = self.type.fullname() + "." + name else: var._fullname = self.qualified_name(name) var.is_ready = True if is_import: any_type = AnyType(TypeOfAny.from_unimported_type) else: any_type = AnyType(TypeOfAny.from_error) var.type = any_type var.is_suppressed_import = is_import self.add_symbol(name, SymbolTableNode(GDEF, var), context) # # Statements # def visit_block(self, b: Block) -> None: if b.is_unreachable: return self.block_depth[-1] += 1 for s in b.body: self.accept(s) self.block_depth[-1] -= 1 def visit_block_maybe(self, b: Optional[Block]) -> None: if b: self.visit_block(b) def type_analyzer(self, *, tvar_scope: Optional[TypeVarScope] = None, allow_tuple_literal: bool = False, aliasing: bool = False, third_pass: bool = False) -> TypeAnalyser: if tvar_scope is None: tvar_scope = self.tvar_scope tpan = TypeAnalyser(self.lookup_qualified, self.lookup_fully_qualified, tvar_scope, self.fail, self.note, self.plugin, self.options, self.is_typeshed_stub_file, aliasing=aliasing, allow_tuple_literal=allow_tuple_literal, allow_unnormalized=self.is_stub_file, third_pass=third_pass) tpan.in_dynamic_func = bool(self.function_stack and self.function_stack[-1].is_dynamic()) tpan.global_scope = not self.type and not self.function_stack return tpan def anal_type(self, t: Type, *, tvar_scope: Optional[TypeVarScope] = None, allow_tuple_literal: bool = False, aliasing: bool = False, third_pass: bool = False) -> Type: a = self.type_analyzer(tvar_scope=tvar_scope, aliasing=aliasing, allow_tuple_literal=allow_tuple_literal, third_pass=third_pass) return t.accept(a) def visit_assignment_stmt(self, s: AssignmentStmt) -> None: for lval in s.lvalues: self.analyze_lvalue(lval, explicit_type=s.type is not None) self.check_classvar(s) s.rvalue.accept(self) if s.type: allow_tuple_literal = isinstance(s.lvalues[-1], (TupleExpr, ListExpr)) s.type = self.anal_type(s.type, allow_tuple_literal=allow_tuple_literal) if (self.type and self.type.is_protocol and isinstance(lval, NameExpr) and isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs): if isinstance(lval.node, Var): lval.node.is_abstract_var = True else: if (any(isinstance(lv, NameExpr) and lv.is_inferred_def for lv in s.lvalues) and self.type and self.type.is_protocol and not self.is_func_scope()): self.fail('All protocol members must have explicitly declared types', s) # Set the type if the rvalue is a simple literal (even if the above error occurred). if len(s.lvalues) == 1 and isinstance(s.lvalues[0], NameExpr): if s.lvalues[0].is_inferred_def: s.type = self.analyze_simple_literal_type(s.rvalue) if s.type: # Store type into nodes. 
for lvalue in s.lvalues: self.store_declared_types(lvalue, s.type) self.check_and_set_up_type_alias(s) self.process_newtype_declaration(s) self.process_typevar_declaration(s) self.process_namedtuple_definition(s) self.process_typeddict_definition(s) self.process_enum_call(s) if not s.type: self.process_module_assignment(s.lvalues, s.rvalue, s) if (len(s.lvalues) == 1 and isinstance(s.lvalues[0], NameExpr) and s.lvalues[0].name == '__all__' and s.lvalues[0].kind == GDEF and isinstance(s.rvalue, (ListExpr, TupleExpr))): self.add_exports(*s.rvalue.items) def analyze_simple_literal_type(self, rvalue: Expression) -> Optional[Type]: """Return builtins.int if rvalue is an int literal, etc.""" if self.options.semantic_analysis_only or self.function_stack: # Skip this if we're only doing the semantic analysis pass. # This is mostly to avoid breaking unit tests. # Also skip inside a function; this is to avoid confusing # the code that handles dead code due to isinstance() # inside type variables with value restrictions (like # AnyStr). return None if isinstance(rvalue, IntExpr): return self.named_type_or_none('builtins.int') if isinstance(rvalue, FloatExpr): return self.named_type_or_none('builtins.float') if isinstance(rvalue, StrExpr): return self.named_type_or_none('builtins.str') if isinstance(rvalue, BytesExpr): return self.named_type_or_none('builtins.bytes') if isinstance(rvalue, UnicodeExpr): return self.named_type_or_none('builtins.unicode') return None def alias_fallback(self, tp: Type) -> Instance: """Make a dummy Instance with no methods. It is used as a fallback type to detect errors for non-Instance aliases (i.e. Unions, Tuples, Callables). """ kind = (' to Callable' if isinstance(tp, CallableType) else ' to Tuple' if isinstance(tp, TupleType) else ' to Union' if isinstance(tp, UnionType) else '') cdef = ClassDef('Type alias' + kind, Block([])) fb_info = TypeInfo(SymbolTable(), cdef, self.cur_mod_id) fb_info.bases = [self.object_type()] fb_info.mro = [fb_info, self.object_type().type] return Instance(fb_info, []) def analyze_alias(self, rvalue: Expression, warn_bound_tvar: bool = False) -> Tuple[Optional[Type], List[str]]: """Check if 'rvalue' represents a valid type allowed for aliasing (e.g. not a type variable). If yes, return the corresponding type and a list of qualified type variable names for generic aliases. """ dynamic = bool(self.function_stack and self.function_stack[-1].is_dynamic()) global_scope = not self.type and not self.function_stack res = analyze_type_alias(rvalue, self.lookup_qualified, self.lookup_fully_qualified, self.tvar_scope, self.fail, self.note, self.plugin, self.options, self.is_typeshed_stub_file, allow_unnormalized=True, in_dynamic_func=dynamic, global_scope=global_scope, warn_bound_tvar=warn_bound_tvar) if res: alias_tvars = [name for (name, _) in res.accept(TypeVariableQuery(self.lookup_qualified, self.tvar_scope))] else: alias_tvars = [] return res, alias_tvars def check_and_set_up_type_alias(self, s: AssignmentStmt) -> None: """Check if assignment creates a type alias and set it up as needed. For simple aliases like L = List we use a simpler mechanism, just copying TypeInfo. For subscripted (including generic) aliases the resulting types are stored in rvalue.analyzed. """ lvalue = s.lvalues[0] if len(s.lvalues) > 1 or not isinstance(lvalue, NameExpr): # First rule: Only simple assignments like Alias = ... create aliases. return if s.type: # Second rule: Explicit type (cls: Type[A] = A) always creates variable, not alias. 
return non_global_scope = self.type or self.is_func_scope() if isinstance(s.rvalue, NameExpr) and non_global_scope and lvalue.is_inferred_def: # Third rule: Non-subscripted right hand side creates a variable # at class and function scopes. For example: # # class Model: # ... # class C: # model = Model # this is automatically a variable with type 'Type[Model]' # # without this rule, this typical use case will require a lot of explicit # annotations (see the second rule). return rvalue = s.rvalue res, alias_tvars = self.analyze_alias(rvalue, warn_bound_tvar=True) if not res: return node = self.lookup(lvalue.name, lvalue) assert node is not None if not lvalue.is_inferred_def: # Type aliases can't be re-defined. if node and (node.kind == TYPE_ALIAS or isinstance(node.node, TypeInfo)): self.fail('Cannot assign multiple types to name "{}"' ' without an explicit "Type[...]" annotation' .format(lvalue.name), lvalue) return check_for_explicit_any(res, self.options, self.is_typeshed_stub_file, self.msg, context=s) # when this type alias gets "inlined", the Any is not explicit anymore, # so we need to replace it with non-explicit Anys res = make_any_non_explicit(res) if isinstance(res, Instance) and not res.args and isinstance(rvalue, RefExpr): # For simple (on-generic) aliases we use aliasing TypeInfo's # to allow using them in runtime context where it makes sense. node.node = res.type if isinstance(rvalue, RefExpr): sym = self.lookup_type_node(rvalue) if sym: node.normalized = sym.normalized return node.kind = TYPE_ALIAS node.type_override = res node.alias_tvars = alias_tvars if isinstance(rvalue, (IndexExpr, CallExpr)): # We only need this for subscripted aliases, since simple aliases # are already processed using aliasing TypeInfo's above. rvalue.analyzed = TypeAliasExpr(res, node.alias_tvars, fallback=self.alias_fallback(res)) rvalue.analyzed.line = rvalue.line rvalue.analyzed.column = rvalue.column def analyze_lvalue(self, lval: Lvalue, nested: bool = False, add_global: bool = False, explicit_type: bool = False) -> None: """Analyze an lvalue or assignment target. Args: lval: The target lvalue nested: If true, the lvalue is within a tuple or list lvalue expression add_global: Add name to globals table only if this is true (used in first pass) explicit_type: Assignment has type annotation """ if isinstance(lval, NameExpr): # Top-level definitions within some statements (at least while) are # not handled in the first pass, so they have to be added now. nested_global = (not self.is_func_scope() and self.block_depth[-1] > 0 and not self.type) if (add_global or nested_global) and lval.name not in self.globals: # Define new global name. v = Var(lval.name) v.set_line(lval) v._fullname = self.qualified_name(lval.name) v.is_ready = False # Type not inferred yet lval.node = v lval.is_new_def = True lval.is_inferred_def = True lval.kind = GDEF lval.fullname = v._fullname self.globals[lval.name] = SymbolTableNode(GDEF, v) elif isinstance(lval.node, Var) and lval.is_new_def: if lval.kind == GDEF: # Since the is_new_def flag is set, this must have been analyzed # already in the first pass and added to the symbol table. assert lval.node.name() in self.globals elif (self.locals[-1] is not None and lval.name not in self.locals[-1] and lval.name not in self.global_decls[-1] and lval.name not in self.nonlocal_decls[-1]): # Define new local name. 
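# ---------------------------------------------------------------------------
# Illustrative aside (not part of this module): how the three rules in
# check_and_set_up_type_alias() above classify assignments.  Names invented.
from typing import List, Optional, Type

IntList = List[int]            # module-level, unannotated -> type alias
MaybeStr = Optional[str]       # subscripted alias; the type is stored in rvalue.analyzed

class Model: ...

class Config:
    model = Model              # third rule: in a class body this is a variable
                               # of type 'Type[Model]', not an alias
    explicit: Type[Model] = Model   # second rule: an explicit annotation always
                                    # creates a variable
# ---------------------------------------------------------------------------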
v = Var(lval.name) v.set_line(lval) lval.node = v lval.is_new_def = True lval.is_inferred_def = True lval.kind = LDEF lval.fullname = lval.name self.add_local(v, lval) elif not self.is_func_scope() and (self.type and lval.name not in self.type.names): # Define a new attribute within class body. v = Var(lval.name) v.info = self.type v.is_initialized_in_class = True v.set_line(lval) v._fullname = self.qualified_name(lval.name) lval.node = v lval.is_new_def = True lval.is_inferred_def = True lval.kind = MDEF lval.fullname = lval.name self.type.names[lval.name] = SymbolTableNode(MDEF, v) elif explicit_type: # Don't re-bind types self.name_already_defined(lval.name, lval) else: # Bind to an existing name. lval.accept(self) self.check_lvalue_validity(lval.node, lval) elif isinstance(lval, MemberExpr): if not add_global: self.analyze_member_lvalue(lval) if explicit_type and not self.is_self_member_ref(lval): self.fail('Type cannot be declared in assignment to non-self ' 'attribute', lval) elif isinstance(lval, IndexExpr): if explicit_type: self.fail('Unexpected type declaration', lval) if not add_global: lval.accept(self) elif (isinstance(lval, TupleExpr) or isinstance(lval, ListExpr)): items = lval.items if len(items) == 0 and isinstance(lval, TupleExpr): self.fail("can't assign to ()", lval) self.analyze_tuple_or_list_lvalue(lval, add_global, explicit_type) elif isinstance(lval, StarExpr): if nested: self.analyze_lvalue(lval.expr, nested, add_global, explicit_type) else: self.fail('Starred assignment target must be in a list or tuple', lval) else: self.fail('Invalid assignment target', lval) def analyze_tuple_or_list_lvalue(self, lval: Union[ListExpr, TupleExpr], add_global: bool = False, explicit_type: bool = False) -> None: """Analyze an lvalue or assignment target that is a list or tuple.""" items = lval.items star_exprs = [item for item in items if isinstance(item, StarExpr)] if len(star_exprs) > 1: self.fail('Two starred expressions in assignment', lval) else: if len(star_exprs) == 1: star_exprs[0].valid = True for i in items: self.analyze_lvalue(i, nested=True, add_global=add_global, explicit_type = explicit_type) def analyze_member_lvalue(self, lval: MemberExpr) -> None: lval.accept(self) if self.is_self_member_ref(lval): assert self.type, "Self member outside a class" node = self.type.get(lval.name) if node is None or isinstance(node.node, Var) and node.node.is_abstract_var: if self.type.is_protocol and node is None: self.fail("Protocol members cannot be defined via assignment to self", lval) else: # Implicit attribute definition in __init__. 
lval.is_new_def = True lval.is_inferred_def = True v = Var(lval.name) v.set_line(lval) v._fullname = self.qualified_name(lval.name) v.info = self.type v.is_ready = False lval.def_var = v lval.node = v self.type.names[lval.name] = SymbolTableNode(MDEF, v, implicit=True) self.check_lvalue_validity(lval.node, lval) def is_self_member_ref(self, memberexpr: MemberExpr) -> bool: """Does memberexpr to refer to an attribute of self?""" if not isinstance(memberexpr.expr, NameExpr): return False node = memberexpr.expr.node return isinstance(node, Var) and node.is_self def check_lvalue_validity(self, node: Union[Expression, SymbolNode, None], ctx: Context) -> None: if isinstance(node, TypeVarExpr): self.fail('Invalid assignment target', ctx) elif isinstance(node, TypeInfo): self.fail(CANNOT_ASSIGN_TO_TYPE, ctx) def store_declared_types(self, lvalue: Lvalue, typ: Type) -> None: if isinstance(typ, StarType) and not isinstance(lvalue, StarExpr): self.fail('Star type only allowed for starred expressions', lvalue) if isinstance(lvalue, RefExpr): lvalue.is_inferred_def = False if isinstance(lvalue.node, Var): var = lvalue.node var.type = typ var.is_ready = True # If node is not a variable, we'll catch it elsewhere. elif isinstance(lvalue, TupleExpr): if isinstance(typ, TupleType): if len(lvalue.items) != len(typ.items): self.fail('Incompatible number of tuple items', lvalue) return for item, itemtype in zip(lvalue.items, typ.items): self.store_declared_types(item, itemtype) else: self.fail('Tuple type expected for multiple variables', lvalue) elif isinstance(lvalue, StarExpr): # Historical behavior for the old parser if isinstance(typ, StarType): self.store_declared_types(lvalue.expr, typ.type) else: self.store_declared_types(lvalue.expr, typ) else: # This has been flagged elsewhere as an error, so just ignore here. pass def process_newtype_declaration(self, s: AssignmentStmt) -> None: """Check if s declares a NewType; if yes, store it in symbol table.""" # Extract and check all information from newtype declaration name, call = self.analyze_newtype_declaration(s) if name is None or call is None: return old_type = self.check_newtype_args(name, call, s) call.analyzed = NewTypeExpr(name, old_type, line=call.line) if old_type is None: return # Create the corresponding class definition if the aliased type is subtypeable if isinstance(old_type, TupleType): newtype_class_info = self.build_newtype_typeinfo(name, old_type, old_type.fallback) newtype_class_info.tuple_type = old_type elif isinstance(old_type, Instance): if old_type.type.is_protocol: self.fail("NewType cannot be used with protocol classes", s) newtype_class_info = self.build_newtype_typeinfo(name, old_type, old_type) else: message = "Argument 2 to NewType(...) must be subclassable (got {})" self.fail(message.format(self.msg.format(old_type)), s) return check_for_explicit_any(old_type, self.options, self.is_typeshed_stub_file, self.msg, context=s) if self.options.disallow_any_unimported and has_any_from_unimported_type(old_type): self.msg.unimported_type_becomes_any("Argument 2 to NewType(...)", old_type, s) # If so, add it to the symbol table. node = self.lookup(name, s) if node is None: self.fail("Could not find {} in current namespace".format(name), s) return # TODO: why does NewType work in local scopes despite always being of kind GDEF? 
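# ---------------------------------------------------------------------------
# Illustrative aside (not part of this module): a NewType declaration of the
# shape validated by check_newtype_args() above.  Names invented.
from typing import NewType

UserId = NewType('UserId', int)   # first argument must match the variable name;
                                  # second must be a subclassable type (not a protocol)
uid = UserId(42)                  # at runtime NewType is just an identity callable
assert uid == 42
# ---------------------------------------------------------------------------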
node.kind = GDEF call.analyzed.info = node.node = newtype_class_info def analyze_newtype_declaration(self, s: AssignmentStmt) -> Tuple[Optional[str], Optional[CallExpr]]: """Return the NewType call expression if `s` is a newtype declaration or None otherwise.""" name, call = None, None if (len(s.lvalues) == 1 and isinstance(s.lvalues[0], NameExpr) and isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.callee, RefExpr) and s.rvalue.callee.fullname == 'typing.NewType'): lvalue = s.lvalues[0] name = s.lvalues[0].name if not lvalue.is_inferred_def: if s.type: self.fail("Cannot declare the type of a NewType declaration", s) else: self.fail("Cannot redefine '%s' as a NewType" % name, s) # This dummy NewTypeExpr marks the call as sufficiently analyzed; it will be # overwritten later with a fully complete NewTypeExpr if there are no other # errors with the NewType() call. call = s.rvalue return name, call def check_newtype_args(self, name: str, call: CallExpr, context: Context) -> Optional[Type]: has_failed = False args, arg_kinds = call.args, call.arg_kinds if len(args) != 2 or arg_kinds[0] != ARG_POS or arg_kinds[1] != ARG_POS: self.fail("NewType(...) expects exactly two positional arguments", context) return None # Check first argument if not isinstance(args[0], (StrExpr, BytesExpr, UnicodeExpr)): self.fail("Argument 1 to NewType(...) must be a string literal", context) has_failed = True elif args[0].value != name: msg = "String argument 1 '{}' to NewType(...) does not match variable name '{}'" self.fail(msg.format(args[0].value, name), context) has_failed = True # Check second argument try: unanalyzed_type = expr_to_unanalyzed_type(args[1]) except TypeTranslationError: self.fail("Argument 2 to NewType(...) must be a valid type", context) return None old_type = self.anal_type(unanalyzed_type) return None if has_failed else old_type def build_newtype_typeinfo(self, name: str, old_type: Type, base_type: Instance) -> TypeInfo: info = self.basic_new_typeinfo(name, base_type) info.is_newtype = True # Add __init__ method args = [Argument(Var('self'), NoneTyp(), None, ARG_POS), self.make_argument('item', old_type)] signature = CallableType( arg_types=[Instance(info, []), old_type], arg_kinds=[arg.kind for arg in args], arg_names=['self', 'item'], ret_type=old_type, fallback=self.named_type('__builtins__.function'), name=name) init_func = FuncDef('__init__', args, Block([]), typ=signature) init_func.info = info info.names['__init__'] = SymbolTableNode(MDEF, init_func) return info def process_typevar_declaration(self, s: AssignmentStmt) -> None: """Check if s declares a TypeVar; it yes, store it in symbol table.""" call = self.get_typevar_declaration(s) if not call: return lvalue = s.lvalues[0] assert isinstance(lvalue, NameExpr) name = lvalue.name if not lvalue.is_inferred_def: if s.type: self.fail("Cannot declare the type of a type variable", s) else: self.fail("Cannot redefine '%s' as a type variable" % name, s) return if not self.check_typevar_name(call, name, s): return # Constraining types n_values = call.arg_kinds[1:].count(ARG_POS) values = self.analyze_types(call.args[1:1 + n_values]) res = self.process_typevar_parameters(call.args[1 + n_values:], call.arg_names[1 + n_values:], call.arg_kinds[1 + n_values:], n_values, s) if res is None: return variance, upper_bound = res if self.options.disallow_any_unimported: for idx, constraint in enumerate(values, start=1): if has_any_from_unimported_type(constraint): prefix = "Constraint {}".format(idx) self.msg.unimported_type_becomes_any(prefix, 
constraint, s) if has_any_from_unimported_type(upper_bound): prefix = "Upper bound of type variable" self.msg.unimported_type_becomes_any(prefix, upper_bound, s) for t in values + [upper_bound]: check_for_explicit_any(t, self.options, self.is_typeshed_stub_file, self.msg, context=s) # Yes, it's a valid type variable definition! Add it to the symbol table. node = self.lookup(name, s) assert node is not None assert node.fullname is not None node.kind = TVAR TypeVar = TypeVarExpr(name, node.fullname, values, upper_bound, variance) TypeVar.line = call.line call.analyzed = TypeVar node.node = TypeVar def check_typevar_name(self, call: CallExpr, name: str, context: Context) -> bool: if len(call.args) < 1: self.fail("Too few arguments for TypeVar()", context) return False if (not isinstance(call.args[0], (StrExpr, BytesExpr, UnicodeExpr)) or not call.arg_kinds[0] == ARG_POS): self.fail("TypeVar() expects a string literal as first argument", context) return False elif call.args[0].value != name: msg = "String argument 1 '{}' to TypeVar(...) does not match variable name '{}'" self.fail(msg.format(call.args[0].value, name), context) return False return True def get_typevar_declaration(self, s: AssignmentStmt) -> Optional[CallExpr]: """Returns the TypeVar() call expression if `s` is a type var declaration or None otherwise. """ if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr): return None if not isinstance(s.rvalue, CallExpr): return None call = s.rvalue callee = call.callee if not isinstance(callee, RefExpr): return None if callee.fullname != 'typing.TypeVar': return None return call def process_typevar_parameters(self, args: List[Expression], names: List[Optional[str]], kinds: List[int], num_values: int, context: Context) -> Optional[Tuple[int, Type]]: has_values = (num_values > 0) covariant = False contravariant = False upper_bound = self.object_type() # type: Type for param_value, param_name, param_kind in zip(args, names, kinds): if not param_kind == ARG_NAMED: self.fail("Unexpected argument to TypeVar()", context) return None if param_name == 'covariant': if isinstance(param_value, NameExpr): if param_value.name == 'True': covariant = True else: self.fail("TypeVar 'covariant' may only be 'True'", context) return None else: self.fail("TypeVar 'covariant' may only be 'True'", context) return None elif param_name == 'contravariant': if isinstance(param_value, NameExpr): if param_value.name == 'True': contravariant = True else: self.fail("TypeVar 'contravariant' may only be 'True'", context) return None else: self.fail("TypeVar 'contravariant' may only be 'True'", context) return None elif param_name == 'bound': if has_values: self.fail("TypeVar cannot have both values and an upper bound", context) return None try: upper_bound = self.expr_to_analyzed_type(param_value) except TypeTranslationError: self.fail("TypeVar 'bound' must be a type", param_value) return None elif param_name == 'values': # Probably using obsolete syntax with values=(...). Explain the current syntax. self.fail("TypeVar 'values' argument not supported", context) self.fail("Use TypeVar('T', t, ...) 
instead of TypeVar('T', values=(t, ...))", context) return None else: self.fail("Unexpected argument to TypeVar(): {}".format(param_name), context) return None if covariant and contravariant: self.fail("TypeVar cannot be both covariant and contravariant", context) return None elif num_values == 1: self.fail("TypeVar cannot have only a single constraint", context) return None elif covariant: variance = COVARIANT elif contravariant: variance = CONTRAVARIANT else: variance = INVARIANT return (variance, upper_bound) def process_namedtuple_definition(self, s: AssignmentStmt) -> None: """Check if s defines a namedtuple; if yes, store the definition in symbol table.""" if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr): return lvalue = s.lvalues[0] name = lvalue.name named_tuple = self.check_namedtuple(s.rvalue, name) if named_tuple is None: return # Yes, it's a valid namedtuple definition. Add it to the symbol table. node = self.lookup(name, s) assert node is not None node.kind = GDEF # TODO locally defined namedtuple node.node = named_tuple def check_namedtuple(self, node: Expression, var_name: Optional[str] = None) -> Optional[TypeInfo]: """Check if a call defines a namedtuple. The optional var_name argument is the name of the variable to which this is assigned, if any. If it does, return the corresponding TypeInfo. Return None otherwise. If the definition is invalid but looks like a namedtuple, report errors but return (some) TypeInfo. """ if not isinstance(node, CallExpr): return None call = node callee = call.callee if not isinstance(callee, RefExpr): return None fullname = callee.fullname if fullname not in ('collections.namedtuple', 'typing.NamedTuple'): return None items, types, ok = self.parse_namedtuple_args(call, fullname) if not ok: # Error. Construct dummy return value. return self.build_namedtuple_typeinfo('namedtuple', [], [], {}) name = cast(StrExpr, call.args[0]).value if name != var_name or self.is_func_scope(): # Give it a unique name derived from the line number. name += '@' + str(call.line) info = self.build_namedtuple_typeinfo(name, items, types, {}) # Store it as a global just in case it would remain anonymous. # (Or in the nearest class if there is one.) stnode = SymbolTableNode(GDEF, info) if self.type: self.type.names[name] = stnode else: self.globals[name] = stnode call.analyzed = NamedTupleExpr(info) call.analyzed.set_line(call.line, call.column) return info def parse_namedtuple_args(self, call: CallExpr, fullname: str) -> Tuple[List[str], List[Type], bool]: # TODO: Share code with check_argument_count in checkexpr.py? args = call.args if len(args) < 2: return self.fail_namedtuple_arg("Too few arguments for namedtuple()", call) if len(args) > 2: # FIX incorrect. 
There are two additional parameters return self.fail_namedtuple_arg("Too many arguments for namedtuple()", call) if call.arg_kinds != [ARG_POS, ARG_POS]: return self.fail_namedtuple_arg("Unexpected arguments to namedtuple()", call) if not isinstance(args[0], (StrExpr, BytesExpr, UnicodeExpr)): return self.fail_namedtuple_arg( "namedtuple() expects a string literal as the first argument", call) types = [] # type: List[Type] ok = True if not isinstance(args[1], (ListExpr, TupleExpr)): if (fullname == 'collections.namedtuple' and isinstance(args[1], (StrExpr, BytesExpr, UnicodeExpr))): str_expr = cast(StrExpr, args[1]) items = str_expr.value.replace(',', ' ').split() else: return self.fail_namedtuple_arg( "List or tuple literal expected as the second argument to namedtuple()", call) else: listexpr = args[1] if fullname == 'collections.namedtuple': # The fields argument contains just names, with implicit Any types. if any(not isinstance(item, (StrExpr, BytesExpr, UnicodeExpr)) for item in listexpr.items): return self.fail_namedtuple_arg("String literal expected as namedtuple() item", call) items = [cast(StrExpr, item).value for item in listexpr.items] else: # The fields argument contains (name, type) tuples. items, types, ok = self.parse_namedtuple_fields_with_types(listexpr.items, call) if not types: types = [AnyType(TypeOfAny.unannotated) for _ in items] underscore = [item for item in items if item.startswith('_')] if underscore: self.fail("namedtuple() field names cannot start with an underscore: " + ', '.join(underscore), call) return items, types, ok def parse_namedtuple_fields_with_types(self, nodes: List[Expression], context: Context) -> Tuple[List[str], List[Type], bool]: items = [] # type: List[str] types = [] # type: List[Type] for item in nodes: if isinstance(item, TupleExpr): if len(item.items) != 2: return self.fail_namedtuple_arg("Invalid NamedTuple field definition", item) name, type_node = item.items if isinstance(name, (StrExpr, BytesExpr, UnicodeExpr)): items.append(name.value) else: return self.fail_namedtuple_arg("Invalid NamedTuple() field name", item) try: type = expr_to_unanalyzed_type(type_node) except TypeTranslationError: return self.fail_namedtuple_arg('Invalid field type', type_node) types.append(self.anal_type(type)) else: return self.fail_namedtuple_arg("Tuple expected as NamedTuple() field", item) return items, types, True def fail_namedtuple_arg(self, message: str, context: Context) -> Tuple[List[str], List[Type], bool]: self.fail(message, context) return [], [], False def basic_new_typeinfo(self, name: str, basetype_or_fallback: Instance) -> TypeInfo: class_def = ClassDef(name, Block([])) class_def.fullname = self.qualified_name(name) info = TypeInfo(SymbolTable(), class_def, self.cur_mod_id) class_def.info = info mro = basetype_or_fallback.type.mro if mro is None: # Forward reference, MRO should be recalculated in third pass. 
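# ---------------------------------------------------------------------------
# Illustrative aside (not part of this module): the call forms accepted by
# parse_namedtuple_args() above, plus a few of the members synthesized for
# them by build_namedtuple_typeinfo() below.  Names invented.
from collections import namedtuple
from typing import NamedTuple

Pair = namedtuple('Pair', 'x, y')                       # names only; fields are implicitly Any
Point = NamedTuple('Point', [('x', int), ('y', int)])   # (name, type) pairs

p = Point(1, 2)
assert p._replace(y=5) == Point(1, 5)
assert Point._make([3, 4]) == Point(3, 4)
assert p._fields == ('x', 'y') and p._asdict() == {'x': 1, 'y': 2}
# ---------------------------------------------------------------------------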
mro = [basetype_or_fallback.type, self.object_type().type] info.mro = [info] + mro info.bases = [basetype_or_fallback] return info def build_namedtuple_typeinfo(self, name: str, items: List[str], types: List[Type], default_items: Dict[str, Expression]) -> TypeInfo: strtype = self.str_type() implicit_any = AnyType(TypeOfAny.special_form) basetuple_type = self.named_type('__builtins__.tuple', [implicit_any]) dictype = (self.named_type_or_none('builtins.dict', [strtype, implicit_any]) or self.object_type()) # Actual signature should return OrderedDict[str, Union[types]] ordereddictype = (self.named_type_or_none('builtins.dict', [strtype, implicit_any]) or self.object_type()) fallback = self.named_type('__builtins__.tuple', [implicit_any]) # Note: actual signature should accept an invariant version of Iterable[UnionType[types]]. # but it can't be expressed. 'new' and 'len' should be callable types. iterable_type = self.named_type_or_none('typing.Iterable', [implicit_any]) function_type = self.named_type('__builtins__.function') info = self.basic_new_typeinfo(name, fallback) info.is_named_tuple = True info.tuple_type = TupleType(types, fallback) def patch() -> None: # Calculate the correct value type for the fallback tuple. assert info.tuple_type, "TupleType type deleted before calling the patch" fallback.args[0] = join.join_type_list(list(info.tuple_type.items)) # We can't calculate the complete fallback type until after semantic # analysis, since otherwise MROs might be incomplete. Postpone a callback # function that patches the fallback. self.patches.append(patch) def add_field(var: Var, is_initialized_in_class: bool = False, is_property: bool = False) -> None: var.info = info var.is_initialized_in_class = is_initialized_in_class var.is_property = is_property var._fullname = '%s.%s' % (info.fullname(), var.name()) info.names[var.name()] = SymbolTableNode(MDEF, var) vars = [Var(item, typ) for item, typ in zip(items, types)] for var in vars: add_field(var, is_property=True) tuple_of_strings = TupleType([strtype for _ in items], basetuple_type) add_field(Var('_fields', tuple_of_strings), is_initialized_in_class=True) add_field(Var('_field_types', dictype), is_initialized_in_class=True) add_field(Var('_field_defaults', dictype), is_initialized_in_class=True) add_field(Var('_source', strtype), is_initialized_in_class=True) add_field(Var('__annotations__', ordereddictype), is_initialized_in_class=True) add_field(Var('__doc__', strtype), is_initialized_in_class=True) tvd = TypeVarDef('NT', 'NT', 1, [], info.tuple_type) selftype = TypeVarType(tvd) def add_method(funcname: str, ret: Type, args: List[Argument], name: Optional[str] = None, is_classmethod: bool = False, ) -> None: if is_classmethod: first = [Argument(Var('cls'), TypeType.make_normalized(selftype), None, ARG_POS)] else: first = [Argument(Var('self'), selftype, None, ARG_POS)] args = first + args types = [arg.type_annotation for arg in args] items = [arg.variable.name() for arg in args] arg_kinds = [arg.kind for arg in args] assert None not in types signature = CallableType(cast(List[Type], types), arg_kinds, items, ret, function_type) signature.variables = [tvd] func = FuncDef(funcname, args, Block([])) func.info = info func.is_class = is_classmethod func.type = set_callable_name(signature, func) func._fullname = info.fullname() + '.' 
+ funcname if is_classmethod: v = Var(funcname, func.type) v.is_classmethod = True v.info = info v._fullname = func._fullname dec = Decorator(func, [NameExpr('classmethod')], v) info.names[funcname] = SymbolTableNode(MDEF, dec) else: info.names[funcname] = SymbolTableNode(MDEF, func) add_method('_replace', ret=selftype, args=[Argument(var, var.type, EllipsisExpr(), ARG_NAMED_OPT) for var in vars]) def make_init_arg(var: Var) -> Argument: default = default_items.get(var.name(), None) kind = ARG_POS if default is None else ARG_OPT return Argument(var, var.type, default, kind) add_method('__init__', ret=NoneTyp(), name=info.name(), args=[make_init_arg(var) for var in vars]) add_method('_asdict', args=[], ret=ordereddictype) special_form_any = AnyType(TypeOfAny.special_form) add_method('_make', ret=selftype, is_classmethod=True, args=[Argument(Var('iterable', iterable_type), iterable_type, None, ARG_POS), Argument(Var('new'), special_form_any, EllipsisExpr(), ARG_NAMED_OPT), Argument(Var('len'), special_form_any, EllipsisExpr(), ARG_NAMED_OPT)]) return info def make_argument(self, name: str, type: Type) -> Argument: return Argument(Var(name), type, None, ARG_POS) def analyze_types(self, items: List[Expression]) -> List[Type]: result = [] # type: List[Type] for node in items: try: result.append(self.anal_type(expr_to_unanalyzed_type(node))) except TypeTranslationError: self.fail('Type expected', node) result.append(AnyType(TypeOfAny.from_error)) return result def process_typeddict_definition(self, s: AssignmentStmt) -> None: """Check if s defines a TypedDict; if yes, store the definition in symbol table.""" if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr): return lvalue = s.lvalues[0] name = lvalue.name typed_dict = self.check_typeddict(s.rvalue, name) if typed_dict is None: return # Yes, it's a valid TypedDict definition. Add it to the symbol table. node = self.lookup(name, s) if node: node.kind = GDEF # TODO locally defined TypedDict node.node = typed_dict def check_typeddict(self, node: Expression, var_name: Optional[str] = None) -> Optional[TypeInfo]: """Check if a call defines a TypedDict. The optional var_name argument is the name of the variable to which this is assigned, if any. If it does, return the corresponding TypeInfo. Return None otherwise. If the definition is invalid but looks like a TypedDict, report errors but return (some) TypeInfo. """ if not isinstance(node, CallExpr): return None call = node callee = call.callee if not isinstance(callee, RefExpr): return None fullname = callee.fullname if fullname != 'mypy_extensions.TypedDict': return None items, types, total, ok = self.parse_typeddict_args(call) if not ok: # Error. Construct dummy return value. info = self.build_typeddict_typeinfo('TypedDict', [], [], set()) else: name = cast(StrExpr, call.args[0]).value if var_name is not None and name != var_name: self.fail( "First argument '{}' to TypedDict() does not match variable name '{}'".format( name, var_name), node) if name != var_name or self.is_func_scope(): # Give it a unique name derived from the line number. name += '@' + str(call.line) required_keys = set(items) if total else set() info = self.build_typeddict_typeinfo(name, items, types, required_keys) # Store it as a global just in case it would remain anonymous. # (Or in the nearest class if there is one.) 
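# ---------------------------------------------------------------------------
# Illustrative aside (not part of this module): the TypedDict forms handled by
# check_typeddict() above and by analyze_typeddict_classdef() earlier in this
# file.  Assumes the external 'mypy_extensions' package is installed; the
# names are invented.
from mypy_extensions import TypedDict

Movie = TypedDict('Movie', {'name': str, 'year': int})          # functional form
Options = TypedDict('Options', {'verbose': bool}, total=False)  # keys become optional

class Book(TypedDict):          # class form, Python 3.6+ only
    title: str
    pages: int

m = Movie(name='Blade Runner', year=1982)
assert m['year'] == 1982        # at runtime a TypedDict value is a plain dict
# ---------------------------------------------------------------------------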
stnode = SymbolTableNode(GDEF, info) if self.type: self.type.names[name] = stnode else: self.globals[name] = stnode call.analyzed = TypedDictExpr(info) call.analyzed.set_line(call.line, call.column) return info def parse_typeddict_args(self, call: CallExpr) -> Tuple[List[str], List[Type], bool, bool]: # TODO: Share code with check_argument_count in checkexpr.py? args = call.args if len(args) < 2: return self.fail_typeddict_arg("Too few arguments for TypedDict()", call) if len(args) > 3: return self.fail_typeddict_arg("Too many arguments for TypedDict()", call) # TODO: Support keyword arguments if call.arg_kinds not in ([ARG_POS, ARG_POS], [ARG_POS, ARG_POS, ARG_NAMED]): return self.fail_typeddict_arg("Unexpected arguments to TypedDict()", call) if len(args) == 3 and call.arg_names[2] != 'total': return self.fail_typeddict_arg( 'Unexpected keyword argument "{}" for "TypedDict"'.format(call.arg_names[2]), call) if not isinstance(args[0], (StrExpr, BytesExpr, UnicodeExpr)): return self.fail_typeddict_arg( "TypedDict() expects a string literal as the first argument", call) if not isinstance(args[1], DictExpr): return self.fail_typeddict_arg( "TypedDict() expects a dictionary literal as the second argument", call) total = True # type: Optional[bool] if len(args) == 3: total = self.parse_bool(call.args[2]) if total is None: return self.fail_typeddict_arg( 'TypedDict() "total" argument must be True or False', call) dictexpr = args[1] items, types, ok = self.parse_typeddict_fields_with_types(dictexpr.items, call) for t in types: check_for_explicit_any(t, self.options, self.is_typeshed_stub_file, self.msg, context=call) if self.options.disallow_any_unimported: for t in types: if has_any_from_unimported_type(t): self.msg.unimported_type_becomes_any("Type of a TypedDict key", t, dictexpr) assert total is not None return items, types, total, ok def parse_bool(self, expr: Expression) -> Optional[bool]: if isinstance(expr, NameExpr): if expr.fullname == 'builtins.True': return True if expr.fullname == 'builtins.False': return False return None def parse_typeddict_fields_with_types(self, dict_items: List[Tuple[Expression, Expression]], context: Context) -> Tuple[List[str], List[Type], bool]: items = [] # type: List[str] types = [] # type: List[Type] for (field_name_expr, field_type_expr) in dict_items: if isinstance(field_name_expr, (StrExpr, BytesExpr, UnicodeExpr)): items.append(field_name_expr.value) else: self.fail_typeddict_arg("Invalid TypedDict() field name", field_name_expr) return [], [], False try: type = expr_to_unanalyzed_type(field_type_expr) except TypeTranslationError: self.fail_typeddict_arg('Invalid field type', field_type_expr) return [], [], False types.append(self.anal_type(type)) return items, types, True def fail_typeddict_arg(self, message: str, context: Context) -> Tuple[List[str], List[Type], bool, bool]: self.fail(message, context) return [], [], True, False def build_typeddict_typeinfo(self, name: str, items: List[str], types: List[Type], required_keys: Set[str]) -> TypeInfo: fallback = (self.named_type_or_none('typing.Mapping', [self.str_type(), self.object_type()]) or self.object_type()) info = self.basic_new_typeinfo(name, fallback) info.typeddict_type = TypedDictType(OrderedDict(zip(items, types)), required_keys, fallback) def patch() -> None: # Calculate the correct value type for the fallback Mapping. 
assert info.typeddict_type, "TypedDict type deleted before calling the patch" fallback.args[1] = join.join_type_list(list(info.typeddict_type.items.values())) # We can't calculate the complete fallback type until after semantic # analysis, since otherwise MROs might be incomplete. Postpone a callback # function that patches the fallback. self.patches.append(patch) return info def check_classvar(self, s: AssignmentStmt) -> None: lvalue = s.lvalues[0] if len(s.lvalues) != 1 or not isinstance(lvalue, RefExpr): return if not s.type or not self.is_classvar(s.type): return if self.is_class_scope() and isinstance(lvalue, NameExpr): node = lvalue.node if isinstance(node, Var): node.is_classvar = True elif not isinstance(lvalue, MemberExpr) or self.is_self_member_ref(lvalue): # In case of member access, report error only when assigning to self # Other kinds of member assignments should be already reported self.fail_invalid_classvar(lvalue) def is_classvar(self, typ: Type) -> bool: if not isinstance(typ, UnboundType): return False sym = self.lookup_qualified(typ.name, typ) if not sym or not sym.node: return False return sym.node.fullname() == 'typing.ClassVar' def fail_invalid_classvar(self, context: Context) -> None: self.fail('ClassVar can only be used for assignments in class body', context) def process_module_assignment(self, lvals: List[Expression], rval: Expression, ctx: AssignmentStmt) -> None: """Propagate module references across assignments. Recursively handles the simple form of iterable unpacking; doesn't handle advanced unpacking with *rest, dictionary unpacking, etc. In an expression like x = y = z, z is the rval and lvals will be [x, y]. """ if all(isinstance(v, (TupleExpr, ListExpr)) for v in lvals + [rval]): # rval and all lvals are either list or tuple, so we are dealing # with unpacking assignment like `x, y = a, b`. Mypy didn't # understand our all(isinstance(...)), so cast them as # Union[TupleExpr, ListExpr] so mypy knows it is safe to access # their .items attribute. seq_lvals = cast(List[Union[TupleExpr, ListExpr]], lvals) seq_rval = cast(Union[TupleExpr, ListExpr], rval) # given an assignment like: # (x, y) = (m, n) = (a, b) # we now have: # seq_lvals = [(x, y), (m, n)] # seq_rval = (a, b) # We now zip this into: # elementwise_assignments = [(a, x, m), (b, y, n)] # where each elementwise assignment includes one element of rval and the # corresponding element of each lval. Basically we unpack # (x, y) = (m, n) = (a, b) # into elementwise assignments # x = m = a # y = n = b # and then we recursively call this method for each of those assignments. # If the rval and all lvals are not all of the same length, zip will just ignore # extra elements, so no error will be raised here; mypy will later complain # about the length mismatch in type-checking. 
elementwise_assignments = zip(seq_rval.items, *[v.items for v in seq_lvals]) for rv, *lvs in elementwise_assignments: self.process_module_assignment(lvs, rv, ctx) elif isinstance(rval, RefExpr): rnode = self.lookup_type_node(rval) if rnode and rnode.kind == MODULE_REF: for lval in lvals: if not isinstance(lval, NameExpr): continue # respect explicitly annotated type if (isinstance(lval.node, Var) and lval.node.type is not None): continue lnode = self.lookup(lval.name, ctx) if lnode: if lnode.kind == MODULE_REF and lnode.node is not rnode.node: self.fail( "Cannot assign multiple modules to name '{}' " "without explicit 'types.ModuleType' annotation".format(lval.name), ctx) # never create module alias except on initial var definition elif lval.is_inferred_def: lnode.kind = MODULE_REF lnode.node = rnode.node def process_enum_call(self, s: AssignmentStmt) -> None: """Check if s defines an Enum; if yes, store the definition in symbol table.""" if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr): return lvalue = s.lvalues[0] name = lvalue.name enum_call = self.check_enum_call(s.rvalue, name) if enum_call is None: return # Yes, it's a valid Enum definition. Add it to the symbol table. node = self.lookup(name, s) if node: node.kind = GDEF # TODO locally defined Enum node.node = enum_call def check_enum_call(self, node: Expression, var_name: Optional[str] = None) -> Optional[TypeInfo]: """Check if a call defines an Enum. Example: A = enum.Enum('A', 'foo bar') is equivalent to: class A(enum.Enum): foo = 1 bar = 2 """ if not isinstance(node, CallExpr): return None call = node callee = call.callee if not isinstance(callee, RefExpr): return None fullname = callee.fullname if fullname not in ('enum.Enum', 'enum.IntEnum', 'enum.Flag', 'enum.IntFlag'): return None items, values, ok = self.parse_enum_call_args(call, fullname.split('.')[-1]) if not ok: # Error. Construct dummy return value. return self.build_enum_call_typeinfo('Enum', [], fullname) name = cast(StrExpr, call.args[0]).value if name != var_name or self.is_func_scope(): # Give it a unique name derived from the line number. name += '@' + str(call.line) info = self.build_enum_call_typeinfo(name, items, fullname) # Store it as a global just in case it would remain anonymous. # (Or in the nearest class if there is one.) 
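# Illustrative sketch (not part of the original source) of the functional Enum
# forms accepted by check_enum_call()/parse_enum_call_args() below; the names
# are hypothetical:
#
#     import enum
#     Color = enum.Enum('Color', 'RED GREEN BLUE')             # space/comma-separated names
#     Color = enum.Enum('Color', ['RED', 'GREEN', 'BLUE'])     # list/tuple of names
#     Color = enum.Enum('Color', [('RED', 1), ('GREEN', 2)])   # (name, value) pairs
#     Color = enum.Enum('Color', {'RED': 1, 'GREEN': 2})       # dict literal
#
# As with TypedDict above, an anonymous or function-local definition gets a
# line-number suffix and is stored in the nearest enclosing symbol table.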
stnode = SymbolTableNode(GDEF, info) if self.type: self.type.names[name] = stnode else: self.globals[name] = stnode call.analyzed = EnumCallExpr(info, items, values) call.analyzed.set_line(call.line, call.column) return info def build_enum_call_typeinfo(self, name: str, items: List[str], fullname: str) -> TypeInfo: base = self.named_type_or_none(fullname) assert base is not None info = self.basic_new_typeinfo(name, base) info.is_enum = True for item in items: var = Var(item) var.info = info var.is_property = True info.names[item] = SymbolTableNode(MDEF, var) return info def parse_enum_call_args(self, call: CallExpr, class_name: str) -> Tuple[List[str], List[Optional[Expression]], bool]: args = call.args if len(args) < 2: return self.fail_enum_call_arg("Too few arguments for %s()" % class_name, call) if len(args) > 2: return self.fail_enum_call_arg("Too many arguments for %s()" % class_name, call) if call.arg_kinds != [ARG_POS, ARG_POS]: return self.fail_enum_call_arg("Unexpected arguments to %s()" % class_name, call) if not isinstance(args[0], (StrExpr, UnicodeExpr)): return self.fail_enum_call_arg( "%s() expects a string literal as the first argument" % class_name, call) items = [] values = [] # type: List[Optional[Expression]] if isinstance(args[1], (StrExpr, UnicodeExpr)): fields = args[1].value for field in fields.replace(',', ' ').split(): items.append(field) elif isinstance(args[1], (TupleExpr, ListExpr)): seq_items = args[1].items if all(isinstance(seq_item, (StrExpr, UnicodeExpr)) for seq_item in seq_items): items = [cast(StrExpr, seq_item).value for seq_item in seq_items] elif all(isinstance(seq_item, (TupleExpr, ListExpr)) and len(seq_item.items) == 2 and isinstance(seq_item.items[0], (StrExpr, UnicodeExpr)) for seq_item in seq_items): for seq_item in seq_items: assert isinstance(seq_item, (TupleExpr, ListExpr)) name, value = seq_item.items assert isinstance(name, (StrExpr, UnicodeExpr)) items.append(name.value) values.append(value) else: return self.fail_enum_call_arg( "%s() with tuple or list expects strings or (name, value) pairs" % class_name, call) elif isinstance(args[1], DictExpr): for key, value in args[1].items: if not isinstance(key, (StrExpr, UnicodeExpr)): return self.fail_enum_call_arg( "%s() with dict literal requires string literals" % class_name, call) items.append(key.value) values.append(value) else: # TODO: Allow dict(x=1, y=2) as a substitute for {'x': 1, 'y': 2}? return self.fail_enum_call_arg( "%s() expects a string, tuple, list or dict literal as the second argument" % class_name, call) if len(items) == 0: return self.fail_enum_call_arg("%s() needs at least one item" % class_name, call) if not values: values = [None] * len(items) assert len(items) == len(values) return items, values, True def fail_enum_call_arg(self, message: str, context: Context) -> Tuple[List[str], List[Optional[Expression]], bool]: self.fail(message, context) return [], [], False def visit_decorator(self, dec: Decorator) -> None: for d in dec.decorators: d.accept(self) removed = [] # type: List[int] no_type_check = False for i, d in enumerate(dec.decorators): # A bunch of decorators are special cased here. 
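# Hedged illustration (not from the original source) of the decorators the loop
# below special-cases; each marks the function as abstract/static/class/property
# or disables checking entirely:
#
#     class C:
#         @abc.abstractmethod
#         def f(self) -> int: ...
#         @staticmethod
#         def g() -> None: ...
#         @classmethod
#         def h(cls) -> 'C': ...
#         @property
#         def x(self) -> int: ...
#         @typing.no_type_check
#         def untyped(self): ...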
if refers_to_fullname(d, 'abc.abstractmethod'): removed.append(i) dec.func.is_abstract = True self.check_decorated_function_is_method('abstractmethod', dec) elif (refers_to_fullname(d, 'asyncio.coroutines.coroutine') or refers_to_fullname(d, 'types.coroutine')): removed.append(i) dec.func.is_awaitable_coroutine = True elif refers_to_fullname(d, 'builtins.staticmethod'): removed.append(i) dec.func.is_static = True dec.var.is_staticmethod = True self.check_decorated_function_is_method('staticmethod', dec) elif refers_to_fullname(d, 'builtins.classmethod'): removed.append(i) dec.func.is_class = True dec.var.is_classmethod = True self.check_decorated_function_is_method('classmethod', dec) elif (refers_to_fullname(d, 'builtins.property') or refers_to_fullname(d, 'abc.abstractproperty')): removed.append(i) dec.func.is_property = True dec.var.is_property = True if refers_to_fullname(d, 'abc.abstractproperty'): dec.func.is_abstract = True self.check_decorated_function_is_method('property', dec) if len(dec.func.arguments) > 1: self.fail('Too many arguments', dec.func) elif refers_to_fullname(d, 'typing.no_type_check'): dec.var.type = AnyType(TypeOfAny.special_form) no_type_check = True for i in reversed(removed): del dec.decorators[i] if not dec.is_overload or dec.var.is_property: if self.is_func_scope(): self.add_symbol(dec.var.name(), SymbolTableNode(LDEF, dec), dec) elif self.type: dec.var.info = self.type dec.var.is_initialized_in_class = True self.add_symbol(dec.var.name(), SymbolTableNode(MDEF, dec), dec) if not no_type_check: dec.func.accept(self) if dec.decorators and dec.var.is_property: self.fail('Decorated property not supported', dec) def check_decorated_function_is_method(self, decorator: str, context: Context) -> None: if not self.type or self.is_func_scope(): self.fail("'%s' used with a non-method" % decorator, context) def visit_expression_stmt(self, s: ExpressionStmt) -> None: s.expr.accept(self) def visit_return_stmt(self, s: ReturnStmt) -> None: if not self.is_func_scope(): self.fail("'return' outside function", s) if s.expr: s.expr.accept(self) def visit_raise_stmt(self, s: RaiseStmt) -> None: if s.expr: s.expr.accept(self) if s.from_expr: s.from_expr.accept(self) def visit_assert_stmt(self, s: AssertStmt) -> None: if s.expr: s.expr.accept(self) if s.msg: s.msg.accept(self) def visit_operator_assignment_stmt(self, s: OperatorAssignmentStmt) -> None: s.lvalue.accept(self) s.rvalue.accept(self) if (isinstance(s.lvalue, NameExpr) and s.lvalue.name == '__all__' and s.lvalue.kind == GDEF and isinstance(s.rvalue, (ListExpr, TupleExpr))): self.add_exports(*s.rvalue.items) def visit_while_stmt(self, s: WhileStmt) -> None: s.expr.accept(self) self.loop_depth += 1 s.body.accept(self) self.loop_depth -= 1 self.visit_block_maybe(s.else_body) def visit_for_stmt(self, s: ForStmt) -> None: s.expr.accept(self) # Bind index variables and check if they define new names. 
self.analyze_lvalue(s.index, explicit_type=s.index_type is not None) if s.index_type: if self.is_classvar(s.index_type): self.fail_invalid_classvar(s.index) allow_tuple_literal = isinstance(s.index, (TupleExpr, ListExpr)) s.index_type = self.anal_type(s.index_type, allow_tuple_literal=allow_tuple_literal) self.store_declared_types(s.index, s.index_type) self.loop_depth += 1 self.visit_block(s.body) self.loop_depth -= 1 self.visit_block_maybe(s.else_body) def visit_break_stmt(self, s: BreakStmt) -> None: if self.loop_depth == 0: self.fail("'break' outside loop", s, True, blocker=True) def visit_continue_stmt(self, s: ContinueStmt) -> None: if self.loop_depth == 0: self.fail("'continue' outside loop", s, True, blocker=True) def visit_if_stmt(self, s: IfStmt) -> None: infer_reachability_of_if_statement(s, pyversion=self.options.python_version, platform=self.options.platform) for i in range(len(s.expr)): s.expr[i].accept(self) self.visit_block(s.body[i]) self.visit_block_maybe(s.else_body) def visit_try_stmt(self, s: TryStmt) -> None: self.analyze_try_stmt(s, self) def analyze_try_stmt(self, s: TryStmt, visitor: NodeVisitor[None], add_global: bool = False) -> None: s.body.accept(visitor) for type, var, handler in zip(s.types, s.vars, s.handlers): if type: type.accept(visitor) if var: self.analyze_lvalue(var, add_global=add_global) handler.accept(visitor) if s.else_body: s.else_body.accept(visitor) if s.finally_body: s.finally_body.accept(visitor) def visit_with_stmt(self, s: WithStmt) -> None: types = [] # type: List[Type] if s.target_type: actual_targets = [t for t in s.target if t is not None] if len(actual_targets) == 0: # We have a type for no targets self.fail('Invalid type comment', s) elif len(actual_targets) == 1: # We have one target and one type types = [s.target_type] elif isinstance(s.target_type, TupleType): # We have multiple targets and multiple types if len(actual_targets) == len(s.target_type.items): types = s.target_type.items else: # But it's the wrong number of items self.fail('Incompatible number of types for `with` targets', s) else: # We have multiple targets and one type self.fail('Multiple types expected for multiple `with` targets', s) new_types = [] # type: List[Type] for e, n in zip(s.expr, s.target): e.accept(self) if n: self.analyze_lvalue(n, explicit_type=s.target_type is not None) # Since we have a target, pop the next type from types if types: t = types.pop(0) if self.is_classvar(t): self.fail_invalid_classvar(n) allow_tuple_literal = isinstance(n, (TupleExpr, ListExpr)) t = self.anal_type(t, allow_tuple_literal=allow_tuple_literal) new_types.append(t) self.store_declared_types(n, t) # Reverse the logic above to correctly reassign target_type if new_types: if len(s.target) == 1: s.target_type = new_types[0] elif isinstance(s.target_type, TupleType): s.target_type = s.target_type.copy_modified(items=new_types) self.visit_block(s.body) def visit_del_stmt(self, s: DelStmt) -> None: s.expr.accept(self) if not self.is_valid_del_target(s.expr): self.fail('Invalid delete target', s) def is_valid_del_target(self, s: Expression) -> bool: if isinstance(s, (IndexExpr, NameExpr, MemberExpr)): return True elif isinstance(s, TupleExpr): return all(self.is_valid_del_target(item) for item in s.items) else: return False def visit_global_decl(self, g: GlobalDecl) -> None: for name in g.names: if name in self.nonlocal_decls[-1]: self.fail("Name '{}' is nonlocal and global".format(name), g) self.global_decls[-1].add(name) def visit_nonlocal_decl(self, d: NonlocalDecl) -> None: if 
not self.is_func_scope(): self.fail("nonlocal declaration not allowed at module level", d) else: for name in d.names: for table in reversed(self.locals[:-1]): if table is not None and name in table: break else: self.fail("No binding for nonlocal '{}' found".format(name), d) if self.locals[-1] is not None and name in self.locals[-1]: self.fail("Name '{}' is already defined in local " "scope before nonlocal declaration".format(name), d) if name in self.global_decls[-1]: self.fail("Name '{}' is nonlocal and global".format(name), d) self.nonlocal_decls[-1].add(name) def visit_print_stmt(self, s: PrintStmt) -> None: for arg in s.args: arg.accept(self) if s.target: s.target.accept(self) def visit_exec_stmt(self, s: ExecStmt) -> None: s.expr.accept(self) if s.globals: s.globals.accept(self) if s.locals: s.locals.accept(self) # # Expressions # def visit_name_expr(self, expr: NameExpr) -> None: n = self.lookup(expr.name, expr) if n: if n.kind == TVAR and self.tvar_scope.get_binding(n): self.fail("'{}' is a type variable and only valid in type " "context".format(expr.name), expr) else: expr.kind = n.kind expr.node = n.node expr.fullname = n.fullname def visit_super_expr(self, expr: SuperExpr) -> None: if not self.type: self.fail('"super" used outside class', expr) return expr.info = self.type for arg in expr.call.args: arg.accept(self) def visit_tuple_expr(self, expr: TupleExpr) -> None: for item in expr.items: if isinstance(item, StarExpr): item.valid = True item.accept(self) def visit_list_expr(self, expr: ListExpr) -> None: for item in expr.items: if isinstance(item, StarExpr): item.valid = True item.accept(self) def visit_set_expr(self, expr: SetExpr) -> None: for item in expr.items: if isinstance(item, StarExpr): item.valid = True item.accept(self) def visit_dict_expr(self, expr: DictExpr) -> None: for key, value in expr.items: if key is not None: key.accept(self) value.accept(self) def visit_star_expr(self, expr: StarExpr) -> None: if not expr.valid: # XXX TODO Change this error message self.fail('Can use starred expression only as assignment target', expr) else: expr.expr.accept(self) def visit_yield_from_expr(self, e: YieldFromExpr) -> None: if not self.is_func_scope(): # not sure self.fail("'yield from' outside function", e, True, blocker=True) else: if self.function_stack[-1].is_coroutine: self.fail("'yield from' in async function", e, True, blocker=True) else: self.function_stack[-1].is_generator = True if e.expr: e.expr.accept(self) def visit_call_expr(self, expr: CallExpr) -> None: """Analyze a call expression. Some call expressions are recognized as special forms, including cast(...). """ if expr.analyzed: return expr.callee.accept(self) if refers_to_fullname(expr.callee, 'typing.cast'): # Special form cast(...). if not self.check_fixed_args(expr, 2, 'cast'): return # Translate first argument to an unanalyzed type. try: target = expr_to_unanalyzed_type(expr.args[0]) except TypeTranslationError: self.fail('Cast target is not a type', expr) return # Piggyback CastExpr object to the CallExpr object; it takes # precedence over the CallExpr semantics. expr.analyzed = CastExpr(expr.args[1], target) expr.analyzed.line = expr.line expr.analyzed.accept(self) elif refers_to_fullname(expr.callee, 'builtins.reveal_type'): if not self.check_fixed_args(expr, 1, 'reveal_type'): return expr.analyzed = RevealTypeExpr(expr.args[0]) expr.analyzed.line = expr.line expr.analyzed.column = expr.column expr.analyzed.accept(self) elif refers_to_fullname(expr.callee, 'typing.Any'): # Special form Any(...) 
no longer supported. self.fail('Any(...) is no longer supported. Use cast(Any, ...) instead', expr) elif refers_to_fullname(expr.callee, 'typing._promote'): # Special form _promote(...). if not self.check_fixed_args(expr, 1, '_promote'): return # Translate first argument to an unanalyzed type. try: target = expr_to_unanalyzed_type(expr.args[0]) except TypeTranslationError: self.fail('Argument 1 to _promote is not a type', expr) return expr.analyzed = PromoteExpr(target) expr.analyzed.line = expr.line expr.analyzed.accept(self) elif refers_to_fullname(expr.callee, 'builtins.dict'): expr.analyzed = self.translate_dict_call(expr) else: # Normal call expression. for a in expr.args: a.accept(self) if (isinstance(expr.callee, MemberExpr) and isinstance(expr.callee.expr, NameExpr) and expr.callee.expr.name == '__all__' and expr.callee.expr.kind == GDEF and expr.callee.name in ('append', 'extend')): if expr.callee.name == 'append' and expr.args: self.add_exports(expr.args[0]) elif (expr.callee.name == 'extend' and expr.args and isinstance(expr.args[0], (ListExpr, TupleExpr))): self.add_exports(*expr.args[0].items) def translate_dict_call(self, call: CallExpr) -> Optional[DictExpr]: """Translate 'dict(x=y, ...)' to {'x': y, ...}. For other variants of dict(...), return None. """ if not call.args: return None if not all(kind == ARG_NAMED for kind in call.arg_kinds): # Must still accept those args. for a in call.args: a.accept(self) return None expr = DictExpr([(StrExpr(cast(str, key)), value) # since they are all ARG_NAMED for key, value in zip(call.arg_names, call.args)]) expr.set_line(call) expr.accept(self) return expr def check_fixed_args(self, expr: CallExpr, numargs: int, name: str) -> bool: """Verify that expr has specified number of positional args. Return True if the arguments are valid. """ s = 's' if numargs == 1: s = '' if len(expr.args) != numargs: self.fail("'%s' expects %d argument%s" % (name, numargs, s), expr) return False if expr.arg_kinds != [ARG_POS] * numargs: self.fail("'%s' must be called with %s positional argument%s" % (name, numargs, s), expr) return False return True def visit_member_expr(self, expr: MemberExpr) -> None: base = expr.expr base.accept(self) # Bind references to module attributes. if isinstance(base, RefExpr) and base.kind == MODULE_REF: # This branch handles the case foo.bar where foo is a module. # In this case base.node is the module's MypyFile and we look up # bar in its namespace. This must be done for all types of bar. file = cast(Optional[MypyFile], base.node) # can't use isinstance due to issue #2999 # TODO: Should we actually use this? Not sure if this makes a difference. # if file.fullname() == self.cur_mod_id: # names = self.globals # else: # names = file.names n = file.names.get(expr.name, None) if file is not None else None if n and not n.module_hidden: n = self.normalize_type_alias(n, expr) if not n: return n = self.rebind_symbol_table_node(n) if n: # TODO: What if None? expr.kind = n.kind expr.fullname = n.fullname expr.node = n.node elif file is not None and file.is_stub and '__getattr__' in file.names: # If there is a module-level __getattr__, then any attribute on the module is valid # per PEP 484. 
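# Hedged sketch (not part of the original source): a stub such as
#
#     # somepkg/__init__.pyi   ('somepkg' is a hypothetical package name)
#     from typing import Any
#     def __getattr__(name: str) -> Any: ...
#
# makes any 'somepkg.whatever' reference acceptable; the attribute's type is
# taken from the __getattr__ return type, which is what the code below does.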
getattr_defn = file.names['__getattr__'] if isinstance(getattr_defn.node, FuncDef): if isinstance(getattr_defn.node.type, CallableType): typ = getattr_defn.node.type.ret_type else: typ = AnyType(TypeOfAny.special_form) expr.kind = MDEF expr.fullname = '{}.{}'.format(file.fullname(), expr.name) expr.node = Var(expr.name, type=typ) else: # We only catch some errors here; the rest will be # caught during type checking. # # This way we can report a larger number of errors in # one type checker run. If we reported errors here, # the build would terminate after semantic analysis # and we wouldn't be able to report any type errors. full_name = '%s.%s' % (file.fullname() if file is not None else None, expr.name) mod_name = " '%s'" % file.fullname() if file is not None else '' if full_name in obsolete_name_mapping: self.fail("Module%s has no attribute %r (it's now called %r)" % ( mod_name, expr.name, obsolete_name_mapping[full_name]), expr) elif isinstance(base, RefExpr): # This branch handles the case C.bar (or cls.bar or self.bar inside # a classmethod/method), where C is a class and bar is a type # definition or a module resulting from `import bar` (or a module # assignment) inside class C. We look up bar in the class' TypeInfo # namespace. This is done only when bar is a module or a type; # other things (e.g. methods) are handled by other code in # checkmember. type_info = None if isinstance(base.node, TypeInfo): # C.bar where C is a class type_info = base.node elif isinstance(base.node, Var) and self.type and self.function_stack: # check for self.bar or cls.bar in method/classmethod func_def = self.function_stack[-1] if not func_def.is_static and isinstance(func_def.type, CallableType): formal_arg = func_def.type.argument_by_name(base.node.name()) if formal_arg and formal_arg.pos == 0: type_info = self.type if type_info: n = type_info.names.get(expr.name) if n is not None and (n.kind == MODULE_REF or isinstance(n.node, TypeInfo)): n = self.normalize_type_alias(n, expr) if not n: return expr.kind = n.kind expr.fullname = n.fullname expr.node = n.node def visit_op_expr(self, expr: OpExpr) -> None: expr.left.accept(self) if expr.op in ('and', 'or'): inferred = infer_condition_value(expr.left, pyversion=self.options.python_version, platform=self.options.platform) if ((inferred == ALWAYS_FALSE and expr.op == 'and') or (inferred == ALWAYS_TRUE and expr.op == 'or')): expr.right_unreachable = True return elif ((inferred == ALWAYS_TRUE and expr.op == 'and') or (inferred == ALWAYS_FALSE and expr.op == 'or')): expr.right_always = True expr.right.accept(self) def visit_comparison_expr(self, expr: ComparisonExpr) -> None: for operand in expr.operands: operand.accept(self) def visit_unary_expr(self, expr: UnaryExpr) -> None: expr.expr.accept(self) def visit_index_expr(self, expr: IndexExpr) -> None: if expr.analyzed: return expr.base.accept(self) if (isinstance(expr.base, RefExpr) and isinstance(expr.base.node, TypeInfo) and not expr.base.node.is_generic()): expr.index.accept(self) elif isinstance(expr.base, RefExpr) and expr.base.kind == TYPE_ALIAS: # Special form -- subscripting a generic type alias. # Perform the type substitution and create a new alias. res, alias_tvars = self.analyze_alias(expr) assert res is not None, "Failed analyzing already defined alias" expr.analyzed = TypeAliasExpr(res, alias_tvars, fallback=self.alias_fallback(res), in_runtime=True) expr.analyzed.line = expr.line expr.analyzed.column = expr.column elif refers_to_class_or_function(expr.base): # Special form -- type application. 
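# Hedged illustration (not from the original source): a runtime type application
# such as
#
#     s = Stack[int]()   # 'Stack' is a hypothetical user-defined generic class
#
# becomes a TypeApplication node with the analyzed type arguments, while
# subscripting the bare builtins (e.g. 'list[int]') is rejected below via
# nongen_builtins.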
# Translate index to an unanalyzed type. types = [] # type: List[Type] if isinstance(expr.index, TupleExpr): items = expr.index.items else: items = [expr.index] for item in items: try: typearg = expr_to_unanalyzed_type(item) except TypeTranslationError: self.fail('Type expected within [...]', expr) return typearg = self.anal_type(typearg, aliasing=True) types.append(typearg) expr.analyzed = TypeApplication(expr.base, types) expr.analyzed.line = expr.line # list, dict, set are not directly subscriptable n = self.lookup_type_node(expr.base) if n and not n.normalized and n.fullname in nongen_builtins: self.fail(no_subscript_builtin_alias(n.fullname, propose_alt=False), expr) else: expr.index.accept(self) def lookup_type_node(self, expr: Expression) -> Optional[SymbolTableNode]: try: t = expr_to_unanalyzed_type(expr) except TypeTranslationError: return None if isinstance(t, UnboundType): n = self.lookup_qualified(t.name, expr, suppress_errors=True) return n return None def visit_slice_expr(self, expr: SliceExpr) -> None: if expr.begin_index: expr.begin_index.accept(self) if expr.end_index: expr.end_index.accept(self) if expr.stride: expr.stride.accept(self) def visit_cast_expr(self, expr: CastExpr) -> None: expr.expr.accept(self) expr.type = self.anal_type(expr.type) def visit_reveal_type_expr(self, expr: RevealTypeExpr) -> None: expr.expr.accept(self) def visit_type_application(self, expr: TypeApplication) -> None: expr.expr.accept(self) for i in range(len(expr.types)): expr.types[i] = self.anal_type(expr.types[i]) def visit_list_comprehension(self, expr: ListComprehension) -> None: expr.generator.accept(self) def visit_set_comprehension(self, expr: SetComprehension) -> None: expr.generator.accept(self) def visit_dictionary_comprehension(self, expr: DictionaryComprehension) -> None: self.enter() self.analyze_comp_for(expr) expr.key.accept(self) expr.value.accept(self) self.leave() self.analyze_comp_for_2(expr) def visit_generator_expr(self, expr: GeneratorExpr) -> None: self.enter() self.analyze_comp_for(expr) expr.left_expr.accept(self) self.leave() self.analyze_comp_for_2(expr) def analyze_comp_for(self, expr: Union[GeneratorExpr, DictionaryComprehension]) -> None: """Analyses the 'comp_for' part of comprehensions (part 1). That is the part after 'for' in (x for x in l if p). This analyzes variables and conditions which are analyzed in a local scope. """ for i, (index, sequence, conditions) in enumerate(zip(expr.indices, expr.sequences, expr.condlists)): if i > 0: sequence.accept(self) # Bind index variables. self.analyze_lvalue(index) for cond in conditions: cond.accept(self) def analyze_comp_for_2(self, expr: Union[GeneratorExpr, DictionaryComprehension]) -> None: """Analyses the 'comp_for' part of comprehensions (part 2). That is the part after 'for' in (x for x in l if p). This analyzes the 'l' part which is analyzed in the surrounding scope. 
""" expr.sequences[0].accept(self) def visit_lambda_expr(self, expr: LambdaExpr) -> None: self.analyze_function(expr) def visit_conditional_expr(self, expr: ConditionalExpr) -> None: expr.if_expr.accept(self) expr.cond.accept(self) expr.else_expr.accept(self) def visit_backquote_expr(self, expr: BackquoteExpr) -> None: expr.expr.accept(self) def visit__promote_expr(self, expr: PromoteExpr) -> None: expr.type = self.anal_type(expr.type) def visit_yield_expr(self, expr: YieldExpr) -> None: if not self.is_func_scope(): self.fail("'yield' outside function", expr, True, blocker=True) else: if self.function_stack[-1].is_coroutine: if self.options.python_version < (3, 6): self.fail("'yield' in async function", expr, True, blocker=True) else: self.function_stack[-1].is_generator = True self.function_stack[-1].is_async_generator = True else: self.function_stack[-1].is_generator = True if expr.expr: expr.expr.accept(self) def visit_await_expr(self, expr: AwaitExpr) -> None: if not self.is_func_scope(): self.fail("'await' outside function", expr) elif not self.function_stack[-1].is_coroutine: self.fail("'await' outside coroutine ('async def')", expr) expr.expr.accept(self) # # Helpers # @contextmanager def tvar_scope_frame(self, frame: TypeVarScope) -> Iterator[None]: old_scope = self.tvar_scope self.tvar_scope = frame yield self.tvar_scope = old_scope def lookup(self, name: str, ctx: Context, suppress_errors: bool = False) -> Optional[SymbolTableNode]: """Look up an unqualified name in all active namespaces.""" implicit_name = False # 1a. Name declared using 'global x' takes precedence if name in self.global_decls[-1]: if name in self.globals: return self.globals[name] if not suppress_errors: self.name_not_defined(name, ctx) return None # 1b. Name declared using 'nonlocal x' takes precedence if name in self.nonlocal_decls[-1]: for table in reversed(self.locals[:-1]): if table is not None and name in table: return table[name] else: if not suppress_errors: self.name_not_defined(name, ctx) return None # 2. Class attributes (if within class definition) if self.type and not self.is_func_scope() and name in self.type.names: node = self.type.names[name] if not node.implicit: return node implicit_name = True implicit_node = node # 3. Local (function) scopes for table in reversed(self.locals): if table is not None and name in table: return table[name] # 4. Current file global scope if name in self.globals: return self.globals[name] # 5. Builtins b = self.globals.get('__builtins__', None) if b: assert isinstance(b.node, MypyFile) table = b.node.names if name in table: if name[0] == "_" and name[1] != "_": if not suppress_errors: self.name_not_defined(name, ctx) return None node = table[name] return node # Give up. if not implicit_name and not suppress_errors: self.name_not_defined(name, ctx) self.check_for_obsolete_short_name(name, ctx) else: if implicit_name: return implicit_node return None def check_for_obsolete_short_name(self, name: str, ctx: Context) -> None: matches = [obsolete_name for obsolete_name in obsolete_name_mapping if obsolete_name.rsplit('.', 1)[-1] == name] if len(matches) == 1: self.note("(Did you mean '{}'?)".format(obsolete_name_mapping[matches[0]]), ctx) def lookup_qualified(self, name: str, ctx: Context, suppress_errors: bool = False) -> Optional[SymbolTableNode]: if '.' 
not in name: return self.lookup(name, ctx, suppress_errors=suppress_errors) else: parts = name.split('.') n = self.lookup(parts[0], ctx, suppress_errors=suppress_errors) if n: for i in range(1, len(parts)): if isinstance(n.node, TypeInfo): if n.node.mro is None: # We haven't yet analyzed the class `n.node`. Fall back to direct # lookup in the names declared directly under it, without its base # classes. This can happen when we have a forward reference to a # nested class, and the reference is bound before the outer class # has been fully semantically analyzed. # # A better approach would be to introduce a new analysis pass or # to move things around between passes, but this unblocks a common # use case even though this is a little limited in case there is # inheritance involved. result = n.node.names.get(parts[i]) else: result = n.node.get(parts[i]) n = result elif isinstance(n.node, MypyFile): names = n.node.names # Rebind potential references to old version of current module in # fine-grained incremental mode. # # TODO: Do this for all modules in the set of modified files. if n.node.fullname() == self.cur_mod_id: names = self.globals n = names.get(parts[i], None) # TODO: What if node is Var or FuncDef? if not n: if not suppress_errors: self.name_not_defined(name, ctx) break if n: n = self.normalize_type_alias(n, ctx) if n and n.module_hidden: self.name_not_defined(name, ctx) if n and not n.module_hidden: n = self.rebind_symbol_table_node(n) return n return None def rebind_symbol_table_node(self, n: SymbolTableNode) -> Optional[SymbolTableNode]: """If node refers to old version of module, return reference to new version. If the reference is removed in the new version, return None. """ # TODO: Handle type aliases, type variables and other sorts of references if isinstance(n.node, (FuncDef, OverloadedFuncDef, TypeInfo, Var)): # TODO: Why is it possible for fullname() to be None, even though it's not # annotated as Optional[str]? # TODO: Do this for all modules in the set of modified files # TODO: This doesn't work for things nested within classes if n.node.fullname() and get_prefix(n.node.fullname()) == self.cur_mod_id: # This is an indirect reference to a name defined in the current module. # Rebind it. return self.globals.get(n.node.name()) # No need to rebind. return n def builtin_type(self, fully_qualified_name: str) -> Instance: sym = self.lookup_fully_qualified(fully_qualified_name) node = sym.node assert isinstance(node, TypeInfo) return Instance(node, [AnyType(TypeOfAny.special_form)] * len(node.defn.type_vars)) def lookup_fully_qualified(self, name: str) -> SymbolTableNode: """Lookup a fully qualified name. Assume that the name is defined. This happens in the global namespace -- the local module namespace is ignored. """ parts = name.split('.') n = self.modules[parts[0]] for i in range(1, len(parts) - 1): next_sym = n.names[parts[i]] assert isinstance(next_sym.node, MypyFile) n = next_sym.node return n.names[parts[-1]] def lookup_fully_qualified_or_none(self, name: str) -> Optional[SymbolTableNode]: """Lookup a fully qualified name. Don't assume that the name is defined. This happens in the global namespace -- the local module namespace is ignored. """ assert '.' 
in name parts = name.split('.') n = self.modules[parts[0]] for i in range(1, len(parts) - 1): next_sym = n.names.get(parts[i]) if not next_sym: return None assert isinstance(next_sym.node, MypyFile) n = next_sym.node return n.names.get(parts[-1]) def qualified_name(self, n: str) -> str: if self.type is not None: base = self.type._fullname else: base = self.cur_mod_id return base + '.' + n def enter(self) -> None: self.locals.append(SymbolTable()) self.global_decls.append(set()) self.nonlocal_decls.append(set()) # -1 since entering block will increment this to 0. self.block_depth.append(-1) def leave(self) -> None: self.locals.pop() self.global_decls.pop() self.nonlocal_decls.pop() self.block_depth.pop() def is_func_scope(self) -> bool: return self.locals[-1] is not None def is_class_scope(self) -> bool: return self.type is not None and not self.is_func_scope() def is_module_scope(self) -> bool: return not (self.is_class_scope() or self.is_func_scope()) def add_symbol(self, name: str, node: SymbolTableNode, context: Context) -> None: if self.is_func_scope(): assert self.locals[-1] is not None if name in self.locals[-1]: # Flag redefinition unless this is a reimport of a module. if not (node.kind == MODULE_REF and self.locals[-1][name].node == node.node): self.name_already_defined(name, context) self.locals[-1][name] = node elif self.type: self.type.names[name] = node else: existing = self.globals.get(name) if existing and (not isinstance(node.node, MypyFile) or existing.node != node.node) and existing.kind != UNBOUND_IMPORTED: # Modules can be imported multiple times to support import # of multiple submodules of a package (e.g. a.x and a.y). ok = False # Only report an error if the symbol collision provides a different type. if existing.type and node.type and is_same_type(existing.type, node.type): ok = True if not ok: self.name_already_defined(name, context) self.globals[name] = node def add_local(self, node: Union[Var, FuncDef, OverloadedFuncDef], ctx: Context) -> None: assert self.locals[-1] is not None, "Should not add locals outside a function" name = node.name() if name in self.locals[-1]: self.name_already_defined(name, ctx) node._fullname = name self.locals[-1][name] = SymbolTableNode(LDEF, node) def add_exports(self, *exps: Expression) -> None: for exp in exps: if isinstance(exp, StrExpr): self.all_exports.add(exp.value) def check_no_global(self, n: str, ctx: Context, is_overloaded_func: bool = False) -> None: if n in self.globals: prev_is_overloaded = isinstance(self.globals[n], OverloadedFuncDef) if is_overloaded_func and prev_is_overloaded: self.fail("Nonconsecutive overload {} found".format(n), ctx) elif prev_is_overloaded: self.fail("Definition of '{}' missing 'overload'".format(n), ctx) else: self.name_already_defined(n, ctx, self.globals[n]) def name_not_defined(self, name: str, ctx: Context) -> None: message = "Name '{}' is not defined".format(name) extra = self.undefined_name_extra_info(name) if extra: message += ' {}'.format(extra) self.fail(message, ctx) if 'builtins.{}'.format(name) in SUGGESTED_TEST_FIXTURES: # The user probably has a missing definition in a test fixture. Let's verify. fullname = 'builtins.{}'.format(name) if self.lookup_fully_qualified_or_none(fullname) is None: # Yes. Generate a helpful note. 
self.add_fixture_note(fullname, ctx) def name_already_defined(self, name: str, ctx: Context, original_ctx: Optional[SymbolTableNode] = None) -> None: if original_ctx: if original_ctx.node and original_ctx.node.get_line() != -1: extra_msg = ' on line {}'.format(original_ctx.node.get_line()) else: extra_msg = ' (possibly by an import)' else: extra_msg = '' self.fail("Name '{}' already defined{}".format(name, extra_msg), ctx) def fail(self, msg: str, ctx: Context, serious: bool = False, *, blocker: bool = False) -> None: if (not serious and not self.options.check_untyped_defs and self.function_stack and self.function_stack[-1].is_dynamic()): return # In case it's a bug and we don't really have context assert ctx is not None, msg self.errors.report(ctx.get_line(), ctx.get_column(), msg, blocker=blocker) def fail_blocker(self, msg: str, ctx: Context) -> None: self.fail(msg, ctx, blocker=True) def note(self, msg: str, ctx: Context) -> None: if (not self.options.check_untyped_defs and self.function_stack and self.function_stack[-1].is_dynamic()): return self.errors.report(ctx.get_line(), ctx.get_column(), msg, severity='note') def undefined_name_extra_info(self, fullname: str) -> Optional[str]: if fullname in obsolete_name_mapping: return "(it's now called '{}')".format(obsolete_name_mapping[fullname]) else: return None def accept(self, node: Node) -> None: try: node.accept(self) except Exception as err: report_internal_error(err, self.errors.file, node.line, self.errors, self.options) def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike: if isinstance(sig, CallableType): return sig.copy_modified(arg_types=[new] + sig.arg_types[1:]) elif isinstance(sig, Overloaded): return Overloaded([cast(CallableType, replace_implicit_first_type(i, new)) for i in sig.items()]) else: assert False def set_callable_name(sig: Type, fdef: FuncDef) -> Type: if isinstance(sig, FunctionLike): if fdef.info: return sig.with_name( '{} of {}'.format(fdef.name(), fdef.info.name())) else: return sig.with_name(fdef.name()) else: return sig def refers_to_fullname(node: Expression, fullname: str) -> bool: """Is node a name or member expression with the given full name?""" return isinstance(node, RefExpr) and node.fullname == fullname def refers_to_class_or_function(node: Expression) -> bool: """Does semantically analyzed node refer to a class?""" return (isinstance(node, RefExpr) and isinstance(node.node, (TypeInfo, FuncDef, OverloadedFuncDef))) def calculate_class_mro(defn: ClassDef, fail: Callable[[str, Context], None]) -> None: try: defn.info.calculate_mro() except MroError: fail("Cannot determine consistent method resolution order " '(MRO) for "%s"' % defn.name, defn) defn.info.mro = [] # The property of falling back to Any is inherited. defn.info.fallback_to_any = any(baseinfo.fallback_to_any for baseinfo in defn.info.mro) def find_duplicate(list: List[T]) -> Optional[T]: """If the list has duplicates, return one of the duplicates. Otherwise, return None. 
""" for i in range(1, len(list)): if list[i] in list[:i]: return list[i] return None def remove_imported_names_from_symtable(names: SymbolTable, module: str) -> None: """Remove all imported names from the symbol table of a module.""" removed = [] # type: List[str] for name, node in names.items(): if node.node is None: continue fullname = node.node.fullname() prefix = fullname[:fullname.rfind('.')] if prefix != module: removed.append(name) for name in removed: del names[name] def infer_reachability_of_if_statement(s: IfStmt, pyversion: Tuple[int, int], platform: str) -> None: for i in range(len(s.expr)): result = infer_condition_value(s.expr[i], pyversion, platform) if result in (ALWAYS_FALSE, MYPY_FALSE): # The condition is considered always false, so we skip the if/elif body. mark_block_unreachable(s.body[i]) elif result in (ALWAYS_TRUE, MYPY_TRUE): # This condition is considered always true, so all of the remaining # elif/else bodies should not be checked. if result == MYPY_TRUE: # This condition is false at runtime; this will affect # import priorities. mark_block_mypy_only(s.body[i]) for body in s.body[i + 1:]: mark_block_unreachable(body) # Make sure else body always exists and is marked as # unreachable so the type checker always knows that # all control flow paths will flow through the if # statement body. if not s.else_body: s.else_body = Block([]) mark_block_unreachable(s.else_body) break def infer_condition_value(expr: Expression, pyversion: Tuple[int, int], platform: str) -> int: """Infer whether the given condition is always true/false. Return ALWAYS_TRUE if always true, ALWAYS_FALSE if always false, MYPY_TRUE if true under mypy and false at runtime, MYPY_FALSE if false under mypy and true at runtime, else TRUTH_VALUE_UNKNOWN. """ name = '' negated = False alias = expr if isinstance(alias, UnaryExpr): if alias.op == 'not': expr = alias.expr negated = True result = TRUTH_VALUE_UNKNOWN if isinstance(expr, NameExpr): name = expr.name elif isinstance(expr, MemberExpr): name = expr.name elif isinstance(expr, OpExpr) and expr.op in ('and', 'or'): left = infer_condition_value(expr.left, pyversion, platform) if ((left == ALWAYS_TRUE and expr.op == 'and') or (left == ALWAYS_FALSE and expr.op == 'or')): # Either `True and ` or `False or `: the result will # always be the right-hand-side. return infer_condition_value(expr.right, pyversion, platform) else: # The result will always be the left-hand-side (e.g. ALWAYS_* or # TRUTH_VALUE_UNKNOWN). return left else: result = consider_sys_version_info(expr, pyversion) if result == TRUTH_VALUE_UNKNOWN: result = consider_sys_platform(expr, platform) if result == TRUTH_VALUE_UNKNOWN: if name == 'PY2': result = ALWAYS_TRUE if pyversion[0] == 2 else ALWAYS_FALSE elif name == 'PY3': result = ALWAYS_TRUE if pyversion[0] == 3 else ALWAYS_FALSE elif name == 'MYPY' or name == 'TYPE_CHECKING': result = MYPY_TRUE if negated: result = inverted_truth_mapping[result] return result def consider_sys_version_info(expr: Expression, pyversion: Tuple[int, ...]) -> int: """Consider whether expr is a comparison involving sys.version_info. Return ALWAYS_TRUE, ALWAYS_FALSE, or TRUTH_VALUE_UNKNOWN. """ # Cases supported: # - sys.version_info[] # - sys.version_info[:] # - sys.version_info # (in this case must be >, >=, <, <=, but cannot be ==, !=) if not isinstance(expr, ComparisonExpr): return TRUTH_VALUE_UNKNOWN # Let's not yet support chained comparisons. 
if len(expr.operators) > 1: return TRUTH_VALUE_UNKNOWN op = expr.operators[0] if op not in ('==', '!=', '<=', '>=', '<', '>'): return TRUTH_VALUE_UNKNOWN thing = contains_int_or_tuple_of_ints(expr.operands[1]) if thing is None: return TRUTH_VALUE_UNKNOWN index = contains_sys_version_info(expr.operands[0]) if isinstance(index, int) and isinstance(thing, int): # sys.version_info[i] k if 0 <= index <= 1: return fixed_comparison(pyversion[index], op, thing) else: return TRUTH_VALUE_UNKNOWN elif isinstance(index, tuple) and isinstance(thing, tuple): lo, hi = index if lo is None: lo = 0 if hi is None: hi = 2 if 0 <= lo < hi <= 2: val = pyversion[lo:hi] if len(val) == len(thing) or len(val) > len(thing) and op not in ('==', '!='): return fixed_comparison(val, op, thing) return TRUTH_VALUE_UNKNOWN def consider_sys_platform(expr: Expression, platform: str) -> int: """Consider whether expr is a comparison involving sys.platform. Return ALWAYS_TRUE, ALWAYS_FALSE, or TRUTH_VALUE_UNKNOWN. """ # Cases supported: # - sys.platform == 'posix' # - sys.platform != 'win32' # - sys.platform.startswith('win') if isinstance(expr, ComparisonExpr): # Let's not yet support chained comparisons. if len(expr.operators) > 1: return TRUTH_VALUE_UNKNOWN op = expr.operators[0] if op not in ('==', '!='): return TRUTH_VALUE_UNKNOWN if not is_sys_attr(expr.operands[0], 'platform'): return TRUTH_VALUE_UNKNOWN right = expr.operands[1] if not isinstance(right, (StrExpr, UnicodeExpr)): return TRUTH_VALUE_UNKNOWN return fixed_comparison(platform, op, right.value) elif isinstance(expr, CallExpr): if not isinstance(expr.callee, MemberExpr): return TRUTH_VALUE_UNKNOWN if len(expr.args) != 1 or not isinstance(expr.args[0], (StrExpr, UnicodeExpr)): return TRUTH_VALUE_UNKNOWN if not is_sys_attr(expr.callee.expr, 'platform'): return TRUTH_VALUE_UNKNOWN if expr.callee.name != 'startswith': return TRUTH_VALUE_UNKNOWN if platform.startswith(expr.args[0].value): return ALWAYS_TRUE else: return ALWAYS_FALSE else: return TRUTH_VALUE_UNKNOWN Targ = TypeVar('Targ', int, str, Tuple[int, ...]) def fixed_comparison(left: Targ, op: str, right: Targ) -> int: rmap = {False: ALWAYS_FALSE, True: ALWAYS_TRUE} if op == '==': return rmap[left == right] if op == '!=': return rmap[left != right] if op == '<=': return rmap[left <= right] if op == '>=': return rmap[left >= right] if op == '<': return rmap[left < right] if op == '>': return rmap[left > right] return TRUTH_VALUE_UNKNOWN def contains_int_or_tuple_of_ints(expr: Expression ) -> Union[None, int, Tuple[int], Tuple[int, ...]]: if isinstance(expr, IntExpr): return expr.value if isinstance(expr, TupleExpr): if literal(expr) == LITERAL_YES: thing = [] for x in expr.items: if not isinstance(x, IntExpr): return None thing.append(x.value) return tuple(thing) return None def contains_sys_version_info(expr: Expression ) -> Union[None, int, Tuple[Optional[int], Optional[int]]]: if is_sys_attr(expr, 'version_info'): return (None, None) # Same as sys.version_info[:] if isinstance(expr, IndexExpr) and is_sys_attr(expr.base, 'version_info'): index = expr.index if isinstance(index, IntExpr): return index.value if isinstance(index, SliceExpr): if index.stride is not None: if not isinstance(index.stride, IntExpr) or index.stride.value != 1: return None begin = end = None if index.begin_index is not None: if not isinstance(index.begin_index, IntExpr): return None begin = index.begin_index.value if index.end_index is not None: if not isinstance(index.end_index, IntExpr): return None end = index.end_index.value return 
(begin, end) return None def is_sys_attr(expr: Expression, name: str) -> bool: # TODO: This currently doesn't work with code like this: # - import sys as _sys # - from sys import version_info if isinstance(expr, MemberExpr) and expr.name == name: if isinstance(expr.expr, NameExpr) and expr.expr.name == 'sys': # TODO: Guard against a local named sys, etc. # (Though later passes will still do most checking.) return True return False def mark_block_unreachable(block: Block) -> None: block.is_unreachable = True block.accept(MarkImportsUnreachableVisitor()) class MarkImportsUnreachableVisitor(TraverserVisitor): """Visitor that flags all imports nested within a node as unreachable.""" def visit_import(self, node: Import) -> None: node.is_unreachable = True def visit_import_from(self, node: ImportFrom) -> None: node.is_unreachable = True def visit_import_all(self, node: ImportAll) -> None: node.is_unreachable = True def mark_block_mypy_only(block: Block) -> None: block.accept(MarkImportsMypyOnlyVisitor()) class MarkImportsMypyOnlyVisitor(TraverserVisitor): """Visitor that sets is_mypy_only (which affects priority).""" def visit_import(self, node: Import) -> None: node.is_mypy_only = True def visit_import_from(self, node: ImportFrom) -> None: node.is_mypy_only = True def visit_import_all(self, node: ImportAll) -> None: node.is_mypy_only = True def make_any_non_explicit(t: Type) -> Type: """Replace all Any types within in with Any that has attribute 'explicit' set to False""" return t.accept(MakeAnyNonExplicit()) class MakeAnyNonExplicit(TypeTranslator): def visit_any(self, t: AnyType) -> Type: if t.type_of_any == TypeOfAny.explicit: return t.copy_modified(TypeOfAny.special_form) return t mypy-0.560/mypy/semanal_pass1.py0000644€tŠÔÚ€2›s®0000003500013215007205022764 0ustar jukkaDROPBOX\Domain Users00000000000000"""The semantic analyzer pass 1. This sets up externally visible names defined in a module but doesn't follow imports and mostly ignores local definitions. It helps enable (some) cyclic references between modules, such as module 'a' that imports module 'b' and used names defined in b *and* vice versa. The first pass can be performed before dependent modules have been processed. Since this pass can't assume that other modules have been processed, this pass cannot detect certain definitions that can only be recognized in later passes. Examples of these include TypeVar and NamedTuple definitions, as these look like regular assignments until we are able to bind names, which only happens in pass 2. This pass also infers the reachability of certain if staments, such as those with platform checks. """ from typing import List, Tuple from mypy import experiments from mypy.nodes import ( MypyFile, SymbolTable, SymbolTableNode, Var, Block, AssignmentStmt, FuncDef, Decorator, ClassDef, TypeInfo, ImportFrom, Import, ImportAll, IfStmt, WhileStmt, ForStmt, WithStmt, TryStmt, OverloadedFuncDef, Lvalue, Context, LDEF, GDEF, MDEF, UNBOUND_IMPORTED, MODULE_REF, implicit_module_attrs ) from mypy.types import Type, UnboundType, UnionType, AnyType, TypeOfAny, NoneTyp from mypy.semanal import SemanticAnalyzerPass2, infer_reachability_of_if_statement from mypy.options import Options from mypy.sametypes import is_same_type from mypy.visitor import NodeVisitor class SemanticAnalyzerPass1(NodeVisitor[None]): """First phase of semantic analysis. See docstring of 'analyze()' below for a description of what this does. 
""" def __init__(self, sem: SemanticAnalyzerPass2) -> None: self.sem = sem def visit_file(self, file: MypyFile, fnam: str, mod_id: str, options: Options) -> None: """Perform the first analysis pass. Populate module global table. Resolve the full names of definitions not nested within functions and construct type info structures, but do not resolve inter-definition references such as base classes. Also add implicit definitions such as __name__. In this phase we don't resolve imports. For 'from ... import', we generate dummy symbol table nodes for the imported names, and these will get resolved in later phases of semantic analysis. """ sem = self.sem self.sem.options = options # Needed because we sometimes call into it self.pyversion = options.python_version self.platform = options.platform sem.cur_mod_id = mod_id sem.errors.set_file(fnam, mod_id) sem.globals = SymbolTable() sem.global_decls = [set()] sem.nonlocal_decls = [set()] sem.block_depth = [0] defs = file.defs with experiments.strict_optional_set(options.strict_optional): # Add implicit definitions of module '__name__' etc. for name, t in implicit_module_attrs.items(): # unicode docstrings should be accepted in Python 2 if name == '__doc__': if self.pyversion >= (3, 0): typ = UnboundType('__builtins__.str') # type: Type else: typ = UnionType([UnboundType('__builtins__.str'), UnboundType('__builtins__.unicode')]) else: assert t is not None, 'type should be specified for {}'.format(name) typ = UnboundType(t) v = Var(name, typ) v._fullname = self.sem.qualified_name(name) self.sem.globals[name] = SymbolTableNode(GDEF, v) for d in defs: d.accept(self) # Add implicit definition of literals/keywords to builtins, as we # cannot define a variable with them explicitly. if mod_id == 'builtins': literal_types = [ ('None', NoneTyp()), # reveal_type is a mypy-only function that gives an error with # the type of its arg. ('reveal_type', AnyType(TypeOfAny.special_form)), ] # type: List[Tuple[str, Type]] # TODO(ddfisher): This guard is only needed because mypy defines # fake builtins for its tests which often don't define bool. If # mypy is fast enough that we no longer need those, this # conditional check should be removed. if 'bool' in self.sem.globals: bool_type = self.sem.named_type('bool') literal_types.extend([ ('True', bool_type), ('False', bool_type), ('__debug__', bool_type), ]) else: # We are running tests without 'bool' in builtins. # TODO: Find a permanent solution to this problem. # Maybe add 'bool' to all fixtures? literal_types.append(('True', AnyType(TypeOfAny.special_form))) for name, typ in literal_types: v = Var(name, typ) v._fullname = self.sem.qualified_name(name) self.sem.globals[name] = SymbolTableNode(GDEF, v) del self.sem.options def visit_block(self, b: Block) -> None: if b.is_unreachable: return self.sem.block_depth[-1] += 1 for node in b.body: node.accept(self) self.sem.block_depth[-1] -= 1 def visit_assignment_stmt(self, s: AssignmentStmt) -> None: if self.sem.is_module_scope(): for lval in s.lvalues: self.analyze_lvalue(lval, explicit_type=s.type is not None) def visit_func_def(self, func: FuncDef) -> None: sem = self.sem if sem.type is not None: # Don't process methods during pass 1. return func.is_conditional = sem.block_depth[-1] > 0 func._fullname = sem.qualified_name(func.name()) at_module = sem.is_module_scope() if at_module and func.name() in sem.globals: # Already defined in this module. original_sym = sem.globals[func.name()] if original_sym.kind == UNBOUND_IMPORTED: # Ah this is an imported name. 
We can't resolve them now, so we'll postpone # this until the main phase of semantic analysis. return if not sem.set_original_def(original_sym.node, func): # Report error. sem.check_no_global(func.name(), func) else: if at_module: sem.globals[func.name()] = SymbolTableNode(GDEF, func) # Also analyze the function body (needed in case there are unreachable # conditional imports). sem.function_stack.append(func) sem.errors.push_function(func.name()) sem.enter() func.body.accept(self) sem.leave() sem.errors.pop_function() sem.function_stack.pop() def visit_overloaded_func_def(self, func: OverloadedFuncDef) -> None: if self.sem.type is not None: # Don't process methods during pass 1. return kind = self.kind_by_scope() if kind == GDEF: self.sem.check_no_global(func.name(), func, True) func._fullname = self.sem.qualified_name(func.name()) if kind == GDEF: self.sem.globals[func.name()] = SymbolTableNode(kind, func) if func.impl: impl = func.impl # Also analyze the function body (in case there are conditional imports). sem = self.sem if isinstance(impl, FuncDef): sem.function_stack.append(impl) sem.errors.push_function(func.name()) sem.enter() impl.body.accept(self) elif isinstance(impl, Decorator): sem.function_stack.append(impl.func) sem.errors.push_function(func.name()) sem.enter() impl.func.body.accept(self) else: assert False, "Implementation of an overload needs to be FuncDef or Decorator" sem.leave() sem.errors.pop_function() sem.function_stack.pop() def visit_class_def(self, cdef: ClassDef) -> None: kind = self.kind_by_scope() if kind == LDEF: return elif kind == GDEF: self.sem.check_no_global(cdef.name, cdef) cdef.fullname = self.sem.qualified_name(cdef.name) info = TypeInfo(SymbolTable(), cdef, self.sem.cur_mod_id) info.set_line(cdef.line, cdef.column) cdef.info = info if kind == GDEF: self.sem.globals[cdef.name] = SymbolTableNode(kind, info) self.process_nested_classes(cdef) def process_nested_classes(self, outer_def: ClassDef) -> None: self.sem.enter_class(outer_def.info) for node in outer_def.defs.body: if isinstance(node, ClassDef): node.info = TypeInfo(SymbolTable(), node, self.sem.cur_mod_id) if outer_def.fullname: node.info._fullname = outer_def.fullname + '.' + node.info.name() else: node.info._fullname = node.info.name() node.fullname = node.info._fullname symbol = SymbolTableNode(MDEF, node.info) outer_def.info.names[node.name] = symbol self.process_nested_classes(node) elif isinstance(node, (ImportFrom, Import, ImportAll, IfStmt)): node.accept(self) self.sem.leave_class() def visit_import_from(self, node: ImportFrom) -> None: # We can't bind module names during the first pass, as the target module might be # unprocessed. However, we add dummy unbound imported names to the symbol table so # that we at least know that the name refers to a module. at_module = self.sem.is_module_scope() node.is_top_level = at_module if not at_module: return for name, as_name in node.names: imported_name = as_name or name if imported_name not in self.sem.globals: self.add_symbol(imported_name, SymbolTableNode(UNBOUND_IMPORTED, None), node) def visit_import(self, node: Import) -> None: node.is_top_level = self.sem.is_module_scope() # This is similar to visit_import_from -- see the comment there. if not self.sem.is_module_scope(): return for id, as_id in node.ids: imported_id = as_id or id # For 'import a.b.c' we create symbol 'a'. 
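# Hedged example (not in the original source): in pass 1,
#
#     import os.path            # binds only 'os' as an UNBOUND_IMPORTED placeholder
#     import os.path as osp     # binds 'osp'
#
# and the placeholders are resolved to real module references in later passes.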
imported_id = imported_id.split('.')[0] if imported_id not in self.sem.globals: self.add_symbol(imported_id, SymbolTableNode(UNBOUND_IMPORTED, None), node) def visit_import_all(self, node: ImportAll) -> None: node.is_top_level = self.sem.is_module_scope() def visit_while_stmt(self, s: WhileStmt) -> None: if self.sem.is_module_scope(): s.body.accept(self) if s.else_body: s.else_body.accept(self) def visit_for_stmt(self, s: ForStmt) -> None: if self.sem.is_module_scope(): self.analyze_lvalue(s.index, explicit_type=s.index_type is not None) s.body.accept(self) if s.else_body: s.else_body.accept(self) def visit_with_stmt(self, s: WithStmt) -> None: if self.sem.is_module_scope(): for n in s.target: if n: self.analyze_lvalue(n, explicit_type=s.target_type is not None) s.body.accept(self) def visit_decorator(self, d: Decorator) -> None: if self.sem.type is not None: # Don't process methods during pass 1. return d.var._fullname = self.sem.qualified_name(d.var.name()) self.add_symbol(d.var.name(), SymbolTableNode(self.kind_by_scope(), d), d) def visit_if_stmt(self, s: IfStmt) -> None: infer_reachability_of_if_statement(s, pyversion=self.pyversion, platform=self.platform) for node in s.body: node.accept(self) if s.else_body: s.else_body.accept(self) def visit_try_stmt(self, s: TryStmt) -> None: if self.sem.is_module_scope(): self.sem.analyze_try_stmt(s, self, add_global=self.sem.is_module_scope()) def analyze_lvalue(self, lvalue: Lvalue, explicit_type: bool = False) -> None: self.sem.analyze_lvalue(lvalue, add_global=self.sem.is_module_scope(), explicit_type=explicit_type) def kind_by_scope(self) -> int: if self.sem.is_module_scope(): return GDEF elif self.sem.is_class_scope(): return MDEF elif self.sem.is_func_scope(): return LDEF else: assert False, "Couldn't determine scope" def add_symbol(self, name: str, node: SymbolTableNode, context: Context) -> None: # This is related to SemanticAnalyzerPass2.add_symbol. Since both methods will # be called on top-level definitions, they need to co-operate. if self.sem.is_func_scope(): assert self.sem.locals[-1] is not None if name in self.sem.locals[-1]: # Flag redefinition unless this is a reimport of a module. if not (node.kind == MODULE_REF and self.sem.locals[-1][name].node == node.node): self.sem.name_already_defined(name, context) self.sem.locals[-1][name] = node else: assert self.sem.type is None # Pass 1 doesn't look inside classes existing = self.sem.globals.get(name) if existing and (not isinstance(node.node, MypyFile) or existing.node != node.node) and existing.kind != UNBOUND_IMPORTED: # Modules can be imported multiple times to support import # of multiple submodules of a package (e.g. a.x and a.y). ok = False # Only report an error if the symbol collision provides a different type. if existing.type and node.type and is_same_type(existing.type, node.type): ok = True if not ok: self.sem.name_already_defined(name, context) elif not existing: self.sem.globals[name] = node mypy-0.560/mypy/semanal_pass3.py0000644€tŠÔÚ€2›s®0000006544613215007206023010 0ustar jukkaDROPBOX\Domain Users00000000000000"""The semantic analyzer pass 3. This pass checks that type argument counts are valid; for example, it will reject Dict[int]. We don't do this in the second pass, since we infer the type argument counts of classes during this pass, and it is possible to refer to classes defined later in a file, which would not have the type argument count set yet. 
This pass also recomputes the method resolution order of each class, in case one of its bases belongs to a module involved in an import loop. """ from collections import OrderedDict from typing import Dict, List, Callable, Optional, Union, Set, cast from mypy import messages, experiments from mypy.nodes import ( Node, Expression, MypyFile, FuncDef, FuncItem, Decorator, RefExpr, Context, TypeInfo, ClassDef, Block, TypedDictExpr, NamedTupleExpr, AssignmentStmt, IndexExpr, TypeAliasExpr, NameExpr, CallExpr, NewTypeExpr, ForStmt, WithStmt, CastExpr, TypeVarExpr, TypeApplication, Lvalue, TupleExpr, RevealTypeExpr, SymbolTableNode, Var, ARG_POS, OverloadedFuncDef ) from mypy.types import ( Type, Instance, AnyType, TypeOfAny, CallableType, TupleType, TypeVarType, TypedDictType, UnionType, TypeType, Overloaded, ForwardRef, TypeTranslator, function_type ) from mypy.errors import Errors, report_internal_error from mypy.options import Options from mypy.traverser import TraverserVisitor from mypy.typeanal import TypeAnalyserPass3, collect_any_types from mypy.typevars import has_no_typevars import mypy.semanal class SemanticAnalyzerPass3(TraverserVisitor): """The third and final pass of semantic analysis. Check type argument counts and values of generic types, and perform some straightforward type inference. """ def __init__(self, modules: Dict[str, MypyFile], errors: Errors, sem: 'mypy.semanal.SemanticAnalyzerPass2') -> None: self.modules = modules self.errors = errors self.sem = sem def visit_file(self, file_node: MypyFile, fnam: str, options: Options, patches: List[Callable[[], None]]) -> None: self.errors.set_file(fnam, file_node.fullname()) self.options = options self.sem.options = options self.patches = patches self.is_typeshed_file = self.errors.is_typeshed_file(fnam) self.sem.cur_mod_id = file_node.fullname() self.sem.globals = file_node.names with experiments.strict_optional_set(options.strict_optional): self.accept(file_node) def refresh_partial(self, node: Union[MypyFile, FuncItem]) -> None: """Refresh a stale target in fine-grained incremental mode.""" if isinstance(node, MypyFile): self.refresh_top_level(node) else: self.accept(node) def refresh_top_level(self, file_node: MypyFile) -> None: """Reanalyze a stale module top-level in fine-grained incremental mode.""" for d in file_node.defs: if not isinstance(d, (FuncItem, ClassDef)): self.accept(d) def accept(self, node: Node) -> None: try: node.accept(self) except Exception as err: report_internal_error(err, self.errors.file, node.line, self.errors, self.options) def visit_block(self, b: Block) -> None: if b.is_unreachable: return super().visit_block(b) def visit_func_def(self, fdef: FuncDef) -> None: self.errors.push_function(fdef.name()) self.analyze(fdef.type, fdef) super().visit_func_def(fdef) self.errors.pop_function() def visit_overloaded_func_def(self, fdef: OverloadedFuncDef) -> None: self.analyze(fdef.type, fdef) super().visit_overloaded_func_def(fdef) def visit_class_def(self, tdef: ClassDef) -> None: # NamedTuple base classes are validated in check_namedtuple_classdef; we don't have to # check them again here. 
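        # A hedged sketch of the kinds of errors the base-type analysis in this
        # method can report (hypothetical user code, not part of mypy itself):
        #
        #     class C(Dict[int]):      # rejected: Dict takes two type arguments
        #         ...
        #
        #     class P(C, Protocol):    # error: All bases of a protocol must be protocols
        #         ...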
if not tdef.info.is_named_tuple: types = list(tdef.info.bases) # type: List[Type] for tvar in tdef.type_vars: if tvar.upper_bound: types.append(tvar.upper_bound) if tvar.values: types.extend(tvar.values) self.analyze_types(types, tdef.info) for type in tdef.info.bases: if tdef.info.is_protocol: if not isinstance(type, Instance) or not type.type.is_protocol: if type.type.fullname() != 'builtins.object': self.fail('All bases of a protocol must be protocols', tdef) # Recompute MRO now that we have analyzed all modules, to pick # up superclasses of bases imported from other modules in an # import loop. (Only do so if we succeeded the first time.) if tdef.info.mro: tdef.info.mro = [] # Force recomputation mypy.semanal.calculate_class_mro(tdef, self.fail_blocker) if tdef.info.is_protocol: add_protocol_members(tdef.info) if tdef.analyzed is not None: # Also check synthetic types associated with this ClassDef. # Currently these are TypedDict, and NamedTuple. if isinstance(tdef.analyzed, TypedDictExpr): self.analyze(tdef.analyzed.info.typeddict_type, tdef.analyzed, warn=True) elif isinstance(tdef.analyzed, NamedTupleExpr): self.analyze(tdef.analyzed.info.tuple_type, tdef.analyzed, warn=True) self.analyze_info(tdef.analyzed.info) super().visit_class_def(tdef) def visit_decorator(self, dec: Decorator) -> None: """Try to infer the type of the decorated function. This lets us resolve references to decorated functions during type checking when there are cyclic imports, as otherwise the type might not be available when we need it. This basically uses a simple special-purpose type inference engine just for decorators. """ super().visit_decorator(dec) if dec.var.is_property: # Decorators are expected to have a callable type (it's a little odd). if dec.func.type is None: dec.var.type = CallableType( [AnyType(TypeOfAny.special_form)], [ARG_POS], [None], AnyType(TypeOfAny.special_form), self.builtin_type('function'), name=dec.var.name()) elif isinstance(dec.func.type, CallableType): dec.var.type = dec.func.type return decorator_preserves_type = True for expr in dec.decorators: preserve_type = False if isinstance(expr, RefExpr) and isinstance(expr.node, FuncDef): if expr.node.type and is_identity_signature(expr.node.type): preserve_type = True if not preserve_type: decorator_preserves_type = False break if decorator_preserves_type: # No non-identity decorators left. We can trivially infer the type # of the function here. dec.var.type = function_type(dec.func, self.builtin_type('function')) if dec.decorators: return_type = calculate_return_type(dec.decorators[0]) if return_type and isinstance(return_type, AnyType): # The outermost decorator will return Any so we know the type of the # decorated function. dec.var.type = AnyType(TypeOfAny.from_another_any, source_any=return_type) sig = find_fixed_callable_return(dec.decorators[0]) if sig: # The outermost decorator always returns the same kind of function, # so we know that this is the type of the decoratored function. orig_sig = function_type(dec.func, self.builtin_type('function')) sig.name = orig_sig.items()[0].name dec.var.type = sig def visit_assignment_stmt(self, s: AssignmentStmt) -> None: """Traverse the assignment statement. This includes the actual assignment and synthetic types resulted from this assignment (if any). Currently this includes NewType, TypedDict, NamedTuple, and TypeVar. 
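        For example, assignments such as the following (an illustrative sketch;
        the names are hypothetical) all pass through this method:

            UserId = NewType('UserId', int)
            T = TypeVar('T', bound='SomeClass')
            Point = NamedTuple('Point', [('x', int), ('y', int)])
            Movie = TypedDict('Movie', {'title': str, 'year': int})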
""" self.analyze(s.type, s) if isinstance(s.rvalue, IndexExpr) and isinstance(s.rvalue.analyzed, TypeAliasExpr): self.analyze(s.rvalue.analyzed.type, s.rvalue.analyzed, warn=True) if isinstance(s.rvalue, CallExpr): analyzed = s.rvalue.analyzed if isinstance(analyzed, NewTypeExpr): self.analyze(analyzed.old_type, analyzed) if analyzed.info: # Currently NewTypes only have __init__, but to be future proof, # we analyze all symbols. self.analyze_info(analyzed.info) if analyzed.info and analyzed.info.mro: analyzed.info.mro = [] # Force recomputation mypy.semanal.calculate_class_mro(analyzed.info.defn, self.fail_blocker) if isinstance(analyzed, TypeVarExpr): types = [] if analyzed.upper_bound: types.append(analyzed.upper_bound) if analyzed.values: types.extend(analyzed.values) self.analyze_types(types, analyzed) if isinstance(analyzed, TypedDictExpr): self.analyze(analyzed.info.typeddict_type, analyzed, warn=True) if isinstance(analyzed, NamedTupleExpr): self.analyze(analyzed.info.tuple_type, analyzed, warn=True) self.analyze_info(analyzed.info) # We need to pay additional attention to assignments that define a type alias. # The resulting type is also stored in the 'type_override' attribute of # the corresponding SymbolTableNode. if isinstance(s.lvalues[0], RefExpr) and isinstance(s.lvalues[0].node, Var): self.analyze(s.lvalues[0].node.type, s.lvalues[0].node) if isinstance(s.lvalues[0], NameExpr): node = self.sem.lookup(s.lvalues[0].name, s, suppress_errors=True) if node: self.analyze(node.type_override, node) super().visit_assignment_stmt(s) def visit_for_stmt(self, s: ForStmt) -> None: self.analyze(s.index_type, s) super().visit_for_stmt(s) def visit_with_stmt(self, s: WithStmt) -> None: self.analyze(s.target_type, s) super().visit_with_stmt(s) def visit_cast_expr(self, e: CastExpr) -> None: self.analyze(e.type, e) super().visit_cast_expr(e) def visit_reveal_type_expr(self, e: RevealTypeExpr) -> None: super().visit_reveal_type_expr(e) def visit_type_application(self, e: TypeApplication) -> None: for type in e.types: self.analyze(type, e) super().visit_type_application(e) # Helpers def perform_transform(self, node: Union[Node, SymbolTableNode], transform: Callable[[Type], Type]) -> None: """Apply transform to all types associated with node.""" if isinstance(node, ForStmt): if node.index_type: node.index_type = transform(node.index_type) self.transform_types_in_lvalue(node.index, transform) if isinstance(node, WithStmt): if node.target_type: node.target_type = transform(node.target_type) for n in node.target: if isinstance(n, NameExpr) and isinstance(n.node, Var) and n.node.type: n.node.type = transform(n.node.type) if isinstance(node, (FuncDef, OverloadedFuncDef, CastExpr, AssignmentStmt, TypeAliasExpr, Var)): assert node.type, "Scheduled patch for non-existent type" node.type = transform(node.type) if isinstance(node, NewTypeExpr): assert node.old_type, "Scheduled patch for non-existent type" node.old_type = transform(node.old_type) if node.info: new_bases = [] # type: List[Instance] for b in node.info.bases: new_b = transform(b) # TODO: this code can be combined with code in second pass. if isinstance(new_b, Instance): new_bases.append(new_b) elif isinstance(new_b, TupleType): new_bases.append(new_b.fallback) else: self.fail("Argument 2 to NewType(...) 
must be subclassable" " (got {})".format(new_b), node) new_bases.append(self.builtin_type('object')) node.info.bases = new_bases if isinstance(node, TypeVarExpr): if node.upper_bound: node.upper_bound = transform(node.upper_bound) if node.values: node.values = [transform(v) for v in node.values] if isinstance(node, TypedDictExpr): assert node.info.typeddict_type, "Scheduled patch for non-existent type" node.info.typeddict_type = cast(TypedDictType, transform(node.info.typeddict_type)) if isinstance(node, NamedTupleExpr): assert node.info.tuple_type, "Scheduled patch for non-existent type" node.info.tuple_type = cast(TupleType, transform(node.info.tuple_type)) if isinstance(node, TypeApplication): node.types = [transform(t) for t in node.types] if isinstance(node, SymbolTableNode): assert node.type_override, "Scheduled patch for non-existent type" node.type_override = transform(node.type_override) if isinstance(node, TypeInfo): for tvar in node.defn.type_vars: if tvar.upper_bound: tvar.upper_bound = transform(tvar.upper_bound) if tvar.values: tvar.values = [transform(v) for v in tvar.values] new_bases = [] for base in node.bases: new_base = transform(base) if isinstance(new_base, Instance): new_bases.append(new_base) else: # Don't fix the NamedTuple bases, they are Instance's intentionally. # Patch the 'args' just in case, although generic tuple types are # not supported yet. alt_base = Instance(base.type, [transform(a) for a in base.args]) new_bases.append(alt_base) node.bases = new_bases def transform_types_in_lvalue(self, lvalue: Lvalue, transform: Callable[[Type], Type]) -> None: if isinstance(lvalue, RefExpr): if isinstance(lvalue.node, Var): var = lvalue.node if var.type: var.type = transform(var.type) elif isinstance(lvalue, TupleExpr): for item in lvalue.items: self.transform_types_in_lvalue(item, transform) def analyze(self, type: Optional[Type], node: Union[Node, SymbolTableNode], warn: bool = False) -> None: # Recursive type warnings are only emitted on type definition 'node's, marked by 'warn' # Flags appeared during analysis of 'type' are collected in this dict. indicator = {} # type: Dict[str, bool] if type: analyzer = self.make_type_analyzer(indicator) type.accept(analyzer) self.check_for_omitted_generics(type) if indicator.get('forward') or indicator.get('synthetic'): def patch() -> None: self.perform_transform(node, lambda tp: tp.accept(ForwardReferenceResolver(self.fail, node, warn))) self.patches.append(patch) def analyze_types(self, types: List[Type], node: Node) -> None: # Similar to above but for nodes with multiple types. indicator = {} # type: Dict[str, bool] for type in types: analyzer = self.make_type_analyzer(indicator) type.accept(analyzer) self.check_for_omitted_generics(type) if indicator.get('forward') or indicator.get('synthetic'): def patch() -> None: self.perform_transform(node, lambda tp: tp.accept(ForwardReferenceResolver(self.fail, node, warn=False))) self.patches.append(patch) def analyze_info(self, info: TypeInfo) -> None: # Similar to above but for nodes with synthetic TypeInfos (NamedTuple and NewType). 
for name in info.names: sym = info.names[name] if isinstance(sym.node, (FuncDef, Decorator)): self.accept(sym.node) if isinstance(sym.node, Var): self.analyze(sym.node.type, sym.node) def make_type_analyzer(self, indicator: Dict[str, bool]) -> TypeAnalyserPass3: return TypeAnalyserPass3(self.sem.lookup_qualified, self.sem.lookup_fully_qualified, self.fail, self.sem.note, self.sem.plugin, self.options, self.is_typeshed_file, indicator) def check_for_omitted_generics(self, typ: Type) -> None: if not self.options.disallow_any_generics or self.is_typeshed_file: return for t in collect_any_types(typ): if t.type_of_any == TypeOfAny.from_omitted_generics: self.fail(messages.BARE_GENERIC, t) def fail(self, msg: str, ctx: Context, *, blocker: bool = False) -> None: self.errors.report(ctx.get_line(), ctx.get_column(), msg) def fail_blocker(self, msg: str, ctx: Context) -> None: self.fail(msg, ctx, blocker=True) def builtin_type(self, name: str, args: Optional[List[Type]] = None) -> Instance: names = self.modules['builtins'] sym = names.names[name] node = sym.node assert isinstance(node, TypeInfo) if args: # TODO: assert len(args) == len(node.defn.type_vars) return Instance(node, args) any_type = AnyType(TypeOfAny.special_form) return Instance(node, [any_type] * len(node.defn.type_vars)) def add_protocol_members(typ: TypeInfo) -> None: members = set() # type: Set[str] if typ.mro: for base in typ.mro[:-1]: # we skip "object" since everyone implements it if base.is_protocol: for name in base.names: members.add(name) typ.protocol_members = sorted(list(members)) def is_identity_signature(sig: Type) -> bool: """Is type a callable of form T -> T (where T is a type variable)?""" if isinstance(sig, CallableType) and sig.arg_kinds == [ARG_POS]: if isinstance(sig.arg_types[0], TypeVarType) and isinstance(sig.ret_type, TypeVarType): return sig.arg_types[0].id == sig.ret_type.id return False def calculate_return_type(expr: Expression) -> Optional[Type]: """Return the return type if we can calculate it. This only uses information available during semantic analysis so this will sometimes return None because of insufficient information (as type inference hasn't run yet). """ if isinstance(expr, RefExpr): if isinstance(expr.node, FuncDef): typ = expr.node.type if typ is None: # No signature -> default to Any. return AnyType(TypeOfAny.unannotated) # Explicit Any return? if isinstance(typ, CallableType): return typ.ret_type return None elif isinstance(expr.node, Var): return expr.node.type elif isinstance(expr, CallExpr): return calculate_return_type(expr.callee) return None def find_fixed_callable_return(expr: Expression) -> Optional[CallableType]: if isinstance(expr, RefExpr): if isinstance(expr.node, FuncDef): typ = expr.node.type if typ: if isinstance(typ, CallableType) and has_no_typevars(typ.ret_type): if isinstance(typ.ret_type, CallableType): return typ.ret_type elif isinstance(expr, CallExpr): t = find_fixed_callable_return(expr.callee) if t: if isinstance(t.ret_type, CallableType): return t.ret_type return None class ForwardReferenceResolver(TypeTranslator): """Visitor to replace previously detected forward reference to synthetic types. This is similar to TypeTranslator but tracks visited nodes to avoid infinite recursion on potentially circular (self- or mutually-referential) types. This visitor: * Fixes forward references by unwrapping the linked type. * Generates errors for unsupported type recursion and breaks recursion by resolving recursive back references to Any types. 
* Replaces instance types generated from unanalyzed NamedTuple and TypedDict class syntax found in first pass with analyzed TupleType and TypedDictType. """ def __init__(self, fail: Callable[[str, Context], None], start: Union[Node, SymbolTableNode], warn: bool) -> None: self.seen = [] # type: List[Type] self.fail = fail self.start = start self.warn = warn def check_recursion(self, t: Type) -> bool: if any(t is s for s in self.seen): if self.warn: assert isinstance(self.start, Node), "Internal error: invalid error context" self.fail('Recursive types not fully supported yet,' ' nested types replaced with "Any"', self.start) return True self.seen.append(t) return False def visit_forwardref_type(self, t: ForwardRef) -> Type: """This visitor method tracks situations like this: x: A # This type is not yet known and therefore wrapped in ForwardRef, # its content is updated in SemanticAnalyzerPass3, now we need to unwrap # this type. A = NewType('A', int) """ assert t.resolved, 'Internal error: Unresolved forward reference: {}'.format( t.unbound.name) return t.resolved.accept(self) def visit_instance(self, t: Instance, from_fallback: bool = False) -> Type: """This visitor method tracks situations like this: x: A # When analyzing this type we will get an Instance from SemanticAnalyzerPass1. # Now we need to update this to actual analyzed TupleType. class A(NamedTuple): attr: str If from_fallback is True, then we always return an Instance type. This is needed since TupleType and TypedDictType fallbacks are always instances. """ info = t.type # Special case, analyzed bases transformed the type into TupleType. if info.tuple_type and not from_fallback: items = [it.accept(self) for it in info.tuple_type.items] info.tuple_type.items = items return TupleType(items, Instance(info, [])) # Update forward Instances to corresponding analyzed NamedTuples. if info.replaced and info.replaced.tuple_type: tp = info.replaced.tuple_type if self.check_recursion(tp): # The key idea is that when we recursively return to a type already traversed, # then we break the cycle and put AnyType as a leaf. return AnyType(TypeOfAny.from_error) return tp.copy_modified(fallback=Instance(info.replaced, [])).accept(self) # Same as above but for TypedDicts. if info.replaced and info.replaced.typeddict_type: td = info.replaced.typeddict_type if self.check_recursion(td): # We also break the cycles for TypedDicts as explained above for NamedTuples. return AnyType(TypeOfAny.from_error) return td.copy_modified(fallback=Instance(info.replaced, [])).accept(self) if self.check_recursion(t): # We also need to break a potential cycle with normal (non-synthetic) instance types. 
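            # A hedged illustration (hypothetical user code) of the kind of cycle
            # this visitor breaks:
            #
            #     class N(NamedTuple):
            #         parent: 'N'    # self-reference
            #
            # When the resolver re-encounters a type it has already visited, the
            # repeated occurrence is replaced with Any (see check_recursion above).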
return Instance(t.type, [AnyType(TypeOfAny.from_error)] * len(t.type.defn.type_vars)) return super().visit_instance(t) def visit_type_var(self, t: TypeVarType) -> Type: if self.check_recursion(t): return AnyType(TypeOfAny.from_error) if t.upper_bound: t.upper_bound = t.upper_bound.accept(self) if t.values: t.values = [v.accept(self) for v in t.values] return t def visit_callable_type(self, t: CallableType) -> Type: if self.check_recursion(t): return AnyType(TypeOfAny.from_error) arg_types = [tp.accept(self) for tp in t.arg_types] ret_type = t.ret_type.accept(self) variables = t.variables.copy() for v in variables: if v.upper_bound: v.upper_bound = v.upper_bound.accept(self) if v.values: v.values = [val.accept(self) for val in v.values] return t.copy_modified(arg_types=arg_types, ret_type=ret_type, variables=variables) def visit_overloaded(self, t: Overloaded) -> Type: if self.check_recursion(t): return AnyType(TypeOfAny.from_error) return super().visit_overloaded(t) def visit_tuple_type(self, t: TupleType) -> Type: if self.check_recursion(t): return AnyType(TypeOfAny.from_error) items = [it.accept(self) for it in t.items] fallback = self.visit_instance(t.fallback, from_fallback=True) assert isinstance(fallback, Instance) return TupleType(items, fallback, t.line, t.column) def visit_typeddict_type(self, t: TypedDictType) -> Type: if self.check_recursion(t): return AnyType(TypeOfAny.from_error) items = OrderedDict([ (item_name, item_type.accept(self)) for (item_name, item_type) in t.items.items() ]) fallback = self.visit_instance(t.fallback, from_fallback=True) assert isinstance(fallback, Instance) return TypedDictType(items, t.required_keys, fallback, t.line, t.column) def visit_union_type(self, t: UnionType) -> Type: if self.check_recursion(t): return AnyType(TypeOfAny.from_error) return super().visit_union_type(t) def visit_type_type(self, t: TypeType) -> Type: if self.check_recursion(t): return AnyType(TypeOfAny.from_error) return super().visit_type_type(t) mypy-0.560/mypy/server/0000755€tŠÔÚ€2›s®0000000000013215007242021174 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/mypy/server/__init__.py0000644€tŠÔÚ€2›s®0000000000013215007205023272 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/mypy/server/astdiff.py0000644€tŠÔÚ€2›s®0000003442213215007205023172 0ustar jukkaDROPBOX\Domain Users00000000000000"""Compare two versions of a module symbol table. The goal is to find which AST nodes have externally visible changes, so that we can fire triggers and re-type-check other parts of the program that are stale because of the changes. Only look at detail at definitions at the current module. """ from typing import Set, List, TypeVar, Dict, Tuple, Optional, Sequence from mypy.nodes import ( SymbolTable, SymbolTableNode, FuncBase, TypeInfo, Var, MypyFile, SymbolNode, Decorator, TypeVarExpr, MODULE_REF, TYPE_ALIAS, UNBOUND_IMPORTED, TVAR ) from mypy.types import ( Type, TypeVisitor, UnboundType, TypeList, AnyType, NoneTyp, UninhabitedType, ErasedType, DeletedType, Instance, TypeVarType, CallableType, TupleType, TypedDictType, UnionType, Overloaded, PartialType, TypeType ) from mypy.util import get_prefix def is_identical_type(t: Type, s: Type) -> bool: return t.accept(IdenticalTypeVisitor(s)) TT = TypeVar('TT', bound=Type) def is_identical_types(a: List[TT], b: List[TT]) -> bool: return len(a) == len(b) and all(is_identical_type(t, s) for t, s in zip(a, b)) class IdenticalTypeVisitor(TypeVisitor[bool]): """Visitor for checking whether two types are identical. 
This may be conservative -- it's okay for two types to be considered different even if they are actually the same. The results are only used to improve performance, not relied on for correctness. Differences from mypy.sametypes: * Types with the same name but different AST nodes are considered identical. * If one of the types is not valid for whatever reason, they are considered different. * Sometimes require types to be structurally identical, even if they are semantically the same type. """ def __init__(self, right: Type) -> None: self.right = right # visit_x(left) means: is left (which is an instance of X) the same type as # right? def visit_unbound_type(self, left: UnboundType) -> bool: return False def visit_any(self, left: AnyType) -> bool: return isinstance(self.right, AnyType) def visit_none_type(self, left: NoneTyp) -> bool: return isinstance(self.right, NoneTyp) def visit_uninhabited_type(self, t: UninhabitedType) -> bool: return isinstance(self.right, UninhabitedType) def visit_erased_type(self, left: ErasedType) -> bool: return False def visit_deleted_type(self, left: DeletedType) -> bool: return isinstance(self.right, DeletedType) def visit_instance(self, left: Instance) -> bool: return (isinstance(self.right, Instance) and left.type.fullname() == self.right.type.fullname() and is_identical_types(left.args, self.right.args)) def visit_type_var(self, left: TypeVarType) -> bool: return (isinstance(self.right, TypeVarType) and left.id == self.right.id) def visit_callable_type(self, left: CallableType) -> bool: # FIX generics if isinstance(self.right, CallableType): cright = self.right return (is_identical_type(left.ret_type, cright.ret_type) and is_identical_types(left.arg_types, cright.arg_types) and left.arg_names == cright.arg_names and left.arg_kinds == cright.arg_kinds and left.is_type_obj() == cright.is_type_obj() and left.is_ellipsis_args == cright.is_ellipsis_args) return False def visit_tuple_type(self, left: TupleType) -> bool: if isinstance(self.right, TupleType): return is_identical_types(left.items, self.right.items) return False def visit_typeddict_type(self, left: TypedDictType) -> bool: if isinstance(self.right, TypedDictType): if left.items.keys() != self.right.items.keys(): return False for (_, left_item_type, right_item_type) in left.zip(self.right): if not is_identical_type(left_item_type, right_item_type): return False return True return False def visit_union_type(self, left: UnionType) -> bool: if isinstance(self.right, UnionType): # Require structurally identical types. return is_identical_types(left.items, self.right.items) return False def visit_overloaded(self, left: Overloaded) -> bool: if isinstance(self.right, Overloaded): return is_identical_types(left.items(), self.right.items()) return False def visit_partial_type(self, left: PartialType) -> bool: # A partial type is not fully defined, so the result is indeterminate. We shouldn't # get here. raise RuntimeError def visit_type_type(self, left: TypeType) -> bool: if isinstance(self.right, TypeType): return is_identical_type(left.item, self.right.item) return False # Snapshot representation of a symbol table node or type. The representation is # opaque -- the only supported operations are comparing for equality and # hashing (latter for type snapshots only). Snapshots can contain primitive # objects, nested tuples, lists and dictionaries and primitive objects (type # snapshots are immutable). # # For example, the snapshot of the 'int' type is ('Instance', 'builtins.int', ()). 
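# As a further, hedged illustration, a generic instance such as List[int] is
# snapshotted roughly as
#
#     ('Instance', 'builtins.list', (('Instance', 'builtins.int', ()),))
#
# so changing the item type to str yields an unequal snapshot, which is how
# compare_symbol_table_snapshots() below detects that the definition changed.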
SnapshotItem = Tuple[object, ...] def compare_symbol_table_snapshots( name_prefix: str, snapshot1: Dict[str, SnapshotItem], snapshot2: Dict[str, SnapshotItem]) -> Set[str]: """Return names that are different in two snapshots of a symbol table. Return a set of fully-qualified names (e.g., 'mod.func' or 'mod.Class.method'). Only shallow (intra-module) differences are considered. References to things defined outside the module are compared based on the name of the target only. """ # Find names only defined only in one version. names1 = {'%s.%s' % (name_prefix, name) for name in snapshot1} names2 = {'%s.%s' % (name_prefix, name) for name in snapshot2} triggers = names1 ^ names2 # Look for names defined in both versions that are different. for name in set(snapshot1.keys()) & set(snapshot2.keys()): item1 = snapshot1[name] item2 = snapshot2[name] kind1 = item1[0] kind2 = item2[0] item_name = '%s.%s' % (name_prefix, name) if kind1 != kind2: # Different kind of node in two snapshots -> trivially different. triggers.add(item_name) elif kind1 == 'TypeInfo': if item1[:-1] != item2[:-1]: # Record major difference (outside class symbol tables). triggers.add(item_name) # Look for differences in nested class symbol table entries. assert isinstance(item1[-1], dict) assert isinstance(item2[-1], dict) triggers |= compare_symbol_table_snapshots(item_name, item1[-1], item2[-1]) else: # Shallow node (no interesting internal structure). Just use equality. if snapshot1[name] != snapshot2[name]: triggers.add(item_name) return triggers def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> Dict[str, SnapshotItem]: """Create a snapshot description that represents the state of a symbol table. The snapshot has a representation based on nested tuples and dicts that makes it easy and fast to find differences. Only "shallow" state is included in the snapshot -- references to things defined in other modules are represented just by the names of the targers. """ result = {} # type: Dict[str, SnapshotItem] for name, symbol in table.items(): node = symbol.node # TODO: cross_ref? fullname = node.fullname() if node else None common = (fullname, symbol.kind, symbol.module_public) if symbol.kind == MODULE_REF: # This is a cross-reference to another module. assert isinstance(node, MypyFile) result[name] = ('Moduleref', common) elif symbol.kind == TVAR: assert isinstance(node, TypeVarExpr) result[name] = ('TypeVar', node.variance, [snapshot_type(value) for value in node.values], snapshot_type(node.upper_bound)) elif symbol.kind == TYPE_ALIAS: result[name] = ('TypeAlias', symbol.alias_tvars, snapshot_optional_type(symbol.type_override)) else: assert symbol.kind != UNBOUND_IMPORTED if node and get_prefix(node.fullname()) != name_prefix: # This is a cross-reference to a node defined in another module. result[name] = ('CrossRef', common, symbol.normalized) else: result[name] = snapshot_definition(node, common) return result def snapshot_definition(node: Optional[SymbolNode], common: Tuple[object, ...]) -> Tuple[object, ...]: """Create a snapshot description of a symbol table node. The representation is nested tuples and dicts. Only externally visible attributes are included. 
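    For example (a rough sketch), a module-level variable declared as 'x: int'
    is represented as ('Var', common, ('Instance', 'builtins.int', ())), where
    'common' carries the full name, symbol kind and visibility of the symbol.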
""" if isinstance(node, FuncBase): # TODO: info return ('Func', common, node.is_property, snapshot_type(node.type)) elif isinstance(node, Var): return ('Var', common, snapshot_optional_type(node.type)) elif isinstance(node, Decorator): # Note that decorated methods are represented by Decorator instances in # a symbol table since we need to preserve information about the # decorated function (whether it's a class function, for # example). Top-level decorated functions, however, are represented by # the corresponding Var node, since that happens to provide enough # context. return ('Decorator', node.is_overload, snapshot_optional_type(node.var.type), snapshot_definition(node.func, common)) elif isinstance(node, TypeInfo): # TODO: # type_vars # bases # _promote # tuple_type # typeddict_type attrs = (node.is_abstract, node.is_enum, node.fallback_to_any, node.is_named_tuple, node.is_newtype, [base.fullname() for base in node.mro]) prefix = node.fullname() symbol_table = snapshot_symbol_table(prefix, node.names) return ('TypeInfo', common, attrs, symbol_table) else: # TODO: Handle additional types: TypeVarExpr, MypyFile, ... assert False, type(node) def snapshot_type(typ: Type) -> SnapshotItem: """Create a snapshot representation of a type using nested tuples.""" return typ.accept(SnapshotTypeVisitor()) def snapshot_optional_type(typ: Optional[Type]) -> Optional[SnapshotItem]: if typ: return snapshot_type(typ) else: return None def snapshot_types(types: Sequence[Type]) -> SnapshotItem: return tuple(snapshot_type(item) for item in types) def snapshot_simple_type(typ: Type) -> SnapshotItem: return (type(typ).__name__,) class SnapshotTypeVisitor(TypeVisitor[SnapshotItem]): """Creates a read-only, self-contained snapshot of a type object. Properties of a snapshot: - Contains (nested) tuples and other immutable primitive objects only. - References to AST nodes are replaced with full names of targets. - Has no references to mutable or non-primitive objects. - Two snapshots represent the same object if and only if they are equal. 
""" def visit_unbound_type(self, typ: UnboundType) -> SnapshotItem: return ('UnboundType', typ.name, typ.optional, typ.empty_tuple_index, snapshot_types(typ.args)) def visit_any(self, typ: AnyType) -> SnapshotItem: return snapshot_simple_type(typ) def visit_none_type(self, typ: NoneTyp) -> SnapshotItem: return snapshot_simple_type(typ) def visit_uninhabited_type(self, typ: UninhabitedType) -> SnapshotItem: return snapshot_simple_type(typ) def visit_erased_type(self, typ: ErasedType) -> SnapshotItem: return snapshot_simple_type(typ) def visit_deleted_type(self, typ: DeletedType) -> SnapshotItem: return snapshot_simple_type(typ) def visit_instance(self, typ: Instance) -> SnapshotItem: return ('Instance', typ.type.fullname(), snapshot_types(typ.args)) def visit_type_var(self, typ: TypeVarType) -> SnapshotItem: return ('TypeVar', typ.name, typ.fullname, typ.id.raw_id, typ.id.meta_level, snapshot_types(typ.values), snapshot_type(typ.upper_bound), typ.variance) def visit_callable_type(self, typ: CallableType) -> SnapshotItem: # FIX generics return ('CallableType', snapshot_types(typ.arg_types), snapshot_type(typ.ret_type), tuple(typ.arg_names), tuple(typ.arg_kinds), typ.is_type_obj(), typ.is_ellipsis_args) def visit_tuple_type(self, typ: TupleType) -> SnapshotItem: return ('TupleType', snapshot_types(typ.items)) def visit_typeddict_type(self, typ: TypedDictType) -> SnapshotItem: items = tuple((key, snapshot_type(item_type)) for key, item_type in typ.items.items()) return ('TypedDictType', items) def visit_union_type(self, typ: UnionType) -> SnapshotItem: # Sort and remove duplicates so that we can use equality to test for # equivalent union type snapshots. items = {snapshot_type(item) for item in typ.items} normalized = tuple(sorted(items)) return ('UnionType', normalized) def visit_overloaded(self, typ: Overloaded) -> SnapshotItem: return ('Overloaded', snapshot_types(typ.items())) def visit_partial_type(self, typ: PartialType) -> SnapshotItem: # A partial type is not fully defined, so the result is indeterminate. We shouldn't # get here. raise RuntimeError def visit_type_type(self, typ: TypeType) -> SnapshotItem: return ('TypeType', snapshot_type(typ.item)) mypy-0.560/mypy/server/astmerge.py0000644€tŠÔÚ€2›s®0000002143013215007205023354 0ustar jukkaDROPBOX\Domain Users00000000000000"""Merge a new version of a module AST to an old version. See the main entry point merge_asts for details. """ from typing import Dict, List, cast, TypeVar, Optional from mypy.nodes import ( Node, MypyFile, SymbolTable, Block, AssignmentStmt, NameExpr, MemberExpr, RefExpr, TypeInfo, FuncDef, ClassDef, NamedTupleExpr, SymbolNode, Var, Statement, MDEF ) from mypy.traverser import TraverserVisitor from mypy.types import ( Type, TypeVisitor, Instance, AnyType, NoneTyp, CallableType, DeletedType, PartialType, TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType, Overloaded ) from mypy.util import get_prefix def merge_asts(old: MypyFile, old_symbols: SymbolTable, new: MypyFile, new_symbols: SymbolTable) -> None: """Merge a new version of a module AST to a previous version. The main idea is to preserve the identities of externally visible nodes in the old AST (that have a corresponding node in the new AST). All old node state (outside identity) will come from the new AST. When this returns, 'old' will refer to the merged AST, but 'new_symbols' will be the new symbol table. 'new' and 'old_symbols' will no longer be valid. 
""" assert new.fullname() == old.fullname() replacement_map = replacement_map_from_symbol_table( old_symbols, new_symbols, prefix=old.fullname()) replacement_map[new] = old node = replace_nodes_in_ast(new, replacement_map) assert node is old replace_nodes_in_symbol_table(new_symbols, replacement_map) def replacement_map_from_symbol_table( old: SymbolTable, new: SymbolTable, prefix: str) -> Dict[SymbolNode, SymbolNode]: replacements = {} # type: Dict[SymbolNode, SymbolNode] for name, node in old.items(): if (name in new and (node.kind == MDEF or node.node and get_prefix(node.node.fullname()) == prefix)): new_node = new[name] if (type(new_node.node) == type(node.node) # noqa and new_node.node and node.node and new_node.node.fullname() == node.node.fullname() and new_node.kind == node.kind): replacements[new_node.node] = node.node if isinstance(node.node, TypeInfo) and isinstance(new_node.node, TypeInfo): type_repl = replacement_map_from_symbol_table( node.node.names, new_node.node.names, prefix) replacements.update(type_repl) return replacements def replace_nodes_in_ast(node: SymbolNode, replacements: Dict[SymbolNode, SymbolNode]) -> SymbolNode: visitor = NodeReplaceVisitor(replacements) node.accept(visitor) return replacements.get(node, node) SN = TypeVar('SN', bound=SymbolNode) class NodeReplaceVisitor(TraverserVisitor): """Transform some nodes to new identities in an AST. Only nodes that live in the symbol table may be replaced, which simplifies the implementation some. """ def __init__(self, replacements: Dict[SymbolNode, SymbolNode]) -> None: self.replacements = replacements def visit_mypy_file(self, node: MypyFile) -> None: node = self.fixup(node) node.defs = self.replace_statements(node.defs) super().visit_mypy_file(node) def visit_block(self, node: Block) -> None: super().visit_block(node) node.body = self.replace_statements(node.body) def visit_func_def(self, node: FuncDef) -> None: node = self.fixup(node) if node.type: self.fixup_type(node.type) super().visit_func_def(node) def visit_class_def(self, node: ClassDef) -> None: # TODO additional things like the MRO node.defs.body = self.replace_statements(node.defs.body) self.process_type_info(node.info) super().visit_class_def(node) def visit_assignment_stmt(self, node: AssignmentStmt) -> None: if node.type: self.fixup_type(node.type) super().visit_assignment_stmt(node) # Expressions def visit_name_expr(self, node: NameExpr) -> None: self.visit_ref_expr(node) def visit_member_expr(self, node: MemberExpr) -> None: self.visit_ref_expr(node) super().visit_member_expr(node) def visit_ref_expr(self, node: RefExpr) -> None: if node.node is not None: node.node = self.fixup(node.node) def visit_namedtuple_expr(self, node: NamedTupleExpr) -> None: super().visit_namedtuple_expr(node) self.process_type_info(node.info) # Helpers def fixup(self, node: SN) -> SN: if node in self.replacements: new = self.replacements[node] new.__dict__ = node.__dict__ return cast(SN, new) return node def fixup_type(self, typ: Type) -> None: typ.accept(TypeReplaceVisitor(self.replacements)) def process_type_info(self, info: TypeInfo) -> None: # TODO additional things like the MRO replace_nodes_in_symbol_table(info.names, self.replacements) for i, item in enumerate(info.mro): info.mro[i] = self.fixup(info.mro[i]) for i, base in enumerate(info.bases): self.fixup_type(info.bases[i]) def replace_statements(self, nodes: List[Statement]) -> List[Statement]: result = [] for node in nodes: if isinstance(node, SymbolNode): node = self.fixup(node) result.append(node) return 
result class TypeReplaceVisitor(TypeVisitor[None]): def __init__(self, replacements: Dict[SymbolNode, SymbolNode]) -> None: self.replacements = replacements def visit_instance(self, typ: Instance) -> None: typ.type = self.fixup(typ.type) for arg in typ.args: arg.accept(self) def visit_any(self, typ: AnyType) -> None: pass def visit_none_type(self, typ: NoneTyp) -> None: pass def visit_callable_type(self, typ: CallableType) -> None: for arg in typ.arg_types: arg.accept(self) typ.ret_type.accept(self) if typ.definition: # No need to fixup since this is just a cross-reference. typ.definition = self.replacements.get(typ.definition, typ.definition) # TODO: typ.fallback for tv in typ.variables: tv.upper_bound.accept(self) for value in tv.values: value.accept(self) def visit_overloaded(self, t: Overloaded) -> None: for item in t.items(): item.accept(self) def visit_deleted_type(self, typ: DeletedType) -> None: pass def visit_partial_type(self, typ: PartialType) -> None: raise RuntimeError def visit_tuple_type(self, typ: TupleType) -> None: for item in typ.items: item.accept(self) def visit_type_type(self, typ: TypeType) -> None: typ.item.accept(self) def visit_type_var(self, typ: TypeVarType) -> None: typ.upper_bound.accept(self) for value in typ.values: value.accept(self) def visit_typeddict_type(self, typ: TypedDictType) -> None: raise NotImplementedError def visit_unbound_type(self, typ: UnboundType) -> None: for arg in typ.args: arg.accept(self) def visit_uninhabited_type(self, typ: UninhabitedType) -> None: pass def visit_union_type(self, typ: UnionType) -> None: for item in typ.items: item.accept(self) # Helpers def fixup(self, node: SN) -> SN: if node in self.replacements: new = self.replacements[node] # TODO: This may be unnecessary? new.__dict__ = node.__dict__ return cast(SN, new) return node def replace_nodes_in_symbol_table(symbols: SymbolTable, replacements: Dict[SymbolNode, SymbolNode]) -> None: for name, node in symbols.items(): if node.node: if node.node in replacements: new = replacements[node.node] new.__dict__ = node.node.__dict__ node.node = new # TODO: Other node types if isinstance(node.node, Var) and node.node.type: node.node.type.accept(TypeReplaceVisitor(replacements)) node.node.info = cast(TypeInfo, replacements.get(node.node.info, node.node.info)) else: # TODO: Other node types if isinstance(node.node, Var) and node.node.type: node.node.type.accept(TypeReplaceVisitor(replacements)) override = node.type_override if override: override.accept(TypeReplaceVisitor(replacements)) mypy-0.560/mypy/server/aststrip.py0000644€tŠÔÚ€2›s®0000001245513215007206023426 0ustar jukkaDROPBOX\Domain Users00000000000000"""Strip AST from semantic information. This is used in fine-grained incremental checking to reprocess existing AST nodes. 
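A rough usage sketch (illustrative only):

    strip_target(module_node)   # reset a module's top level
    strip_target(func_def)      # reset a single function or method

After stripping, the target can be put through semantic analysis again without
stale state from the previous run getting in the way.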
""" import contextlib from typing import Union, Iterator, Optional from mypy.nodes import ( Node, FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, FuncItem, ClassDef, AssignmentStmt, ImportFrom, Import, TypeInfo, SymbolTable, Var, UNBOUND_IMPORTED, GDEF ) from mypy.traverser import TraverserVisitor def strip_target(node: Union[MypyFile, FuncItem]) -> None: """Strip a fine-grained incremental mode target from semantic information.""" visitor = NodeStripVisitor() if isinstance(node, MypyFile): visitor.strip_file_top_level(node) else: node.accept(visitor) class NodeStripVisitor(TraverserVisitor): def __init__(self) -> None: self.type = None # type: Optional[TypeInfo] self.names = None # type: Optional[SymbolTable] def strip_file_top_level(self, file_node: MypyFile) -> None: """Strip a module top-level (don't recursive into functions).""" self.names = file_node.names # TODO: Functions nested within statements for node in file_node.defs: if not isinstance(node, (FuncItem, ClassDef)): node.accept(self) elif isinstance(node, ClassDef): self.strip_class_body(node) def strip_class_body(self, node: ClassDef) -> None: """Strip class body and type info, but don't strip methods.""" # TODO: Statements in class body node.info.type_vars = [] node.info.bases = [] node.info.abstract_attributes = [] node.info.mro = [] node.info.add_type_vars() def visit_func_def(self, node: FuncDef) -> None: node.expanded = [] node.type = node.unanalyzed_type with self.enter_class(node.info) if node.info else nothing(): super().visit_func_def(node) @contextlib.contextmanager def enter_class(self, info: TypeInfo) -> Iterator[None]: # TODO: Update and restore self.names old = self.type self.type = info yield self.type = old def visit_assignment_stmt(self, node: AssignmentStmt) -> None: node.type = node.unanalyzed_type super().visit_assignment_stmt(node) def visit_import_from(self, node: ImportFrom) -> None: if node.assignments: node.assignments = [] else: if self.names: # Reset entries in the symbol table. This is necessary since # otherwise the semantic analyzer will think that the import # assigns to an existing name instead of defining a new one. for name, as_name in node.names: imported_name = as_name or name symnode = self.names[imported_name] symnode.kind = UNBOUND_IMPORTED symnode.node = None def visit_import(self, node: Import) -> None: if node.assignments: node.assignments = [] else: if self.names: # Reset entries in the symbol table. This is necessary since # otherwise the semantic analyzer will think that the import # assigns to an existing name instead of defining a new one. for name, as_name in node.ids: imported_name = as_name or name initial = imported_name.split('.')[0] symnode = self.names[initial] symnode.kind = UNBOUND_IMPORTED symnode.node = None def visit_name_expr(self, node: NameExpr) -> None: # Global assignments are processed in semantic analysis pass 1, and we # only want to strip changes made in passes 2 or later. if not (node.kind == GDEF and node.is_new_def): self.strip_ref_expr(node) def visit_member_expr(self, node: MemberExpr) -> None: self.strip_ref_expr(node) if self.is_duplicate_attribute_def(node): # This is marked as an instance variable definition but a base class # defines an attribute with the same name, and we can't have # multiple definitions for an attribute. Defer to the base class # definition. 
if self.type is not None: del self.type.names[node.name] node.is_inferred_def = False node.def_var = None def is_duplicate_attribute_def(self, node: MemberExpr) -> bool: if not node.is_inferred_def: return False assert self.type is not None, "Internal error: Member defined outside class" if node.name not in self.type.names: return False return any(info.get(node.name) is not None for info in self.type.mro[1:]) def strip_ref_expr(self, node: RefExpr) -> None: node.kind = None node.node = None node.fullname = None # TODO: handle more node types def is_self_member_ref(memberexpr: MemberExpr) -> bool: """Does memberexpr refer to an attribute of self?""" # TODO: Merge with is_self_member_ref in semanal.py. if not isinstance(memberexpr.expr, NameExpr): return False node = memberexpr.expr.node return isinstance(node, Var) and node.is_self @contextlib.contextmanager def nothing() -> Iterator[None]: yield mypy-0.560/mypy/server/deps.py0000644€tŠÔÚ€2›s®0000006313013215007205022503 0ustar jukkaDROPBOX\Domain Users00000000000000"""Generate fine-grained dependencies for AST nodes.""" from typing import Dict, List, Set, Optional, Tuple, Union from mypy.checkmember import bind_self from mypy.nodes import ( Node, Expression, MypyFile, FuncDef, ClassDef, AssignmentStmt, NameExpr, MemberExpr, Import, ImportFrom, CallExpr, CastExpr, TypeVarExpr, TypeApplication, IndexExpr, UnaryExpr, OpExpr, ComparisonExpr, GeneratorExpr, DictionaryComprehension, StarExpr, PrintStmt, ForStmt, WithStmt, TupleExpr, ListExpr, OperatorAssignmentStmt, DelStmt, YieldFromExpr, Decorator, Block, TypeInfo, FuncBase, OverloadedFuncDef, RefExpr, Var, NamedTupleExpr, LDEF, MDEF, GDEF, op_methods, reverse_op_methods, ops_with_inplace_method, unary_op_methods ) from mypy.traverser import TraverserVisitor from mypy.types import ( Type, Instance, AnyType, NoneTyp, TypeVisitor, CallableType, DeletedType, PartialType, TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType, FunctionLike, ForwardRef, Overloaded ) from mypy.server.trigger import make_trigger def get_dependencies(target: MypyFile, type_map: Dict[Expression, Type], python_version: Tuple[int, int]) -> Dict[str, Set[str]]: """Get all dependencies of a node, recursively.""" visitor = DependencyVisitor(type_map, python_version) target.accept(visitor) return visitor.map def get_dependencies_of_target(module_id: str, target: Node, type_map: Dict[Expression, Type], python_version: Tuple[int, int]) -> Dict[str, Set[str]]: """Get dependencies of a target -- don't recursive into nested targets.""" # TODO: Add tests for this function. visitor = DependencyVisitor(type_map, python_version) visitor.enter_file_scope(module_id) if isinstance(target, MypyFile): # Only get dependencies of the top-level of the module. Don't recurse into # functions. for defn in target.defs: # TODO: Recurse into top-level statements and class bodies but skip functions. if not isinstance(defn, (ClassDef, Decorator, FuncDef, OverloadedFuncDef)): defn.accept(visitor) elif isinstance(target, FuncBase) and target.info: # It's a method. # TODO: Methods in nested classes. visitor.enter_class_scope(target.info) target.accept(visitor) visitor.leave_scope() else: target.accept(visitor) visitor.leave_scope() return visitor.map class DependencyVisitor(TraverserVisitor): def __init__(self, type_map: Dict[Expression, Type], python_version: Tuple[int, int]) -> None: # Stack of names of targets being processed. For stack targets we use the # surrounding module. 
self.target_stack = [] # type: List[str] # Stack of names of targets being processed, including class targets. self.full_target_stack = [] # type: List[str] self.scope_stack = [] # type: List[Union[None, TypeInfo, FuncDef]] self.type_map = type_map self.python2 = python_version[0] == 2 self.map = {} # type: Dict[str, Set[str]] self.is_class = False # TODO (incomplete): # from m import * # await # named tuples # TypedDict # protocols # metaclasses # type aliases # super() # relative imports # functional enum # type variable with value restriction def visit_mypy_file(self, o: MypyFile) -> None: self.enter_file_scope(o.fullname()) super().visit_mypy_file(o) self.leave_scope() def visit_func_def(self, o: FuncDef) -> None: if not isinstance(self.current_scope(), FuncDef): # Not a nested function, so create a new target. new_scope = True target = self.enter_function_scope(o) else: # Treat nested functions as components of the parent function target. new_scope = False target = self.current_target() if o.type: if self.is_class and isinstance(o.type, FunctionLike): signature = bind_self(o.type) # type: Type else: signature = o.type for trigger in get_type_triggers(signature): self.add_dependency(trigger) self.add_dependency(trigger, target=make_trigger(target)) if o.info: for base in non_trivial_bases(o.info): self.add_dependency(make_trigger(base.fullname() + '.' + o.name())) super().visit_func_def(o) if new_scope: self.leave_scope() def visit_decorator(self, o: Decorator) -> None: self.add_dependency(make_trigger(o.func.fullname())) super().visit_decorator(o) def visit_class_def(self, o: ClassDef) -> None: target = self.enter_class_scope(o.info) self.add_dependency(make_trigger(target), target) old_is_class = self.is_class self.is_class = True # Add dependencies to type variables of a generic class. for tv in o.type_vars: self.add_dependency(make_trigger(tv.fullname), target) # Add dependencies to base types. for base in o.info.bases: self.add_type_dependencies(base, target=target) # TODO: Add dependencies based on remaining TypeInfo attributes. super().visit_class_def(o) self.is_class = old_is_class info = o.info for name, node in info.names.items(): if isinstance(node.node, Var): for base_info in non_trivial_bases(info): # If the type of an attribute changes in a base class, we make references # to the attribute in the subclass stale. self.add_dependency(make_trigger(base_info.fullname() + '.' + name), target=make_trigger(info.fullname() + '.' + name)) for base_info in non_trivial_bases(info): for name, node in base_info.names.items(): self.add_dependency(make_trigger(base_info.fullname() + '.' + name), target=make_trigger(info.fullname() + '.' + name)) self.add_dependency(make_trigger(base_info.fullname() + '.__init__'), target=make_trigger(info.fullname() + '.__init__')) self.leave_scope() def visit_import(self, o: Import) -> None: for id, as_id in o.ids: # TODO: as_id self.add_dependency(make_trigger(id), self.current_target()) def visit_import_from(self, o: ImportFrom) -> None: assert o.relative == 0 # Relative imports not supported for name, as_name in o.names: self.add_dependency(make_trigger(o.id + '.' 
+ name)) def visit_block(self, o: Block) -> None: if not o.is_unreachable: super().visit_block(o) def visit_assignment_stmt(self, o: AssignmentStmt) -> None: # TODO: Implement all assignment special forms, including these: # TypedDict # NamedTuple # Enum # type aliases rvalue = o.rvalue if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypeVarExpr): # TODO: Support type variable value restriction analyzed = rvalue.analyzed self.add_type_dependencies(analyzed.upper_bound, target=make_trigger(analyzed.fullname())) elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, NamedTupleExpr): # Depend on types of named tuple items. info = rvalue.analyzed.info prefix = '%s.%s' % (self.current_full_target(), info.name()) for name, symnode in info.names.items(): if not name.startswith('_') and isinstance(symnode.node, Var): typ = symnode.node.type if typ: self.add_type_dependencies(typ) attr_target = make_trigger('%s.%s' % (prefix, name)) self.add_type_dependencies(typ, target=attr_target) else: # Normal assignment super().visit_assignment_stmt(o) for lvalue in o.lvalues: self.process_lvalue(lvalue) items = o.lvalues + [rvalue] for i in range(len(items) - 1): lvalue = items[i] rvalue = items[i + 1] if isinstance(lvalue, (TupleExpr, ListExpr)): self.add_attribute_dependency_for_expr(rvalue, '__iter__') if o.type: for trigger in get_type_triggers(o.type): self.add_dependency(trigger) def process_lvalue(self, lvalue: Expression) -> None: """Generate additional dependencies for an lvalue.""" if isinstance(lvalue, IndexExpr): self.add_operator_method_dependency(lvalue.base, '__setitem__') elif isinstance(lvalue, NameExpr): if lvalue.kind in (MDEF, GDEF): # Assignment to an attribute in the class body, or direct assignment to a # global variable. lvalue_type = self.get_non_partial_lvalue_type(lvalue) type_triggers = get_type_triggers(lvalue_type) attr_trigger = make_trigger('%s.%s' % (self.full_target_stack[-1], lvalue.name)) for type_trigger in type_triggers: self.add_dependency(type_trigger, attr_trigger) elif isinstance(lvalue, MemberExpr): if lvalue.kind is None: # Reference to a non-module attribute if lvalue.expr not in self.type_map: # Unreachable assignment -> not checked so no dependencies to generate. return object_type = self.type_map[lvalue.expr] lvalue_type = self.get_non_partial_lvalue_type(lvalue) type_triggers = get_type_triggers(lvalue_type) for attr_trigger in self.attribute_triggers(object_type, lvalue.name): for type_trigger in type_triggers: self.add_dependency(type_trigger, attr_trigger) elif isinstance(lvalue, (ListExpr, TupleExpr)): for item in lvalue.items: self.process_lvalue(item) # TODO: star lvalue def get_non_partial_lvalue_type(self, lvalue: RefExpr) -> Type: lvalue_type = self.type_map[lvalue] if isinstance(lvalue_type, PartialType): if isinstance(lvalue.node, Var) and lvalue.node.type: lvalue_type = lvalue.node.type else: assert False, "Unexpected partial type" return lvalue_type def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> None: super().visit_operator_assignment_stmt(o) self.process_lvalue(o.lvalue) method = op_methods[o.op] self.add_attribute_dependency_for_expr(o.lvalue, method) if o.op in ops_with_inplace_method: inplace_method = '__i' + method[2:] self.add_attribute_dependency_for_expr(o.lvalue, inplace_method) def visit_for_stmt(self, o: ForStmt) -> None: super().visit_for_stmt(o) # __getitem__ is only used if __iter__ is missing but for simplicity we # just always depend on both. 
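        # A hedged illustration (hypothetical module and class names): for a
        # statement like "for x in obj: ..." where obj has type m.C, this adds
        # dependencies on the triggers <m.C.__iter__> and <m.C.__getitem__>, so
        # a change to either method causes the enclosing target to be re-checked.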
self.add_attribute_dependency_for_expr(o.expr, '__iter__') self.add_attribute_dependency_for_expr(o.expr, '__getitem__') self.process_lvalue(o.index) if isinstance(o.index, (TupleExpr, ListExpr)): # Process multiple assignment to index variables. item_type = o.inferred_item_type if item_type: # This is similar to above. self.add_attribute_dependency(item_type, '__iter__') self.add_attribute_dependency(item_type, '__getitem__') if o.index_type: self.add_type_dependencies(o.index_type) def visit_with_stmt(self, o: WithStmt) -> None: super().visit_with_stmt(o) for e in o.expr: self.add_attribute_dependency_for_expr(e, '__enter__') self.add_attribute_dependency_for_expr(e, '__exit__') if o.target_type: self.add_type_dependencies(o.target_type) def visit_print_stmt(self, o: PrintStmt) -> None: super().visit_print_stmt(o) if o.target: self.add_attribute_dependency_for_expr(o.target, 'write') def visit_del_stmt(self, o: DelStmt) -> None: super().visit_del_stmt(o) if isinstance(o.expr, IndexExpr): self.add_attribute_dependency_for_expr(o.expr.base, '__delitem__') # Expressions # TODO # dependency on __init__ (e.g. ClassName()) # super() def visit_name_expr(self, o: NameExpr) -> None: if o.kind == LDEF: # We don't track depdendencies to local variables, since they # aren't externally visible. return if o.kind == MDEF: # Direct reference to member is only possible in the scope that # defined the name, so no dependency is required. return if o.fullname is not None: trigger = make_trigger(o.fullname) self.add_dependency(trigger) def visit_member_expr(self, e: MemberExpr) -> None: super().visit_member_expr(e) if e.kind is not None: # Reference to a module attribute if e.fullname is not None: trigger = make_trigger(e.fullname) self.add_dependency(trigger) else: # Reference to a non-module attribute if e.expr not in self.type_map: # No type available -- this happens for unreachable code. Since it's unreachable, # it wasn't type checked and we don't need to generate dependencies. 
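# A minimal sketch (not part of the mypy code above) of the dependency added for
# an attribute access "e.name" on a non-module expression: the trigger names the
# attribute on the expression's type, as visit_member_expr does via
# add_attribute_dependency. 'm.C' is a hypothetical class fullname.
def member_access_trigger(object_type_fullname: str, attr: str) -> str:
    return '<%s.%s>' % (object_type_fullname, attr)

assert member_access_trigger('m.C', 'x') == '<m.C.x>'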
return typ = self.type_map[e.expr] self.add_attribute_dependency(typ, e.name) def visit_call_expr(self, e: CallExpr) -> None: super().visit_call_expr(e) callee_type = self.type_map.get(e.callee) if isinstance(callee_type, FunctionLike) and callee_type.is_type_obj(): class_name = callee_type.type_object().fullname() self.add_dependency(make_trigger(class_name + '.__init__')) def visit_cast_expr(self, e: CastExpr) -> None: super().visit_cast_expr(e) self.add_type_dependencies(e.type) def visit_type_application(self, e: TypeApplication) -> None: super().visit_type_application(e) for typ in e.types: self.add_type_dependencies(typ) def visit_index_expr(self, e: IndexExpr) -> None: super().visit_index_expr(e) self.add_operator_method_dependency(e.base, '__getitem__') def visit_unary_expr(self, e: UnaryExpr) -> None: super().visit_unary_expr(e) if e.op not in unary_op_methods: return method = unary_op_methods[e.op] self.add_operator_method_dependency(e.expr, method) def visit_op_expr(self, e: OpExpr) -> None: super().visit_op_expr(e) self.process_binary_op(e.op, e.left, e.right) def visit_comparison_expr(self, e: ComparisonExpr) -> None: super().visit_comparison_expr(e) for i, op in enumerate(e.operators): left = e.operands[i] right = e.operands[i + 1] self.process_binary_op(op, left, right) if self.python2 and op in ('==', '!=', '<', '<=', '>', '>='): self.add_operator_method_dependency(left, '__cmp__') self.add_operator_method_dependency(right, '__cmp__') def process_binary_op(self, op: str, left: Expression, right: Expression) -> None: method = op_methods.get(op) if method: if op == 'in': self.add_operator_method_dependency(right, method) else: self.add_operator_method_dependency(left, method) rev_method = reverse_op_methods.get(method) if rev_method: self.add_operator_method_dependency(right, rev_method) def add_operator_method_dependency(self, e: Expression, method: str) -> None: typ = self.type_map.get(e) if typ is not None: self.add_operator_method_dependency_for_type(typ, method) def add_operator_method_dependency_for_type(self, typ: Type, method: str) -> None: # Note that operator methods can't be (non-metaclass) methods of type objects # (that is, TypeType objects or Callables representing a type). # TODO: TypedDict # TODO: metaclasses if isinstance(typ, TypeVarType): typ = typ.upper_bound if isinstance(typ, TupleType): typ = typ.fallback if isinstance(typ, Instance): trigger = make_trigger(typ.type.fullname() + '.' + method) self.add_dependency(trigger) elif isinstance(typ, UnionType): for item in typ.items: self.add_operator_method_dependency_for_type(item, method) def visit_generator_expr(self, e: GeneratorExpr) -> None: super().visit_generator_expr(e) for seq in e.sequences: self.add_iter_dependency(seq) def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None: super().visit_dictionary_comprehension(e) for seq in e.sequences: self.add_iter_dependency(seq) def visit_star_expr(self, e: StarExpr) -> None: super().visit_star_expr(e) self.add_iter_dependency(e.expr) def visit_yield_from_expr(self, e: YieldFromExpr) -> None: super().visit_yield_from_expr(e) self.add_iter_dependency(e.expr) # Helpers def add_dependency(self, trigger: str, target: Optional[str] = None) -> None: """Add dependency from trigger to a target. If the target is not given explicitly, use the current target. """ if trigger.startswith((' None: """Add dependencies to all components of a type. Args: target: If not None, override the default (current) target of the generated dependency. 
""" # TODO: Use this method in more places where get_type_triggers() + add_dependency() # are called together. for trigger in get_type_triggers(typ): self.add_dependency(trigger, target) def add_attribute_dependency(self, typ: Type, name: str) -> None: """Add dependencies for accessing a named attribute of a type.""" targets = self.attribute_triggers(typ, name) for target in targets: self.add_dependency(target) def attribute_triggers(self, typ: Type, name: str) -> List[str]: """Return all triggers associated with the attribute of a type.""" if isinstance(typ, TypeVarType): typ = typ.upper_bound if isinstance(typ, TupleType): typ = typ.fallback if isinstance(typ, Instance): member = '%s.%s' % (typ.type.fullname(), name) return [make_trigger(member)] elif isinstance(typ, FunctionLike) and typ.is_type_obj(): member = '%s.%s' % (typ.type_object().fullname(), name) return [make_trigger(member)] elif isinstance(typ, UnionType): targets = [] for item in typ.items: targets.extend(self.attribute_triggers(item, name)) return targets elif isinstance(typ, TypeType): # TODO: Metaclass attribute lookup return self.attribute_triggers(typ.item, name) else: return [] def add_attribute_dependency_for_expr(self, e: Expression, name: str) -> None: typ = self.type_map.get(e) if typ is not None: self.add_attribute_dependency(typ, name) def add_iter_dependency(self, node: Expression) -> None: typ = self.type_map.get(node) if typ: self.add_attribute_dependency(typ, '__iter__') def enter_file_scope(self, prefix: str) -> None: """Enter a module target scope.""" self.target_stack.append(prefix) self.full_target_stack.append(prefix) self.scope_stack.append(None) def enter_function_scope(self, fdef: FuncDef) -> str: """Enter a function target scope.""" target = '%s.%s' % (self.full_target_stack[-1], fdef.name()) self.target_stack.append(target) self.full_target_stack.append(target) self.scope_stack.append(fdef) return target def enter_class_scope(self, info: TypeInfo) -> str: """Enter a class target scope.""" # Duplicate the previous top non-class target (it can't be a class but since the # depths of all stacks must agree we need something). self.target_stack.append(self.target_stack[-1]) full_target = '%s.%s' % (self.full_target_stack[-1], info.name()) self.full_target_stack.append(full_target) self.scope_stack.append(info) return full_target def leave_scope(self) -> None: """Leave a target scope.""" self.target_stack.pop() self.full_target_stack.pop() self.scope_stack.pop() def current_target(self) -> str: """Return the current target (non-class; for a class return enclosing module).""" return self.target_stack[-1] def current_full_target(self) -> str: """Return the current target (may be a class).""" return self.full_target_stack[-1] def current_scope(self) -> Optional[Node]: return self.scope_stack[-1] def get_type_triggers(typ: Type) -> List[str]: """Return all triggers that correspond to a type becoming stale.""" return typ.accept(TypeTriggersVisitor()) class TypeTriggersVisitor(TypeVisitor[List[str]]): def __init__(self) -> None: self.deps = [] # type: List[str] def visit_instance(self, typ: Instance) -> List[str]: trigger = make_trigger(typ.type.fullname()) triggers = [trigger] for arg in typ.args: triggers.extend(get_type_triggers(arg)) return triggers def visit_any(self, typ: AnyType) -> List[str]: return [] def visit_none_type(self, typ: NoneTyp) -> List[str]: return [] def visit_callable_type(self, typ: CallableType) -> List[str]: # TODO: generic callables # TODO: fallback? 
triggers = [] for arg in typ.arg_types: triggers.extend(get_type_triggers(arg)) triggers.extend(get_type_triggers(typ.ret_type)) return triggers def visit_overloaded(self, typ: Overloaded) -> List[str]: triggers = [] for item in typ.items(): triggers.extend(get_type_triggers(item)) return triggers def visit_deleted_type(self, typ: DeletedType) -> List[str]: return [] def visit_partial_type(self, typ: PartialType) -> List[str]: assert False, "Should not see a partial type here" def visit_tuple_type(self, typ: TupleType) -> List[str]: triggers = [] for item in typ.items: triggers.extend(get_type_triggers(item)) triggers.extend(get_type_triggers(typ.fallback)) return triggers def visit_type_type(self, typ: TypeType) -> List[str]: return get_type_triggers(typ.item) def visit_forwardref_type(self, typ: ForwardRef) -> List[str]: assert False, 'Internal error: Leaked forward reference object {}'.format(typ) def visit_type_var(self, typ: TypeVarType) -> List[str]: # TODO: bound (values?) triggers = [] if typ.fullname: triggers.append(make_trigger(typ.fullname)) return triggers def visit_typeddict_type(self, typ: TypedDictType) -> List[str]: # TODO: implement return [] def visit_unbound_type(self, typ: UnboundType) -> List[str]: return [] def visit_uninhabited_type(self, typ: UninhabitedType) -> List[str]: return [] def visit_union_type(self, typ: UnionType) -> List[str]: triggers = [] for item in typ.items: triggers.extend(get_type_triggers(item)) return triggers def non_trivial_bases(info: TypeInfo) -> List[TypeInfo]: return [base for base in info.mro[1:] if base.fullname() != 'builtins.object'] def dump_all_dependencies(modules: Dict[str, MypyFile], type_map: Dict[Expression, Type], python_version: Tuple[int, int]) -> None: """Generate dependencies for all interesting modules and print them to stdout.""" all_deps = {} # type: Dict[str, Set[str]] for id, node in modules.items(): # Uncomment for debugging: # print('processing', id) if id in ('builtins', 'typing') or '/typeshed/' in node.path: continue assert id == node.fullname() deps = get_dependencies(node, type_map, python_version) for trigger, targets in deps.items(): all_deps.setdefault(trigger, set()).update(targets) for trigger, targets in sorted(all_deps.items(), key=lambda x: x[0]): print(trigger) for target in sorted(targets): print(' %s' % target) mypy-0.560/mypy/server/subexpr.py0000644€tŠÔÚ€2›s®0000001051613215007205023240 0ustar jukkaDROPBOX\Domain Users00000000000000"""Find all subexpressions of an AST node.""" from typing import List from mypy.nodes import ( Expression, Node, MemberExpr, YieldFromExpr, YieldExpr, CallExpr, OpExpr, ComparisonExpr, SliceExpr, CastExpr, RevealTypeExpr, UnaryExpr, ListExpr, TupleExpr, DictExpr, SetExpr, IndexExpr, GeneratorExpr, ListComprehension, SetComprehension, DictionaryComprehension, ConditionalExpr, TypeApplication, LambdaExpr, StarExpr, BackquoteExpr, AwaitExpr, ) from mypy.traverser import TraverserVisitor def get_subexpressions(node: Node) -> List[Expression]: visitor = SubexpressionFinder() node.accept(visitor) return visitor.expressions class SubexpressionFinder(TraverserVisitor): def __init__(self) -> None: self.expressions = [] # type: List[Expression] def _visit_leaf(self, o: Expression) -> None: self.add(o) visit_int_expr = _visit_leaf visit_name_expr = _visit_leaf visit_float_expr = _visit_leaf visit_str_expr = _visit_leaf visit_bytes_expr = _visit_leaf visit_unicode_expr = _visit_leaf visit_complex_expr = _visit_leaf visit_ellipsis = _visit_leaf visit_super_expr = _visit_leaf 
visit_type_var_expr = _visit_leaf visit_type_alias_expr = _visit_leaf visit_namedtuple_expr = _visit_leaf visit_typeddict_expr = _visit_leaf visit__promote_expr = _visit_leaf visit_newtype_expr = _visit_leaf def visit_member_expr(self, e: MemberExpr) -> None: self.add(e) super().visit_member_expr(e) def visit_yield_from_expr(self, e: YieldFromExpr) -> None: self.add(e) super().visit_yield_from_expr(e) def visit_yield_expr(self, e: YieldExpr) -> None: self.add(e) super().visit_yield_expr(e) def visit_call_expr(self, e: CallExpr) -> None: self.add(e) super().visit_call_expr(e) def visit_op_expr(self, e: OpExpr) -> None: self.add(e) super().visit_op_expr(e) def visit_comparison_expr(self, e: ComparisonExpr) -> None: self.add(e) super().visit_comparison_expr(e) def visit_slice_expr(self, e: SliceExpr) -> None: self.add(e) super().visit_slice_expr(e) def visit_cast_expr(self, e: CastExpr) -> None: self.add(e) super().visit_cast_expr(e) def visit_reveal_type_expr(self, e: RevealTypeExpr) -> None: self.add(e) super().visit_reveal_type_expr(e) def visit_unary_expr(self, e: UnaryExpr) -> None: self.add(e) super().visit_unary_expr(e) def visit_list_expr(self, e: ListExpr) -> None: self.add(e) super().visit_list_expr(e) def visit_tuple_expr(self, e: TupleExpr) -> None: self.add(e) super().visit_tuple_expr(e) def visit_dict_expr(self, e: DictExpr) -> None: self.add(e) super().visit_dict_expr(e) def visit_set_expr(self, e: SetExpr) -> None: self.add(e) super().visit_set_expr(e) def visit_index_expr(self, e: IndexExpr) -> None: self.add(e) super().visit_index_expr(e) def visit_generator_expr(self, e: GeneratorExpr) -> None: self.add(e) super().visit_generator_expr(e) def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None: self.add(e) super().visit_dictionary_comprehension(e) def visit_list_comprehension(self, e: ListComprehension) -> None: self.add(e) super().visit_list_comprehension(e) def visit_set_comprehension(self, e: SetComprehension) -> None: self.add(e) super().visit_set_comprehension(e) def visit_conditional_expr(self, e: ConditionalExpr) -> None: self.add(e) super().visit_conditional_expr(e) def visit_type_application(self, e: TypeApplication) -> None: self.add(e) super().visit_type_application(e) def visit_lambda_expr(self, e: LambdaExpr) -> None: self.add(e) super().visit_lambda_expr(e) def visit_star_expr(self, e: StarExpr) -> None: self.add(e) super().visit_star_expr(e) def visit_backquote_expr(self, e: BackquoteExpr) -> None: self.add(e) super().visit_backquote_expr(e) def visit_await_expr(self, e: AwaitExpr) -> None: self.add(e) super().visit_await_expr(e) def add(self, e: Expression) -> None: self.expressions.append(e) mypy-0.560/mypy/server/target.py0000644€tŠÔÚ€2›s®0000000116013215007205023031 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterable, Tuple, List, Optional def module_prefix(modules: Iterable[str], target: str) -> Optional[str]: result = split_target(modules, target) if result is None: return None return result[0] def split_target(modules: Iterable[str], target: str) -> Optional[Tuple[str, str]]: remaining = [] # type: List[str] while True: if target in modules: return target, '.'.join(remaining) components = target.rsplit('.', 1) if len(components) == 1: return None target = components[0] remaining.insert(0, components[1]) mypy-0.560/mypy/server/trigger.py0000644€tŠÔÚ€2›s®0000000020613215007205023206 0ustar jukkaDROPBOX\Domain Users00000000000000"""AST triggers that are used for fine-grained dependency handling.""" def 
make_trigger(name: str) -> str: return '<%s>' % name mypy-0.560/mypy/server/update.py0000644€tŠÔÚ€2›s®0000010345313215007206023036 0ustar jukkaDROPBOX\Domain Users00000000000000"""Update build result by incrementally processing changed modules. Use fine-grained dependencies to update targets in other modules that may be affected by externally-visible changes in the changed modules. Terms: * A 'target' is a function definition or the top level of a module. We refer to targets using their fully qualified name (e.g. 'mod.Cls.attr'). Targets are the smallest units of processing during fine-grained incremental checking. * A 'trigger' represents the properties of a part of a program, and it gets triggered/activated when these properties change. For example, '' refers to a module-level function, and it gets triggered if the signature of the function changes, or if if the function is removed. Some program state is maintained across multiple build increments: * The full ASTs of all modules in memory all the time (+ type map). * Maintain a fine-grained dependency map, which is from triggers to targets/triggers. The latter determine what other parts of a program need to be processed again due to an externally visible change to a module. We perform a fine-grained incremental program update like this: * Determine which modules have changes in their source code since the previous build. * Fully process these modules, creating new ASTs and symbol tables for them. Retain the existing ASTs and symbol tables of modules that have no changes in their source code. * Determine which parts of the changed modules have changed. The result is a set of triggered triggers. * Using the dependency map, decide which other targets have become stale and need to be reprocessed. * Replace old ASTs of the modules that we reprocessed earlier with the new ones, but try to retain the identities of original externally visible AST nodes so that we don't (always) need to patch references in the rest of the program. * Semantically analyze and type check the stale targets. * Repeat the previous steps until nothing externally visible has changed. Major todo items: - Support multiple type checking passes """ import os.path from typing import Dict, List, Set, Tuple, Iterable, Union, Optional, Mapping, NamedTuple from mypy.build import ( BuildManager, State, BuildSource, Graph, load_graph, SavedCache, CacheMeta, cache_meta_from_dict, find_module_clear_caches ) from mypy.checker import DeferredNode from mypy.errors import Errors, CompileError from mypy.nodes import ( MypyFile, FuncDef, TypeInfo, Expression, SymbolNode, Var, FuncBase, ClassDef, Decorator, Import, ImportFrom, SymbolTable ) from mypy.options import Options from mypy.types import Type from mypy.server.astdiff import ( snapshot_symbol_table, compare_symbol_table_snapshots, is_identical_type, SnapshotItem ) from mypy.server.astmerge import merge_asts from mypy.server.aststrip import strip_target from mypy.server.deps import get_dependencies, get_dependencies_of_target from mypy.server.target import module_prefix, split_target from mypy.server.trigger import make_trigger # If True, print out debug logging output. DEBUG = False class FineGrainedBuildManager: def __init__(self, manager: BuildManager, graph: Graph) -> None: """Initialize fine-grained build based on a batch build. 
Args: manager: State of the build (mutated by this class) graph: Additional state of the build (only read to initialize state) """ self.manager = manager self.options = manager.options self.previous_modules = get_module_to_path_map(manager) self.deps = get_all_dependencies(manager, graph, self.options) self.previous_targets_with_errors = manager.errors.targets() # Module, if any, that had blocking errors in the last run as (id, path) tuple. # TODO: Handle blocking errors in the initial build self.blocking_error = None # type: Optional[Tuple[str, str]] # Module that we haven't processed yet but that are known to be stale. self.stale = [] # type: List[Tuple[str, str]] mark_all_meta_as_memory_only(graph, manager) manager.saved_cache = preserve_full_cache(graph, manager) self.type_maps = extract_type_maps(graph) def update(self, changed_modules: List[Tuple[str, str]]) -> List[str]: """Update previous build result by processing changed modules. Also propagate changes to other modules as needed, but only process those parts of other modules that are affected by the changes. Retain the existing ASTs and symbol tables of unaffected modules. Create new graph with new State objects, but reuse original BuildManager. Args: changed_modules: Modules changed since the previous update/build; each is a (module id, path) tuple. Includes modified, added and deleted modules. Assume this is correct; it's not validated here. Returns: A list of errors. """ assert changed_modules, 'No changed modules' # Reset global caches for the new build. find_module_clear_caches() changed_modules = dedupe_modules(changed_modules + self.stale) initial_set = {id for id, _ in changed_modules} if DEBUG: print('==== update %s ====' % ', '.join(repr(id) for id, _ in changed_modules)) if self.blocking_error: # Handle blocking errors first. We'll exit as soon as we find a # module that still has blocking errors. if DEBUG: print('existing blocker: %s' % self.blocking_error[0]) changed_modules = dedupe_modules([self.blocking_error] + changed_modules) self.blocking_error = None while changed_modules: next_id, next_path = changed_modules.pop(0) if next_id not in self.previous_modules and next_id not in initial_set: print('skip %r (module not in import graph)' % next_id) continue result = self.update_single(next_id, next_path) messages, remaining, (next_id, next_path), blocker = result changed_modules = [(id, path) for id, path in changed_modules if id != next_id] changed_modules = dedupe_modules(changed_modules + remaining) if blocker: self.blocking_error = (next_id, next_path) self.stale = changed_modules return messages return messages def update_single(self, module: str, path: str) -> Tuple[List[str], List[Tuple[str, str]], Tuple[str, str], bool]: """Update a single modified module. If the module contains imports of previously unseen modules, only process one of the new modules and return the remaining work to be done. Returns: Tuple with these items: - Error messages - Remaining modules to process as (module id, path) tuples - Module which was actually processed as (id, path) tuple - Whether there was a blocking error in the module """ if DEBUG: print('--- update single %r ---' % module) # TODO: If new module brings in other modules, we parse some files multiple times. manager = self.manager previous_modules = self.previous_modules # Record symbol table snaphot of old version the changed module. 
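# A minimal sketch (not part of the mypy code above) of the shape of the
# fine-grained dependency map and how activated triggers select stale targets.
# Triggers use the '<fullname>' form produced by make_trigger; map values are
# targets or further triggers. All names below are hypothetical.
deps_example = {
    '<m.f>': {'n', 'n.g'},        # a change to m.f makes module n and n.g stale
    '<m.C.x>': {'<m.C.meth>'},    # an attribute change can fire another trigger
}
activated = {'<m.f>'}
stale = {t for trig in activated for t in deps_example.get(trig, set())
         if not t.startswith('<')}
assert stale == {'n', 'n.g'}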
old_snapshots = {} # type: Dict[str, Dict[str, SnapshotItem]] if module in manager.modules: snapshot = snapshot_symbol_table(module, manager.modules[module].names) old_snapshots[module] = snapshot manager.errors.reset() result = update_single_isolated(module, path, manager, previous_modules) if isinstance(result, BlockedUpdate): # Blocking error -- just give up module, path, remaining = result self.previous_modules = get_module_to_path_map(manager) return manager.errors.messages(), remaining, (module, path), True assert isinstance(result, NormalUpdate) # Work around #4124 module, path, remaining, tree, graph = result # TODO: What to do with stale dependencies? triggered = calculate_active_triggers(manager, old_snapshots, {module: tree}) if DEBUG: print('triggered:', sorted(triggered)) update_dependencies({module: tree}, self.deps, graph, self.options) propagate_changes_using_dependencies(manager, graph, self.deps, triggered, {module}, self.previous_targets_with_errors, graph) # Preserve state needed for the next update. self.previous_targets_with_errors = manager.errors.targets() # If deleted, module won't be in the graph. if module in graph: # Generate metadata so that we can reuse the AST in the next run. graph[module].write_cache() for id, state in graph.items(): # Look up missing ASTs from saved cache. if state.tree is None and id in manager.saved_cache: meta, tree, type_map = manager.saved_cache[id] state.tree = tree mark_all_meta_as_memory_only(graph, manager) manager.saved_cache = preserve_full_cache(graph, manager) self.previous_modules = get_module_to_path_map(manager) self.type_maps = extract_type_maps(graph) return manager.errors.messages(), remaining, (module, path), False def mark_all_meta_as_memory_only(graph: Dict[str, State], manager: BuildManager) -> None: for id, state in graph.items(): if id in manager.saved_cache: # Don't look at disk. old = manager.saved_cache[id] manager.saved_cache[id] = (old[0]._replace(memory_only=True), old[1], old[2]) def get_all_dependencies(manager: BuildManager, graph: Dict[str, State], options: Options) -> Dict[str, Set[str]]: """Return the fine-grained dependency map for an entire build.""" deps = {} # type: Dict[str, Set[str]] update_dependencies(manager.modules, deps, graph, options) return deps # The result of update_single_isolated when no blockers, with these items: # # - Id of the changed module (can be different from the module argument) # - Path of the changed module # - New AST for the changed module (None if module was deleted) # - The entire updated build graph # - Remaining changed modules that are not processed yet as (module id, path) # tuples (non-empty if the original changed module imported other new # modules) NormalUpdate = NamedTuple('NormalUpdate', [('module', str), ('path', str), ('remaining', List[Tuple[str, str]]), ('tree', Optional[MypyFile]), ('graph', Graph)]) # The result of update_single_isolated when there is a blocking error. Items # are similar to NormalUpdate (but there are fewer). BlockedUpdate = NamedTuple('BlockedUpdate', [('module', str), ('path', str), ('remaining', List[Tuple[str, str]])]) UpdateResult = Union[NormalUpdate, BlockedUpdate] def update_single_isolated(module: str, path: str, manager: BuildManager, previous_modules: Dict[str, str]) -> UpdateResult: """Build a new version of one changed module only. Don't propagate changes to elsewhere in the program. Raise CompleError on encountering a blocking error. 
Args: module: Changed module (modified, created or deleted) path: Path of the changed module manager: Build manager graph: Build graph Returns a named tuple describing the result (see above for details). """ if module in manager.modules: assert_equivalent_paths(path, manager.modules[module].path) old_modules = dict(manager.modules) sources = get_sources(previous_modules, [(module, path)]) invalidate_stale_cache_entries(manager.saved_cache, [(module, path)]) manager.missing_modules = set() try: graph = load_graph(sources, manager) except CompileError as err: # Parse error somewhere in the program -- a blocker assert err.module_with_blocker if err.module_with_blocker != module: # Blocker is in a fresh module. Delete the state of the original target module # since it will be stale. # # TODO: It would be more efficient to store the original target module path = manager.modules[module].path del manager.modules[module] remaining_modules = [(module, path)] else: remaining_modules = [] return BlockedUpdate(err.module_with_blocker, path, remaining_modules) if not os.path.isfile(path): graph = delete_module(module, graph, manager) return NormalUpdate(module, path, [], None, graph) # Find any other modules brought in by imports. changed_modules = get_all_changed_modules(module, path, previous_modules, graph) # If there are multiple modules to process, only process the last one of them and return # the remaining ones to the caller. Often the last one is going to be imported by # one of the prior modules, making it more efficient to process it first. if len(changed_modules) > 1: module, path = changed_modules.pop() remaining_modules = changed_modules # The remaining modules haven't been processed yet so drop them. for id, _ in remaining_modules: del manager.modules[id] del graph[id] if DEBUG: print('--> %r (newly imported)' % module) else: remaining_modules = [] state = graph[module] # Process the changed file. state.parse_file() # TODO: state.fix_suppressed_dependencies()? try: state.semantic_analysis() except CompileError as err: # There was a blocking error, so module AST is incomplete. Restore old modules. manager.modules.clear() manager.modules.update(old_modules) del graph[module] return BlockedUpdate(module, path, remaining_modules) state.semantic_analysis_pass_three() state.semantic_analysis_apply_patches() # Merge old and new ASTs. assert state.tree is not None, "file must be at least parsed" new_modules = {module: state.tree} # type: Dict[str, Optional[MypyFile]] replace_modules_with_new_variants(manager, graph, old_modules, new_modules) # Perform type checking. state.type_check_first_pass() state.type_check_second_pass() state.finish_passes() # TODO: state.write_cache()? # TODO: state.mark_as_rechecked()? 
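# A minimal sketch (not part of the mypy code above) summarizing the order in
# which update_single_isolated reprocesses one changed module; each entry names
# a step performed above (most are State methods).
SINGLE_MODULE_UPDATE_STEPS = [
    'load_graph / parse_file',            # fresh AST for the changed module
    'semantic_analysis',                  # may raise a blocking CompileError
    'semantic_analysis_pass_three',
    'semantic_analysis_apply_patches',
    'replace_modules_with_new_variants',  # merge new AST into old node identities
    'type_check_first_pass',
    'type_check_second_pass',
    'finish_passes',
]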
graph[module] = state return NormalUpdate(module, path, remaining_modules, state.tree, graph) def assert_equivalent_paths(path1: str, path2: str) -> None: path1 = os.path.normpath(path1) path2 = os.path.normpath(path2) assert path1 == path2, '%s != %s' % (path1, path2) def delete_module(module_id: str, graph: Dict[str, State], manager: BuildManager) -> Dict[str, State]: # TODO: Deletion of a package # TODO: Remove deps for the module (this only affects memory use, not correctness) assert module_id not in graph new_graph = graph.copy() del manager.modules[module_id] if module_id in manager.saved_cache: del manager.saved_cache[module_id] components = module_id.split('.') if len(components) > 1: parent = manager.modules['.'.join(components[:-1])] if components[-1] in parent.names: del parent.names[components[-1]] return new_graph def dedupe_modules(modules: List[Tuple[str, str]]) -> List[Tuple[str, str]]: seen = set() # type: Set[str] result = [] for id, path in modules: if id not in seen: seen.add(id) result.append((id, path)) return result def get_module_to_path_map(manager: BuildManager) -> Dict[str, str]: return {module: node.path for module, node in manager.modules.items()} def get_sources(modules: Dict[str, str], changed_modules: List[Tuple[str, str]]) -> List[BuildSource]: # TODO: Race condition when reading from the file system; we should only read each # bit of external state once during a build to have a consistent view of the world items = sorted(modules.items(), key=lambda x: x[0]) sources = [BuildSource(path, id, None) for id, path in items if os.path.isfile(path)] for id, path in changed_modules: if os.path.isfile(path) and id not in modules: sources.append(BuildSource(path, id, None)) return sources def get_all_changed_modules(root_module: str, root_path: str, old_modules: Dict[str, str], new_graph: Dict[str, State]) -> List[Tuple[str, str]]: changed_set = {root_module} changed_modules = [(root_module, root_path)] for st in new_graph.values(): if st.id not in old_modules and st.id not in changed_set: assert st.path changed_set.add(st.id) changed_modules.append((st.id, st.path)) return changed_modules def preserve_full_cache(graph: Graph, manager: BuildManager) -> SavedCache: """Preserve every module with an AST in the graph, including modules with errors.""" saved_cache = {} for id, state in graph.items(): assert state.id == id if state.tree is not None: meta = state.meta if meta is None: # No metadata, likely because of an error. We still want to retain the AST. # There is no corresponding JSON so create partial "memory-only" metadata. assert state.path dep_prios = state.dependency_priorities() meta = memory_only_cache_meta( id, state.path, state.dependencies, state.suppressed, list(state.child_modules), dep_prios, state.source_hash, state.ignore_all, manager) else: meta = meta._replace(memory_only=True) saved_cache[id] = (meta, state.tree, state.type_map()) return saved_cache def memory_only_cache_meta(id: str, path: str, dependencies: List[str], suppressed: List[str], child_modules: List[str], dep_prios: List[int], source_hash: str, ignore_all: bool, manager: BuildManager) -> CacheMeta: """Create cache metadata for module that doesn't have a JSON cache files. JSON cache files aren't written for modules with errors, but we want to still cache them in fine-grained incremental mode. """ options = manager.options.clone_for_module(id) # Note that we omit attributes related to the JSON files. 
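# A minimal usage sketch (not part of the mypy code above) of dedupe_modules,
# defined earlier in this file: the first (id, path) pair for each module id is
# kept and the original order is preserved. The paths are hypothetical.
assert dedupe_modules([('m', 'm.py'), ('n', 'n.py'), ('m', 'other/m.py')]) == [
    ('m', 'm.py'), ('n', 'n.py')]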
meta = {'id': id, 'path': path, 'memory_only': True, # Important bit: don't expect JSON files to exist 'hash': source_hash, 'dependencies': dependencies, 'suppressed': suppressed, 'child_modules': child_modules, 'options': options.select_options_affecting_cache(), 'dep_prios': dep_prios, 'interface_hash': '', 'version_id': manager.version_id, 'ignore_all': ignore_all, } return cache_meta_from_dict(meta, '') def invalidate_stale_cache_entries(cache: SavedCache, changed_modules: List[Tuple[str, str]]) -> None: for name, _ in changed_modules: if name in cache: del cache[name] def verify_dependencies(state: State, manager: BuildManager) -> None: """Report errors for import targets in module that don't exist.""" for dep in state.dependencies + state.suppressed: # TODO: ancestors? if dep not in manager.modules: assert state.tree line = find_import_line(state.tree, dep) or 1 assert state.path manager.module_not_found(state.path, state.id, line, dep) def find_import_line(node: MypyFile, target: str) -> Optional[int]: for imp in node.imports: if isinstance(imp, Import): for name, _ in imp.ids: if name == target: return imp.line if isinstance(imp, ImportFrom): if imp.id == target: return imp.line # TODO: Relative imports for name, _ in imp.names: if '%s.%s' % (imp.id, name) == target: return imp.line # TODO: ImportAll return None def update_dependencies(new_modules: Mapping[str, Optional[MypyFile]], deps: Dict[str, Set[str]], graph: Dict[str, State], options: Options) -> None: for id, node in new_modules.items(): if node is None: continue if '/typeshed/' in node.path: # We don't track changes to typeshed -- the assumption is that they are only changed # as part of mypy updates, which will invalidate everything anyway. # # TODO: Not a reliable test, as we could have a package named typeshed. # TODO: Consider relaxing this -- maybe allow some typeshed changes to be tracked. continue module_deps = get_dependencies(target=node, type_map=graph[id].type_map(), python_version=options.python_version) for trigger, targets in module_deps.items(): deps.setdefault(trigger, set()).update(targets) def calculate_active_triggers(manager: BuildManager, old_snapshots: Dict[str, Dict[str, SnapshotItem]], new_modules: Dict[str, Optional[MypyFile]]) -> Set[str]: """Determine activated triggers by comparing old and new symbol tables. For example, if only the signature of function m.f is different in the new symbol table, return {''}. """ names = set() # type: Set[str] for id in new_modules: snapshot1 = old_snapshots.get(id) if snapshot1 is None: names.add(id) snapshot1 = {} new = new_modules[id] if new is None: snapshot2 = snapshot_symbol_table(id, SymbolTable()) names.add(id) else: snapshot2 = snapshot_symbol_table(id, new.names) names |= compare_symbol_table_snapshots(id, snapshot1, snapshot2) return {make_trigger(name) for name in names} def replace_modules_with_new_variants( manager: BuildManager, graph: Dict[str, State], old_modules: Dict[str, MypyFile], new_modules: Dict[str, Optional[MypyFile]]) -> None: """Replace modules with newly builds versions. Retain the identities of externally visible AST nodes in the old ASTs so that references to the affected modules from other modules will still be valid (unless something was deleted or replaced with an incompatible definition, in which case there will be dangling references that will be handled by propagate_changes_using_dependencies). 
""" for id in new_modules: new_module = new_modules[id] if id in old_modules and new_module is not None: merge_asts(old_modules[id], old_modules[id].names, new_module, new_module.names) manager.modules[id] = old_modules[id] def propagate_changes_using_dependencies( manager: BuildManager, graph: Dict[str, State], deps: Dict[str, Set[str]], triggered: Set[str], up_to_date_modules: Set[str], targets_with_errors: Set[str], modules: Iterable[str]) -> None: # TODO: Multiple type checking passes # TODO: Restrict the number of iterations to some maximum to avoid infinite loops # Propagate changes until nothing visible has changed during the last # iteration. while triggered or targets_with_errors: todo = find_targets_recursive(triggered, deps, manager.modules, up_to_date_modules) # Also process targets that used to have errors, as otherwise some # errors might be lost. for target in targets_with_errors: id = module_prefix(modules, target) if id is not None and id not in up_to_date_modules: if id not in todo: todo[id] = set() if DEBUG: print('process', target) todo[id].update(lookup_target(manager.modules, target)) triggered = set() # TODO: Preserve order (set is not optimal) for id, nodes in sorted(todo.items(), key=lambda x: x[0]): assert id not in up_to_date_modules triggered |= reprocess_nodes(manager, graph, id, nodes, deps) # Changes elsewhere may require us to reprocess modules that were # previously considered up to date. For example, there may be a # dependency loop that loops back to an originally processed module. up_to_date_modules = set() targets_with_errors = set() if DEBUG: print('triggered:', list(triggered)) def find_targets_recursive( triggers: Set[str], deps: Dict[str, Set[str]], modules: Dict[str, MypyFile], up_to_date_modules: Set[str]) -> Dict[str, Set[DeferredNode]]: """Find names of all targets that need to reprocessed, given some triggers. Returns: Dictionary from module id to a set of stale targets. """ result = {} # type: Dict[str, Set[DeferredNode]] worklist = triggers processed = set() # type: Set[str] # Find AST nodes corresponding to each target. # # TODO: Don't rely on a set, since the items are in an unpredictable order. while worklist: processed |= worklist current = worklist worklist = set() for target in current: if target.startswith('<'): worklist |= deps.get(target, set()) - processed else: module_id = module_prefix(modules, target) if module_id is None: # Deleted module. continue if module_id in up_to_date_modules: # Already processed. continue if module_id not in result: result[module_id] = set() if DEBUG: print('process', target) deferred = lookup_target(modules, target) result[module_id].update(deferred) return result def reprocess_nodes(manager: BuildManager, graph: Dict[str, State], module_id: str, nodeset: Set[DeferredNode], deps: Dict[str, Set[str]]) -> Set[str]: """Reprocess a set of nodes within a single module. Return fired triggers. """ if module_id not in manager.saved_cache or module_id not in graph: if DEBUG: print('%s not in saved cache or graph (blocking errors or deleted?)' % module_id) return set() file_node = manager.modules[module_id] def key(node: DeferredNode) -> str: fullname = node.node.fullname() if isinstance(node.node, FuncDef) and fullname is None: assert node.node.info is not None fullname = '%s.%s' % (node.node.info.fullname(), node.node.name()) return fullname # Some nodes by full name so that the order of processing is deterministic. nodes = sorted(nodeset, key=key) # Strip semantic analysis information. 
for deferred in nodes: strip_target(deferred.node) semantic_analyzer = manager.semantic_analyzer # Second pass of semantic analysis. We don't redo the first pass, because it only # does local things that won't go stale. for deferred in nodes: with semantic_analyzer.file_context( file_node=file_node, fnam=file_node.path, options=manager.options, active_type=deferred.active_typeinfo): manager.semantic_analyzer.refresh_partial(deferred.node) # Third pass of semantic analysis. for deferred in nodes: with semantic_analyzer.file_context( file_node=file_node, fnam=file_node.path, options=manager.options, active_type=deferred.active_typeinfo): manager.semantic_analyzer_pass3.refresh_partial(deferred.node) # Keep track of potentially affected attribute types before type checking. old_types_map = get_enclosing_namespace_types(nodes) # Type check. meta, file_node, type_map = manager.saved_cache[module_id] graph[module_id].tree = file_node graph[module_id].type_checker().type_map = type_map graph[module_id].type_checker().check_second_pass(nodes) # TODO: check return value # Check if any attribute types were changed and need to be propagated further. new_triggered = get_triggered_namespace_items(old_types_map) # Dependencies may have changed. update_deps(module_id, nodes, graph, deps, manager.options) # Report missing imports. verify_dependencies(graph[module_id], manager) return new_triggered NamespaceNode = Union[TypeInfo, MypyFile] def get_enclosing_namespace_types(nodes: List[DeferredNode]) -> Dict[NamespaceNode, Dict[str, Type]]: types = {} # type: Dict[NamespaceNode, Dict[str, Type]] for deferred in nodes: info = deferred.active_typeinfo if info: target = info # type: Optional[NamespaceNode] elif isinstance(deferred.node, MypyFile): target = deferred.node else: target = None if target and target not in types: local_types = {name: node.node.type for name, node in target.names.items() if isinstance(node.node, Var) and node.node.type} types[target] = local_types return types def get_triggered_namespace_items(old_types_map: Dict[NamespaceNode, Dict[str, Type]]) -> Set[str]: new_triggered = set() for namespace_node, old_types in old_types_map.items(): for name, node in namespace_node.names.items(): if (name in old_types and (not isinstance(node.node, Var) or node.node.type and not is_identical_type(node.node.type, old_types[name]))): # Type checking a method changed an attribute type. new_triggered.add(make_trigger('{}.{}'.format(namespace_node.fullname(), name))) return new_triggered def update_deps(module_id: str, nodes: List[DeferredNode], graph: Dict[str, State], deps: Dict[str, Set[str]], options: Options) -> None: for deferred in nodes: node = deferred.node type_map = graph[module_id].type_map() new_deps = get_dependencies_of_target(module_id, node, type_map, options.python_version) for trigger, targets in new_deps.items(): deps.setdefault(trigger, set()).update(targets) def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNode]: """Look up a target by fully-qualified name.""" items = split_target(modules, target) if items is None: # Deleted target return [] module, rest = items if rest: components = rest.split('.') else: components = [] node = modules[module] # type: Optional[SymbolNode] file = None # type: Optional[MypyFile] active_class = None active_class_name = None for c in components: if isinstance(node, TypeInfo): active_class = node active_class_name = node.name() # TODO: Is it possible for the assertion to fail? 
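# A minimal sketch (not part of the mypy code above) of how lookup_target splits
# a fully qualified target into a module prefix and remaining components, in the
# spirit of mypy.server.target.split_target. The module set is hypothetical.
def split(modules: set, target: str):
    remaining = []  # type: list
    while target not in modules:
        if '.' not in target:
            return None
        target, _, last = target.rpartition('.')
        remaining.insert(0, last)
    return target, '.'.join(remaining)

assert split({'pkg.mod'}, 'pkg.mod.Cls.method') == ('pkg.mod', 'Cls.method')
assert split({'pkg.mod'}, 'other.mod.f') is None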
if isinstance(node, MypyFile): file = node assert isinstance(node, (MypyFile, TypeInfo)) if c not in node.names: # Deleted target return [] node = node.names[c].node if isinstance(node, TypeInfo): # A ClassDef target covers the body of the class and everything defined # within it. To get the body we include the entire surrounding target, # typically a module top-level, since we don't support processing class # bodies as separate entitites for simplicity. assert file is not None result = [DeferredNode(file, None, None)] for name, symnode in node.names.items(): node = symnode.node if isinstance(node, FuncDef): result.extend(lookup_target(modules, target + '.' + name)) return result if isinstance(node, Decorator): # Decorator targets actually refer to the function definition only. node = node.func assert isinstance(node, (FuncDef, MypyFile)), 'unexpected type: %s' % type(node) return [DeferredNode(node, active_class_name, active_class)] def extract_type_maps(graph: Graph) -> Dict[str, Dict[Expression, Type]]: return {id: state.type_map() for id, state in graph.items()} mypy-0.560/mypy/sharedparse.py0000644€tŠÔÚ€2›s®0000000340013215007205022535 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional """Shared logic between our three mypy parser files.""" MAGIC_METHODS = { "__abs__", "__add__", "__and__", "__call__", "__cmp__", "__complex__", "__contains__", "__del__", "__delattr__", "__delitem__", "__divmod__", "__div__", "__divmod__", "__enter__", "__exit__", "__eq__", "__floordiv__", "__float__", "__ge__", "__getattr__", "__getattribute__", "__getitem__", "__gt__", "__hex__", "__iadd__", "__iand__", "__idiv__", "__ifloordiv__", "__ilshift__", "__imod__", "__imul__", "__init__", "__init_subclass__", "__int__", "__invert__", "__ior__", "__ipow__", "__irshift__", "__isub__", "__iter__", "__ixor__", "__le__", "__len__", "__long__", "__lshift__", "__lt__", "__mod__", "__mul__", "__ne__", "__neg__", "__new__", "__nonzero__", "__oct__", "__or__", "__pos__", "__pow__", "__radd__", "__rand__", "__rdiv__", "__repr__", "__reversed__", "__rfloordiv__", "__rlshift__", "__rmod__", "__rmul__", "__ror__", "__rpow__", "__rrshift__", "__rshift__", "__rsub__", "__rxor__", "__setattr__", "__setitem__", "__str__", "__sub__", "__unicode__", "__xor__", } MAGIC_METHODS_ALLOWING_KWARGS = { "__init__", "__init_subclass__", "__new__", "__call__", } MAGIC_METHODS_POS_ARGS_ONLY = MAGIC_METHODS - MAGIC_METHODS_ALLOWING_KWARGS def special_function_elide_names(name: str) -> bool: return name in MAGIC_METHODS_POS_ARGS_ONLY def argument_elide_name(name: Optional[str]) -> bool: return name is not None and name.startswith("__") mypy-0.560/mypy/solve.py0000644€tŠÔÚ€2›s®0000000540413215007205021372 0ustar jukkaDROPBOX\Domain Users00000000000000"""Type inference constraint solving""" from typing import List, Dict, Optional from collections import defaultdict from mypy.types import Type, NoneTyp, AnyType, UninhabitedType, TypeVarId, TypeOfAny from mypy.constraints import Constraint, SUPERTYPE_OF from mypy.join import join_types from mypy.meet import meet_types from mypy.subtypes import is_subtype from mypy import experiments def solve_constraints(vars: List[TypeVarId], constraints: List[Constraint], strict: bool =True) -> List[Optional[Type]]: """Solve type constraints. Return the best type(s) for type variables; each type can be None if the value of the variable could not be solved. If a variable has no constraints, if strict=True then arbitrarily pick NoneTyp as the value of the type variable. 
If strict=False, pick AnyType. """ # Collect a list of constraints for each type variable. cmap = defaultdict(list) # type: Dict[TypeVarId, List[Constraint]] for con in constraints: cmap[con.type_var].append(con) res = [] # type: List[Optional[Type]] # Solve each type variable separately. for tvar in vars: bottom = None # type: Optional[Type] top = None # type: Optional[Type] candidate = None # type: Optional[Type] # Process each constraint separately, and calculate the lower and upper # bounds based on constraints. Note that we assume that the constraint # targets do not have constraint references. for c in cmap.get(tvar, []): if c.op == SUPERTYPE_OF: if bottom is None: bottom = c.target else: bottom = join_types(bottom, c.target) else: if top is None: top = c.target else: top = meet_types(top, c.target) if isinstance(top, AnyType) or isinstance(bottom, AnyType): source_any = top if isinstance(top, AnyType) else bottom assert isinstance(source_any, AnyType) res.append(AnyType(TypeOfAny.from_another_any, source_any=source_any)) continue elif bottom is None: if top: candidate = top else: # No constraints for type variable -- 'UninhabitedType' is the most specific type. if strict: candidate = UninhabitedType() candidate.ambiguous = True else: candidate = AnyType(TypeOfAny.special_form) elif top is None: candidate = bottom elif is_subtype(bottom, top): candidate = bottom else: candidate = None res.append(candidate) return res mypy-0.560/mypy/stats.py0000644€tŠÔÚ€2›s®0000002533213215007205021402 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utilities for calculating and reporting statistics about types.""" import cgi import os.path import typing from collections import Counter from typing import Dict, List, cast, Tuple, Optional from mypy.traverser import TraverserVisitor from mypy.typeanal import collect_all_inner_types from mypy.types import ( Type, AnyType, Instance, FunctionLike, TupleType, TypeVarType, TypeQuery, CallableType, TypeOfAny ) from mypy import nodes from mypy.nodes import ( Expression, FuncDef, TypeApplication, AssignmentStmt, NameExpr, CallExpr, MypyFile, MemberExpr, OpExpr, ComparisonExpr, IndexExpr, UnaryExpr, YieldFromExpr, RefExpr, ClassDef ) TYPE_EMPTY = 0 TYPE_UNANALYZED = 1 # type of non-typechecked code TYPE_PRECISE = 2 TYPE_IMPRECISE = 3 TYPE_ANY = 4 precision_names = [ 'empty', 'unanalyzed', 'precise', 'imprecise', 'any', ] class StatisticsVisitor(TraverserVisitor): def __init__(self, inferred: bool, filename: str, typemap: Optional[Dict[Expression, Type]] = None, all_nodes: bool = False, visit_untyped_defs: bool = True) -> None: self.inferred = inferred self.filename = filename self.typemap = typemap self.all_nodes = all_nodes self.visit_untyped_defs = visit_untyped_defs self.num_precise_exprs = 0 self.num_imprecise_exprs = 0 self.num_any_exprs = 0 self.num_simple_types = 0 self.num_generic_types = 0 self.num_tuple_types = 0 self.num_function_types = 0 self.num_typevar_types = 0 self.num_complex_types = 0 self.num_any_types = 0 self.line = -1 self.line_map = {} # type: Dict[int, int] self.type_of_any_counter = Counter() # type: typing.Counter[TypeOfAny] self.any_line_map = {} # type: Dict[int, List[AnyType]] self.output = [] # type: List[str] TraverserVisitor.__init__(self) def visit_func_def(self, o: FuncDef) -> None: self.line = o.line if len(o.expanded) > 1 and o.expanded != [o] * len(o.expanded): if o in o.expanded: print('{}:{}: ERROR: cycle in function expansion; skipping'.format(self.filename, o.get_line())) return for defn in o.expanded: 
self.visit_func_def(cast(FuncDef, defn)) else: if o.type: sig = cast(CallableType, o.type) arg_types = sig.arg_types if (sig.arg_names and sig.arg_names[0] == 'self' and not self.inferred): arg_types = arg_types[1:] for arg in arg_types: self.type(arg) self.type(sig.ret_type) elif self.all_nodes: self.record_line(self.line, TYPE_ANY) if not o.is_dynamic() or self.visit_untyped_defs: super().visit_func_def(o) def visit_class_def(self, o: ClassDef) -> None: # Override this method because we don't want to analyze base_type_exprs (base_type_exprs # are base classes in a class declaration). # While base_type_exprs are technically expressions, type analyzer does not visit them and # they are not in the typemap. for d in o.decorators: d.accept(self) o.defs.accept(self) def visit_type_application(self, o: TypeApplication) -> None: self.line = o.line for t in o.types: self.type(t) super().visit_type_application(o) def visit_assignment_stmt(self, o: AssignmentStmt) -> None: self.line = o.line if (isinstance(o.rvalue, nodes.CallExpr) and isinstance(o.rvalue.analyzed, nodes.TypeVarExpr)): # Type variable definition -- not a real assignment. return if o.type: self.type(o.type) elif self.inferred and not self.all_nodes: # if self.all_nodes is set, lvalues will be visited later for lvalue in o.lvalues: if isinstance(lvalue, nodes.TupleExpr): items = lvalue.items elif isinstance(lvalue, nodes.ListExpr): items = lvalue.items else: items = [lvalue] for item in items: if isinstance(item, RefExpr) and item.is_inferred_def: if self.typemap is not None: self.type(self.typemap.get(item)) super().visit_assignment_stmt(o) def visit_name_expr(self, o: NameExpr) -> None: self.process_node(o) super().visit_name_expr(o) def visit_yield_from_expr(self, o: YieldFromExpr) -> None: if o.expr: o.expr.accept(self) def visit_call_expr(self, o: CallExpr) -> None: self.process_node(o) if o.analyzed: o.analyzed.accept(self) else: o.callee.accept(self) for a in o.args: a.accept(self) def visit_member_expr(self, o: MemberExpr) -> None: self.process_node(o) super().visit_member_expr(o) def visit_op_expr(self, o: OpExpr) -> None: self.process_node(o) super().visit_op_expr(o) def visit_comparison_expr(self, o: ComparisonExpr) -> None: self.process_node(o) super().visit_comparison_expr(o) def visit_index_expr(self, o: IndexExpr) -> None: self.process_node(o) super().visit_index_expr(o) def visit_unary_expr(self, o: UnaryExpr) -> None: self.process_node(o) super().visit_unary_expr(o) def process_node(self, node: Expression) -> None: if self.all_nodes: if self.typemap is not None: self.line = node.line self.type(self.typemap.get(node)) def type(self, t: Optional[Type]) -> None: if not t: # If an expression does not have a type, it is often due to dead code. # Don't count these because there can be an unanalyzed value on a line with other # analyzed expressions, which overwrite the TYPE_UNANALYZED. self.record_line(self.line, TYPE_UNANALYZED) return if isinstance(t, AnyType) and t.type_of_any == TypeOfAny.special_form: # This is not a real Any type, so don't collect stats for it. return if isinstance(t, AnyType): self.log(' !! Any type around line %d' % self.line) self.num_any_exprs += 1 self.record_line(self.line, TYPE_ANY) elif ((not self.all_nodes and is_imprecise(t)) or (self.all_nodes and is_imprecise2(t))): self.log(' !! 
Imprecise type around line %d' % self.line) self.num_imprecise_exprs += 1 self.record_line(self.line, TYPE_IMPRECISE) else: self.num_precise_exprs += 1 self.record_line(self.line, TYPE_PRECISE) for typ in collect_all_inner_types(t) + [t]: if isinstance(typ, AnyType): if typ.type_of_any == TypeOfAny.from_another_any: assert typ.source_any assert typ.source_any.type_of_any != TypeOfAny.from_another_any typ = typ.source_any self.type_of_any_counter[typ.type_of_any] += 1 self.num_any_types += 1 if self.line in self.any_line_map: self.any_line_map[self.line].append(typ) else: self.any_line_map[self.line] = [typ] elif isinstance(typ, Instance): if typ.args: if any(is_complex(arg) for arg in typ.args): self.num_complex_types += 1 else: self.num_generic_types += 1 else: self.num_simple_types += 1 elif isinstance(typ, FunctionLike): self.num_function_types += 1 elif isinstance(typ, TupleType): if any(is_complex(item) for item in typ.items): self.num_complex_types += 1 else: self.num_tuple_types += 1 elif isinstance(typ, TypeVarType): self.num_typevar_types += 1 def log(self, string: str) -> None: self.output.append(string) def record_line(self, line: int, precision: int) -> None: self.line_map[line] = max(precision, self.line_map.get(line, TYPE_EMPTY)) def dump_type_stats(tree: MypyFile, path: str, inferred: bool = False, typemap: Optional[Dict[Expression, Type]] = None) -> None: if is_special_module(path): return print(path) visitor = StatisticsVisitor(inferred, filename=tree.fullname(), typemap=typemap) tree.accept(visitor) for line in visitor.output: print(line) print(' ** precision **') print(' precise ', visitor.num_precise_exprs) print(' imprecise', visitor.num_imprecise_exprs) print(' any ', visitor.num_any_exprs) print(' ** kinds **') print(' simple ', visitor.num_simple_types) print(' generic ', visitor.num_generic_types) print(' function ', visitor.num_function_types) print(' tuple ', visitor.num_tuple_types) print(' TypeVar ', visitor.num_typevar_types) print(' complex ', visitor.num_complex_types) print(' any ', visitor.num_any_types) def is_special_module(path: str) -> bool: return os.path.basename(path) in ('abc.pyi', 'typing.pyi', 'builtins.pyi') def is_imprecise(t: Type) -> bool: return t.accept(HasAnyQuery()) class HasAnyQuery(TypeQuery[bool]): def __init__(self) -> None: super().__init__(any) def visit_any(self, t: AnyType) -> bool: return True def visit_instance(self, t: Instance) -> bool: if t.type.fullname() == 'builtins.tuple': return True else: return super().visit_instance(t) def is_imprecise2(t: Type) -> bool: return t.accept(HasAnyQuery2()) class HasAnyQuery2(HasAnyQuery): def visit_callable_type(self, t: CallableType) -> bool: # We don't want to flag references to functions with some Any # argument types (etc.) since they generally don't mean trouble. 
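# A minimal sketch (not part of the mypy code above) of the record_line logic:
# each source line keeps the highest precision code seen on it, so a single
# imprecise or Any expression dominates precise ones on the same line.
def record(line_map: dict, line: int, precision: int) -> None:
    line_map[line] = max(precision, line_map.get(line, 0))  # 0 == TYPE_EMPTY

lm = {}  # type: dict
record(lm, 10, 2)   # TYPE_PRECISE
record(lm, 10, 4)   # TYPE_ANY
assert lm[10] == 4  # the line is reported as 'any'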
return False def is_generic(t: Type) -> bool: return isinstance(t, Instance) and bool(t.args) def is_complex(t: Type) -> bool: return is_generic(t) or isinstance(t, (FunctionLike, TupleType, TypeVarType)) def ensure_dir_exists(dir: str) -> None: if not os.path.exists(dir): os.makedirs(dir) mypy-0.560/mypy/strconv.py0000644€tŠÔÚ€2›s®0000005003113215007205021734 0ustar jukkaDROPBOX\Domain Users00000000000000"""Conversion of parse tree nodes to strings.""" import re import os from typing import Any, List, Tuple, Optional, Union, Sequence, Dict from mypy.util import short_type, IdMapper import mypy.nodes from mypy.visitor import NodeVisitor class StrConv(NodeVisitor[str]): """Visitor for converting a node to a human-readable string. For example, an MypyFile node from program '1' is converted into something like this: MypyFile:1( fnam ExpressionStmt:1( IntExpr(1))) """ def __init__(self, show_ids: bool = False) -> None: self.show_ids = show_ids self.id_mapper = None # type: Optional[IdMapper] if show_ids: self.id_mapper = IdMapper() def get_id(self, o: object) -> Optional[int]: if self.id_mapper: return self.id_mapper.id(o) return None def format_id(self, o: object) -> str: if self.id_mapper: return '<{}>'.format(self.get_id(o)) else: return '' def dump(self, nodes: Sequence[object], obj: 'mypy.nodes.Context') -> str: """Convert a list of items to a multiline pretty-printed string. The tag is produced from the type name of obj and its line number. See mypy.util.dump_tagged for a description of the nodes argument. """ tag = short_type(obj) + ':' + str(obj.get_line()) if self.show_ids: assert self.id_mapper is not None tag += '<{}>'.format(self.get_id(obj)) return dump_tagged(nodes, tag, self) def func_helper(self, o: 'mypy.nodes.FuncItem') -> List[object]: """Return a list in a format suitable for dump() that represents the arguments and the body of a function. The caller can then decorate the array with information specific to methods, global functions or anonymous functions. """ args = [] # type: List[Union[mypy.nodes.Var, Tuple[str, List[mypy.nodes.Node]]]] extra = [] # type: List[Tuple[str, List[mypy.nodes.Var]]] for arg in o.arguments: kind = arg.kind # type: int if kind in (mypy.nodes.ARG_POS, mypy.nodes.ARG_NAMED): args.append(arg.variable) elif kind in (mypy.nodes.ARG_OPT, mypy.nodes.ARG_NAMED_OPT): assert arg.initializer is not None args.append(('default', [arg.variable, arg.initializer])) elif kind == mypy.nodes.ARG_STAR: extra.append(('VarArg', [arg.variable])) elif kind == mypy.nodes.ARG_STAR2: extra.append(('DictVarArg', [arg.variable])) a = [] # type: List[Any] if args: a.append(('Args', args)) if o.type: a.append(o.type) if o.is_generator: a.append('Generator') a.extend(extra) a.append(o.body) return a # Top-level structures def visit_mypy_file(self, o: 'mypy.nodes.MypyFile') -> str: # Skip implicit definitions. a = [o.defs] # type: List[Any] if o.is_bom: a.insert(0, 'BOM') # Omit path to special file with name "main". This is used to simplify # test case descriptions; the file "main" is used by default in many # test cases. if o.path is not None and o.path != 'main': # Insert path. Normalize directory separators to / to unify test # case# output in all platforms. 
a.insert(0, o.path.replace(os.sep, '/')) if o.ignored_lines: a.append('IgnoredLines(%s)' % ', '.join(str(line) for line in sorted(o.ignored_lines))) return self.dump(a, o) def visit_import(self, o: 'mypy.nodes.Import') -> str: a = [] for id, as_id in o.ids: if as_id is not None: a.append('{} : {}'.format(id, as_id)) else: a.append(id) return 'Import:{}({})'.format(o.line, ', '.join(a)) def visit_import_from(self, o: 'mypy.nodes.ImportFrom') -> str: a = [] for name, as_name in o.names: if as_name is not None: a.append('{} : {}'.format(name, as_name)) else: a.append(name) return 'ImportFrom:{}({}, [{}])'.format(o.line, "." * o.relative + o.id, ', '.join(a)) def visit_import_all(self, o: 'mypy.nodes.ImportAll') -> str: return 'ImportAll:{}({})'.format(o.line, "." * o.relative + o.id) # Definitions def visit_func_def(self, o: 'mypy.nodes.FuncDef') -> str: a = self.func_helper(o) a.insert(0, o.name()) arg_kinds = {arg.kind for arg in o.arguments} if len(arg_kinds & {mypy.nodes.ARG_NAMED, mypy.nodes.ARG_NAMED_OPT}) > 0: a.insert(1, 'MaxPos({})'.format(o.max_pos)) if o.is_abstract: a.insert(-1, 'Abstract') if o.is_static: a.insert(-1, 'Static') if o.is_class: a.insert(-1, 'Class') if o.is_property: a.insert(-1, 'Property') return self.dump(a, o) def visit_overloaded_func_def(self, o: 'mypy.nodes.OverloadedFuncDef') -> str: a = o.items[:] # type: Any if o.type: a.insert(0, o.type) if o.impl: a.insert(0, o.impl) return self.dump(a, o) def visit_class_def(self, o: 'mypy.nodes.ClassDef') -> str: a = [o.name, o.defs.body] # Display base types unless they are implicitly just builtins.object # (in this case base_type_exprs is empty). if o.base_type_exprs: if o.info and o.info.bases: a.insert(1, ('BaseType', o.info.bases)) else: a.insert(1, ('BaseTypeExpr', o.base_type_exprs)) if o.type_vars: a.insert(1, ('TypeVars', o.type_vars)) if o.metaclass: a.insert(1, 'Metaclass({})'.format(o.metaclass)) if o.decorators: a.insert(1, ('Decorators', o.decorators)) if o.info and o.info._promote: a.insert(1, 'Promote({})'.format(o.info._promote)) if o.info and o.info.tuple_type: a.insert(1, ('TupleType', [o.info.tuple_type])) if o.info and o.info.fallback_to_any: a.insert(1, 'FallbackToAny') return self.dump(a, o) def visit_var(self, o: 'mypy.nodes.Var') -> str: lst = '' # Add :nil line number tag if no line number is specified to remain # compatible with old test case descriptions that assume this. 
if o.line < 0: lst = ':nil' return 'Var' + lst + '(' + o.name() + ')' def visit_global_decl(self, o: 'mypy.nodes.GlobalDecl') -> str: return self.dump([o.names], o) def visit_nonlocal_decl(self, o: 'mypy.nodes.NonlocalDecl') -> str: return self.dump([o.names], o) def visit_decorator(self, o: 'mypy.nodes.Decorator') -> str: return self.dump([o.var, o.decorators, o.func], o) # Statements def visit_block(self, o: 'mypy.nodes.Block') -> str: return self.dump(o.body, o) def visit_expression_stmt(self, o: 'mypy.nodes.ExpressionStmt') -> str: return self.dump([o.expr], o) def visit_assignment_stmt(self, o: 'mypy.nodes.AssignmentStmt') -> str: a = [] # type: List[Any] if len(o.lvalues) > 1: a = [('Lvalues', o.lvalues)] else: a = [o.lvalues[0]] a.append(o.rvalue) if o.type: a.append(o.type) return self.dump(a, o) def visit_operator_assignment_stmt(self, o: 'mypy.nodes.OperatorAssignmentStmt') -> str: return self.dump([o.op, o.lvalue, o.rvalue], o) def visit_while_stmt(self, o: 'mypy.nodes.WhileStmt') -> str: a = [o.expr, o.body] # type: List[Any] if o.else_body: a.append(('Else', o.else_body.body)) return self.dump(a, o) def visit_for_stmt(self, o: 'mypy.nodes.ForStmt') -> str: a = [] # type: List[Any] if o.is_async: a.append(('Async', '')) a.append(o.index) if o.index_type: a.append(o.index_type) a.extend([o.expr, o.body]) if o.else_body: a.append(('Else', o.else_body.body)) return self.dump(a, o) def visit_return_stmt(self, o: 'mypy.nodes.ReturnStmt') -> str: return self.dump([o.expr], o) def visit_if_stmt(self, o: 'mypy.nodes.IfStmt') -> str: a = [] # type: List[Any] for i in range(len(o.expr)): a.append(('If', [o.expr[i]])) a.append(('Then', o.body[i].body)) if not o.else_body: return self.dump(a, o) else: return self.dump([a, ('Else', o.else_body.body)], o) def visit_break_stmt(self, o: 'mypy.nodes.BreakStmt') -> str: return self.dump([], o) def visit_continue_stmt(self, o: 'mypy.nodes.ContinueStmt') -> str: return self.dump([], o) def visit_pass_stmt(self, o: 'mypy.nodes.PassStmt') -> str: return self.dump([], o) def visit_raise_stmt(self, o: 'mypy.nodes.RaiseStmt') -> str: return self.dump([o.expr, o.from_expr], o) def visit_assert_stmt(self, o: 'mypy.nodes.AssertStmt') -> str: if o.msg is not None: return self.dump([o.expr, o.msg], o) else: return self.dump([o.expr], o) def visit_await_expr(self, o: 'mypy.nodes.AwaitExpr') -> str: return self.dump([o.expr], o) def visit_del_stmt(self, o: 'mypy.nodes.DelStmt') -> str: return self.dump([o.expr], o) def visit_try_stmt(self, o: 'mypy.nodes.TryStmt') -> str: a = [o.body] # type: List[Any] for i in range(len(o.vars)): a.append(o.types[i]) if o.vars[i]: a.append(o.vars[i]) a.append(o.handlers[i]) if o.else_body: a.append(('Else', o.else_body.body)) if o.finally_body: a.append(('Finally', o.finally_body.body)) return self.dump(a, o) def visit_with_stmt(self, o: 'mypy.nodes.WithStmt') -> str: a = [] # type: List[Any] if o.is_async: a.append(('Async', '')) for i in range(len(o.expr)): a.append(('Expr', [o.expr[i]])) if o.target[i]: a.append(('Target', [o.target[i]])) if o.target_type: a.append(o.target_type) return self.dump(a + [o.body], o) def visit_print_stmt(self, o: 'mypy.nodes.PrintStmt') -> str: a = o.args[:] # type: List[Any] if o.target: a.append(('Target', [o.target])) if o.newline: a.append('Newline') return self.dump(a, o) def visit_exec_stmt(self, o: 'mypy.nodes.ExecStmt') -> str: return self.dump([o.expr, o.globals, o.locals], o) # Expressions # Simple expressions def visit_int_expr(self, o: 'mypy.nodes.IntExpr') -> str: return 
'IntExpr({})'.format(o.value) def visit_str_expr(self, o: 'mypy.nodes.StrExpr') -> str: return 'StrExpr({})'.format(self.str_repr(o.value)) def visit_bytes_expr(self, o: 'mypy.nodes.BytesExpr') -> str: return 'BytesExpr({})'.format(self.str_repr(o.value)) def visit_unicode_expr(self, o: 'mypy.nodes.UnicodeExpr') -> str: return 'UnicodeExpr({})'.format(self.str_repr(o.value)) def str_repr(self, s: str) -> str: s = re.sub(r'\\u[0-9a-fA-F]{4}', lambda m: '\\' + m.group(0), s) return re.sub('[^\\x20-\\x7e]', lambda m: r'\u%.4x' % ord(m.group(0)), s) def visit_float_expr(self, o: 'mypy.nodes.FloatExpr') -> str: return 'FloatExpr({})'.format(o.value) def visit_complex_expr(self, o: 'mypy.nodes.ComplexExpr') -> str: return 'ComplexExpr({})'.format(o.value) def visit_ellipsis(self, o: 'mypy.nodes.EllipsisExpr') -> str: return 'Ellipsis' def visit_star_expr(self, o: 'mypy.nodes.StarExpr') -> str: return self.dump([o.expr], o) def visit_name_expr(self, o: 'mypy.nodes.NameExpr') -> str: pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_inferred_def, o.node) return short_type(o) + '(' + pretty + ')' def pretty_name(self, name: str, kind: Optional[int], fullname: Optional[str], is_inferred_def: bool, target_node: 'Optional[mypy.nodes.Node]' = None) -> str: n = name if is_inferred_def: n += '*' if target_node: id = self.format_id(target_node) else: id = '' if kind == mypy.nodes.GDEF or (fullname != name and fullname is not None): # Append fully qualified name for global references. n += ' [{}{}]'.format(fullname, id) elif kind == mypy.nodes.LDEF: # Add tag to signify a local reference. n += ' [l{}]'.format(id) elif kind == mypy.nodes.MDEF: # Add tag to signify a member reference. n += ' [m{}]'.format(id) else: n += id return n def visit_member_expr(self, o: 'mypy.nodes.MemberExpr') -> str: pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_inferred_def, o.node) return self.dump([o.expr, pretty], o) def visit_yield_expr(self, o: 'mypy.nodes.YieldExpr') -> str: return self.dump([o.expr], o) def visit_yield_from_expr(self, o: 'mypy.nodes.YieldFromExpr') -> str: if o.expr: return self.dump([o.expr.accept(self)], o) else: return self.dump([], o) def visit_call_expr(self, o: 'mypy.nodes.CallExpr') -> str: if o.analyzed: return o.analyzed.accept(self) args = [] # type: List[mypy.nodes.Expression] extra = [] # type: List[Union[str, Tuple[str, List[Any]]]] for i, kind in enumerate(o.arg_kinds): if kind in [mypy.nodes.ARG_POS, mypy.nodes.ARG_STAR]: args.append(o.args[i]) if kind == mypy.nodes.ARG_STAR: extra.append('VarArg') elif kind == mypy.nodes.ARG_NAMED: extra.append(('KwArgs', [o.arg_names[i], o.args[i]])) elif kind == mypy.nodes.ARG_STAR2: extra.append(('DictVarArg', [o.args[i]])) else: raise RuntimeError('unknown kind %d' % kind) a = [o.callee, ('Args', args)] # type: List[Any] return self.dump(a + extra, o) def visit_op_expr(self, o: 'mypy.nodes.OpExpr') -> str: return self.dump([o.op, o.left, o.right], o) def visit_comparison_expr(self, o: 'mypy.nodes.ComparisonExpr') -> str: return self.dump([o.operators, o.operands], o) def visit_cast_expr(self, o: 'mypy.nodes.CastExpr') -> str: return self.dump([o.expr, o.type], o) def visit_reveal_type_expr(self, o: 'mypy.nodes.RevealTypeExpr') -> str: return self.dump([o.expr], o) def visit_unary_expr(self, o: 'mypy.nodes.UnaryExpr') -> str: return self.dump([o.op, o.expr], o) def visit_list_expr(self, o: 'mypy.nodes.ListExpr') -> str: return self.dump(o.items, o) def visit_dict_expr(self, o: 'mypy.nodes.DictExpr') -> str: return self.dump([[k, v] 
for k, v in o.items], o) def visit_set_expr(self, o: 'mypy.nodes.SetExpr') -> str: return self.dump(o.items, o) def visit_tuple_expr(self, o: 'mypy.nodes.TupleExpr') -> str: return self.dump(o.items, o) def visit_index_expr(self, o: 'mypy.nodes.IndexExpr') -> str: if o.analyzed: return o.analyzed.accept(self) return self.dump([o.base, o.index], o) def visit_super_expr(self, o: 'mypy.nodes.SuperExpr') -> str: return self.dump([o.name], o) def visit_type_application(self, o: 'mypy.nodes.TypeApplication') -> str: return self.dump([o.expr, ('Types', o.types)], o) def visit_type_var_expr(self, o: 'mypy.nodes.TypeVarExpr') -> str: import mypy.types a = [] # type: List[Any] if o.variance == mypy.nodes.COVARIANT: a += ['Variance(COVARIANT)'] if o.variance == mypy.nodes.CONTRAVARIANT: a += ['Variance(CONTRAVARIANT)'] if o.values: a += [('Values', o.values)] if not mypy.types.is_named_instance(o.upper_bound, 'builtins.object'): a += ['UpperBound({})'.format(o.upper_bound)] return self.dump(a, o) def visit_type_alias_expr(self, o: 'mypy.nodes.TypeAliasExpr') -> str: return 'TypeAliasExpr({})'.format(o.type) def visit_namedtuple_expr(self, o: 'mypy.nodes.NamedTupleExpr') -> str: return 'NamedTupleExpr:{}({}, {})'.format(o.line, o.info.name(), o.info.tuple_type) def visit_enum_call_expr(self, o: 'mypy.nodes.EnumCallExpr') -> str: return 'EnumCallExpr:{}({}, {})'.format(o.line, o.info.name(), o.items) def visit_typeddict_expr(self, o: 'mypy.nodes.TypedDictExpr') -> str: return 'TypedDictExpr:{}({})'.format(o.line, o.info.name()) def visit__promote_expr(self, o: 'mypy.nodes.PromoteExpr') -> str: return 'PromoteExpr:{}({})'.format(o.line, o.type) def visit_newtype_expr(self, o: 'mypy.nodes.NewTypeExpr') -> str: return 'NewTypeExpr:{}({}, {})'.format(o.line, o.name, self.dump([o.old_type], o)) def visit_lambda_expr(self, o: 'mypy.nodes.LambdaExpr') -> str: a = self.func_helper(o) return self.dump(a, o) def visit_generator_expr(self, o: 'mypy.nodes.GeneratorExpr') -> str: condlists = o.condlists if any(o.condlists) else None return self.dump([o.left_expr, o.indices, o.sequences, condlists], o) def visit_list_comprehension(self, o: 'mypy.nodes.ListComprehension') -> str: return self.dump([o.generator], o) def visit_set_comprehension(self, o: 'mypy.nodes.SetComprehension') -> str: return self.dump([o.generator], o) def visit_dictionary_comprehension(self, o: 'mypy.nodes.DictionaryComprehension') -> str: condlists = o.condlists if any(o.condlists) else None return self.dump([o.key, o.value, o.indices, o.sequences, condlists], o) def visit_conditional_expr(self, o: 'mypy.nodes.ConditionalExpr') -> str: return self.dump([('Condition', [o.cond]), o.if_expr, o.else_expr], o) def visit_slice_expr(self, o: 'mypy.nodes.SliceExpr') -> str: a = [o.begin_index, o.end_index, o.stride] # type: List[Any] if not a[0]: a[0] = '' if not a[1]: a[1] = '' return self.dump(a, o) def visit_backquote_expr(self, o: 'mypy.nodes.BackquoteExpr') -> str: return self.dump([o.expr], o) def visit_temp_node(self, o: 'mypy.nodes.TempNode') -> str: return self.dump([o.type], o) def dump_tagged(nodes: Sequence[object], tag: Optional[str], str_conv: 'StrConv') -> str: """Convert an array into a pretty-printed multiline string representation. The format is tag( item1.. 
itemN) Individual items are formatted like this: - arrays are flattened - pairs (str, array) are converted recursively, so that str is the tag - other items are converted to strings and indented """ from mypy.types import Type, TypeStrVisitor a = [] # type: List[str] if tag: a.append(tag + '(') for n in nodes: if isinstance(n, list): if n: a.append(dump_tagged(n, None, str_conv)) elif isinstance(n, tuple): s = dump_tagged(n[1], n[0], str_conv) a.append(indent(s, 2)) elif isinstance(n, mypy.nodes.Node): a.append(indent(n.accept(str_conv), 2)) elif isinstance(n, Type): a.append(indent(n.accept(TypeStrVisitor(str_conv.id_mapper)), 2)) elif n: a.append(indent(str(n), 2)) if tag: a[-1] += ')' return '\n'.join(a) def indent(s: str, n: int) -> str: """Indent all the lines in s (separated by newlines) by n spaces.""" s = ' ' * n + s s = s.replace('\n', '\n' + ' ' * n) return s mypy-0.560/mypy/stubgen.py0000644€tŠÔÚ€2›s®0000011673013215007205021716 0ustar jukkaDROPBOX\Domain Users00000000000000"""Generator of dynamically typed draft stubs for arbitrary modules. Basic usage: $ mkdir out $ stubgen urllib.parse => Generate out/urllib/parse.pyi. For Python 2 mode, use --py2: $ stubgen --py2 textwrap For C modules, you can get more precise function signatures by parsing .rst (Sphinx) documentation for extra information. For this, use the --docpath option: $ scripts/stubgen --docpath /Python-3.4.2/Doc/library curses => Generate out/curses.py. Use "stubgen -h" for more help. Note: You should verify the generated stubs manually. TODO: - support stubs for C modules in Python 2 mode - support non-default Python interpreters in Python 3 mode - if using --no-import, look for __all__ in the AST - infer some return types, such as no return statement with value -> None - detect 'if PY2 / is_py2' etc. 
and either preserve those or only include Python 2 or 3 case - maybe export more imported names if there is no __all__ (this affects ssl.SSLError, for example) - a quick and dirty heuristic would be to turn this on if a module has something like 'from x import y as _y' - we don't seem to always detect properties ('closed' in 'io', for example) """ import glob import importlib import json import os.path import pkgutil import subprocess import sys import textwrap import traceback from collections import defaultdict from typing import ( Any, List, Dict, Tuple, Iterable, Iterator, Mapping, Optional, NamedTuple, Set, Union, cast ) import mypy.build import mypy.parse import mypy.errors import mypy.traverser from mypy import defaults from mypy.nodes import ( Expression, IntExpr, UnaryExpr, StrExpr, BytesExpr, NameExpr, FloatExpr, MemberExpr, TupleExpr, ListExpr, ComparisonExpr, CallExpr, IndexExpr, EllipsisExpr, ClassDef, MypyFile, Decorator, AssignmentStmt, IfStmt, ImportAll, ImportFrom, Import, FuncDef, FuncBase, TempNode, ARG_POS, ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT, ) from mypy.stubgenc import parse_all_signatures, find_unique_signatures, generate_stub_for_c_module from mypy.stubutil import is_c_module, write_header from mypy.options import Options as MypyOptions from mypy.types import Type, TypeStrVisitor, AnyType, CallableType, UnboundType, NoneTyp, TupleType from mypy.visitor import NodeVisitor Options = NamedTuple('Options', [('pyversion', Tuple[int, int]), ('no_import', bool), ('doc_dir', str), ('search_path', List[str]), ('interpreter', str), ('modules', List[str]), ('ignore_errors', bool), ('recursive', bool), ('include_private', bool), ('output_dir', str), ]) class CantImport(Exception): pass def generate_stub_for_module(module: str, output_dir: str, quiet: bool = False, add_header: bool = False, sigs: Dict[str, str] = {}, class_sigs: Dict[str, str] = {}, pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION, no_import: bool = False, search_path: List[str] = [], interpreter: str = sys.executable, include_private: bool = False) -> None: target = module.replace('.', '/') try: result = find_module_path_and_all(module=module, pyversion=pyversion, no_import=no_import, search_path=search_path, interpreter=interpreter) except CantImport: if not quiet: traceback.print_exc() print('Failed to import %s; skipping it' % module) return if not result: # C module target = os.path.join(output_dir, target + '.pyi') generate_stub_for_c_module(module_name=module, target=target, add_header=add_header, sigs=sigs, class_sigs=class_sigs) else: # Python module module_path, module_all = result if os.path.basename(module_path) == '__init__.py': target += '/__init__.pyi' else: target += '.pyi' target = os.path.join(output_dir, target) generate_stub(module_path, output_dir, module_all, target=target, add_header=add_header, module=module, pyversion=pyversion, include_private=include_private) if not quiet: print('Created %s' % target) def find_module_path_and_all(module: str, pyversion: Tuple[int, int], no_import: bool, search_path: List[str], interpreter: str) -> Optional[Tuple[str, Optional[List[str]]]]: """Find module and determine __all__. Return None if the module is a C module. Return (module_path, __all__) if Python module. Raise an exception or exit if failed. """ module_path = None # type: Optional[str] if not no_import: if pyversion[0] == 2: module_path, module_all = load_python_module_info(module, interpreter) else: # TODO: Support custom interpreters. 
try: mod = importlib.import_module(module) except Exception: raise CantImport(module) if is_c_module(mod): return None module_path = mod.__file__ module_all = getattr(mod, '__all__', None) else: # Find module by going through search path. module_path = mypy.build.find_module(module, ['.'] + search_path) if not module_path: raise SystemExit( "Can't find module '{}' (consider using --search-path)".format(module)) module_all = None return module_path, module_all def load_python_module_info(module: str, interpreter: str) -> Tuple[str, Optional[List[str]]]: """Return tuple (module path, module __all__) for a Python 2 module. The path refers to the .py/.py[co] file. The second tuple item is None if the module doesn't define __all__. Exit if the module can't be imported or if it's a C extension module. """ cmd_template = '{interpreter} -c "%s"'.format(interpreter=interpreter) code = ("import importlib, json; mod = importlib.import_module('%s'); " "print(mod.__file__); print(json.dumps(getattr(mod, '__all__', None)))") % module try: output_bytes = subprocess.check_output(cmd_template % code, shell=True) except subprocess.CalledProcessError: print("Can't import module %s" % module, file=sys.stderr) sys.exit(1) output = output_bytes.decode('ascii').strip().splitlines() module_path = output[0] if not module_path.endswith(('.py', '.pyc', '.pyo')): raise SystemExit('%s looks like a C module; they are not supported for Python 2' % module) if module_path.endswith(('.pyc', '.pyo')): module_path = module_path[:-1] module_all = json.loads(output[1]) return module_path, module_all def generate_stub(path: str, output_dir: str, _all_: Optional[List[str]] = None, target: Optional[str] = None, add_header: bool = False, module: Optional[str] = None, pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION, include_private: bool = False ) -> None: with open(path, 'rb') as f: source = f.read() options = MypyOptions() options.python_version = pyversion try: ast = mypy.parse.parse(source, fnam=path, module=module, errors=None, options=options) except mypy.errors.CompileError as e: # Syntax error! for m in e.messages: sys.stderr.write('%s\n' % m) sys.exit(1) gen = StubGenerator(_all_, pyversion=pyversion, include_private=include_private) ast.accept(gen) if not target: target = os.path.join(output_dir, os.path.basename(path)) subdir = os.path.dirname(target) if subdir and not os.path.isdir(subdir): os.makedirs(subdir) with open(target, 'w') as file: if add_header: write_header(file, module, pyversion=pyversion) file.write(''.join(gen.output())) # What was generated previously in the stub file. We keep track of these to generate # nicely formatted output (add empty line between non-empty classes, for example). 
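# Aside: the Python 2 path above shells out to a separate interpreter so that
# the target module's __file__ and __all__ can be read without importing the
# module into this process.  A minimal sketch of that round trip follows
# (illustrative only; 'demo_module_info' is a hypothetical helper, not part
# of stubgen):

import json as _json
import subprocess as _subprocess
import sys as _sys
from typing import List as _List, Optional as _Optional, Tuple as _Tuple


def demo_module_info(module: str,
                     interpreter: str = _sys.executable
                     ) -> '_Tuple[str, _Optional[_List[str]]]':
    # Ask the given interpreter to print the module path and a JSON-encoded
    # __all__ (or null), then parse the two output lines.
    code = ("import importlib, json; mod = importlib.import_module(%r); "
            "print(mod.__file__); print(json.dumps(getattr(mod, '__all__', None)))"
            % module)
    out = _subprocess.check_output([interpreter, '-c', code]).decode().splitlines()
    return out[0], _json.loads(out[1])

# Usage would look like: path, exported = demo_module_info('json'), where
# 'exported' is the module's __all__ list or None if it defines none.
# (End of aside; the generation-state constants described above follow.)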
EMPTY = 'EMPTY' FUNC = 'FUNC' CLASS = 'CLASS' EMPTY_CLASS = 'EMPTY_CLASS' VAR = 'VAR' NOT_IN_ALL = 'NOT_IN_ALL' class AnnotationPrinter(TypeStrVisitor): def __init__(self, stubgen: 'StubGenerator') -> None: super().__init__() self.stubgen = stubgen def visit_unbound_type(self, t: UnboundType)-> str: s = t.name base = s.split('.')[0] self.stubgen.import_tracker.require_name(base) if t.args != []: s += '[{}]'.format(self.list_str(t.args)) return s def visit_none_type(self, t: NoneTyp) -> str: return "None" class AliasPrinter(NodeVisitor[str]): def __init__(self, stubgen: 'StubGenerator') -> None: self.stubgen = stubgen super().__init__() def visit_call_expr(self, node: CallExpr) -> str: # Call expressions are not usually types, but we also treat `X = TypeVar(...)` as a # type alias that has to be preserved (even if TypeVar is not the same as an alias) callee = node.callee.accept(self) args = [] for name, arg, kind in zip(node.arg_names, node.args, node.arg_kinds): if kind == ARG_POS: args.append(arg.accept(self)) elif kind == ARG_STAR: args.append('*' + arg.accept(self)) elif kind == ARG_STAR2: args.append('**' + arg.accept(self)) elif kind == ARG_NAMED: args.append('{}={}'.format(name, arg.accept(self))) else: raise ValueError("Unknown argument kind %d in call" % kind) return "{}({})".format(callee, ", ".join(args)) def visit_name_expr(self, node: NameExpr) -> str: self.stubgen.import_tracker.require_name(node.name) return node.name def visit_str_expr(self, node: StrExpr) -> str: return repr(node.value) def visit_index_expr(self, node: IndexExpr) -> str: base = node.base.accept(self) index = node.index.accept(self) return "{}[{}]".format(base, index) def visit_tuple_expr(self, node: TupleExpr) -> str: return ", ".join(n.accept(self) for n in node.items) def visit_list_expr(self, node: ListExpr) -> str: return "[{}]".format(", ".join(n.accept(self) for n in node.items)) def visit_ellipsis(self, node: EllipsisExpr) -> str: return "..." class ImportTracker: def __init__(self) -> None: # module_for['foo'] has the module name where 'foo' was imported from, or None if # 'foo' is a module imported directly; examples # 'from pkg.m import f as foo' ==> module_for['foo'] == 'pkg.m' # 'from m import f' ==> module_for['f'] == 'm' # 'import m' ==> module_for['m'] == None self.module_for = {} # type: Dict[str, Optional[str]] # direct_imports['foo'] is the module path used when the name 'foo' was added to the # namespace. 
# import foo.bar.baz ==> direct_imports['foo'] == 'foo.bar.baz' self.direct_imports = {} # type: Dict[str, str] # reverse_alias['foo'] is the name that 'foo' had originally when imported with an # alias; examples # 'import numpy as np' ==> reverse_alias['np'] == 'numpy' # 'from decimal import Decimal as D' ==> reverse_alias['D'] == 'Decimal' self.reverse_alias = {} # type: Dict[str, str] # required_names is the set of names that are actually used in a type annotation self.required_names = set() # type: Set[str] # Names that should be reexported if they come from another module self.reexports = set() # type: Set[str] def add_import_from(self, module: str, names: List[Tuple[str, Optional[str]]]) -> None: for name, alias in names: self.module_for[alias or name] = module if alias: self.reverse_alias[alias] = name def add_import(self, module: str, alias: Optional[str]=None) -> None: name = module.split('.')[0] self.module_for[alias or name] = None self.direct_imports[name] = module if alias: self.reverse_alias[alias] = name def require_name(self, name: str) -> None: self.required_names.add(name.split('.')[0]) def reexport(self, name: str) -> None: """ Mark a given non qualified name as needed in __all__. This means that in case it comes from a module, it should be imported with an alias even is the alias is the same as the name. """ self.require_name(name) self.reexports.add(name) def import_lines(self) -> List[str]: """ The list of required import lines (as strings with python code) """ result = [] # To summarize multiple names imported from a same module, we collect those # in the `module_map` dictionary, mapping a module path to the list of names that should # be imported from it. the names can also be alias in the form 'original as alias' module_map = defaultdict(list) # type: Mapping[str, List[str]] for name in sorted(self.required_names): # If we haven't seen this name in an import statement, ignore it if name not in self.module_for: continue m = self.module_for[name] if m is not None: # This name was found in a from ... import ... # Collect the name in the module_map if name in self.reverse_alias: name = '{} as {}'.format(self.reverse_alias[name], name) elif name in self.reexports: name = '{} as {}'.format(name, name) module_map[m].append(name) else: # This name was found in an import ... # We can already generate the import line if name in self.reverse_alias: name, alias = self.reverse_alias[name], name result.append("import {} as {}\n".format(self.direct_imports[name], alias)) elif name in self.reexports: assert '.' not in name # Because reexports only has nonqualified names result.append("import {} as {}\n".format(name, name)) else: result.append("import {}\n".format(self.direct_imports[name])) # Now generate all the from ... import ... 
lines collected in module_map for module, names in sorted(module_map.items()): result.append("from {} import {}\n".format(module, ', '.join(sorted(names)))) return result class StubGenerator(mypy.traverser.TraverserVisitor): def __init__(self, _all_: Optional[List[str]], pyversion: Tuple[int, int], include_private: bool = False) -> None: self._all_ = _all_ self._output = [] # type: List[str] self._import_lines = [] # type: List[str] self._indent = '' self._vars = [[]] # type: List[List[str]] self._state = EMPTY self._toplevel_names = [] # type: List[str] self._pyversion = pyversion self._include_private = include_private self.import_tracker = ImportTracker() # Add imports that could be implicitly generated self.import_tracker.add_import_from("collections", [("namedtuple", None)]) typing_imports = "Any Optional TypeVar".split() self.import_tracker.add_import_from("typing", [(t, None) for t in typing_imports]) # Names in __all__ are required for name in _all_ or (): self.import_tracker.reexport(name) def visit_mypy_file(self, o: MypyFile) -> None: super().visit_mypy_file(o) undefined_names = [name for name in self._all_ or [] if name not in self._toplevel_names] if undefined_names: if self._state != EMPTY: self.add('\n') self.add('# Names in __all__ with no definition:\n') for name in sorted(undefined_names): self.add('# %s\n' % name) def visit_func_def(self, o: FuncDef) -> None: if self.is_private_name(o.name()): return if self.is_not_in_all(o.name()): return if self.is_recorded_name(o.name()): return if not self._indent and self._state not in (EMPTY, FUNC): self.add('\n') if not self.is_top_level(): self_inits = find_self_initializers(o) for init, value in self_inits: init_code = self.get_init(init, value) if init_code: self.add(init_code) self.add("%sdef %s(" % (self._indent, o.name())) self.record_name(o.name()) args = [] # type: List[str] for i, arg_ in enumerate(o.arguments): var = arg_.variable kind = arg_.kind name = var.name() annotated_type = o.type.arg_types[i] if isinstance(o.type, CallableType) else None if annotated_type and not ( i == 0 and name == 'self' and isinstance(annotated_type, AnyType)): annotation = ": {}".format(self.print_annotation(annotated_type)) else: annotation = "" if arg_.initializer: initializer = '...' 
if kind in (ARG_NAMED, ARG_NAMED_OPT) and '*' not in args: args.append('*') if not annotation: typename = self.get_str_type_of_node(arg_.initializer, True) annotation = ': {} = ...'.format(typename) else: annotation += '={}'.format(initializer) arg = name + annotation elif kind == ARG_STAR: arg = '*%s%s' % (name, annotation) elif kind == ARG_STAR2: arg = '**%s%s' % (name, annotation) else: arg = name + annotation args.append(arg) retname = None if isinstance(o.type, CallableType): retname = self.print_annotation(o.type.ret_type) elif o.name() == '__init__': retname = 'None' retfield = '' if retname is not None: retfield = ' -> ' + retname self.add(', '.join(args)) self.add("){}: ...\n".format(retfield)) self._state = FUNC def visit_decorator(self, o: Decorator) -> None: if self.is_private_name(o.func.name()): return for decorator in o.decorators: if isinstance(decorator, NameExpr) and decorator.name in ('property', 'staticmethod', 'classmethod'): self.add('%s@%s\n' % (self._indent, decorator.name)) elif (isinstance(decorator, MemberExpr) and decorator.name == 'setter' and isinstance(decorator.expr, NameExpr)): self.add('%s@%s.setter\n' % (self._indent, decorator.expr.name)) super().visit_decorator(o) def visit_class_def(self, o: ClassDef) -> None: sep = None # type: Optional[int] if not self._indent and self._state != EMPTY: sep = len(self._output) self.add('\n') self.add('%sclass %s' % (self._indent, o.name)) self.record_name(o.name) base_types = self.get_base_types(o) if base_types: self.add('(%s)' % ', '.join(base_types)) for base in base_types: self.import_tracker.require_name(base) self.add(':\n') n = len(self._output) self._indent += ' ' self._vars.append([]) super().visit_class_def(o) self._indent = self._indent[:-4] self._vars.pop() self._vars[-1].append(o.name) if len(self._output) == n: if self._state == EMPTY_CLASS and sep is not None: self._output[sep] = '' self._output[-1] = self._output[-1][:-1] + ' ...\n' self._state = EMPTY_CLASS else: self._state = CLASS def get_base_types(self, cdef: ClassDef) -> List[str]: base_types = [] # type: List[str] for base in cdef.base_type_exprs: if isinstance(base, NameExpr): if base.name != 'object': base_types.append(base.name) elif isinstance(base, MemberExpr): modname = get_qualified_name(base.expr) base_types.append('%s.%s' % (modname, base.name)) elif isinstance(base, IndexExpr): p = AliasPrinter(self) base_types.append(base.accept(p)) return base_types def visit_assignment_stmt(self, o: AssignmentStmt) -> None: foundl = [] for lvalue in o.lvalues: if isinstance(lvalue, NameExpr) and self.is_namedtuple(o.rvalue): assert isinstance(o.rvalue, CallExpr) self.process_namedtuple(lvalue, o.rvalue) continue if (self.is_top_level() and isinstance(lvalue, NameExpr) and self.is_type_expression(o.rvalue)): self.process_typealias(lvalue, o.rvalue) continue if isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr): items = lvalue.items if isinstance(o.type, TupleType): annotations = o.type.items # type: Iterable[Optional[Type]] else: annotations = [None] * len(items) else: items = [lvalue] annotations = [o.type] sep = False found = False for item, annotation in zip(items, annotations): if isinstance(item, NameExpr): init = self.get_init(item.name, o.rvalue, annotation) if init: found = True if not sep and not self._indent and \ self._state not in (EMPTY, VAR): init = '\n' + init sep = True self.add(init) self.record_name(item.name) foundl.append(found) if all(foundl): self._state = VAR def is_namedtuple(self, expr: Expression) -> bool: if not 
isinstance(expr, CallExpr): return False callee = expr.callee return ((isinstance(callee, NameExpr) and callee.name.endswith('namedtuple')) or (isinstance(callee, MemberExpr) and callee.name == 'namedtuple')) def process_namedtuple(self, lvalue: NameExpr, rvalue: CallExpr) -> None: self.import_tracker.require_name('namedtuple') if self._state != EMPTY: self.add('\n') name = repr(getattr(rvalue.args[0], 'value', '')) if isinstance(rvalue.args[1], StrExpr): items = repr(rvalue.args[1].value) elif isinstance(rvalue.args[1], ListExpr): list_items = cast(List[StrExpr], rvalue.args[1].items) items = '[%s]' % ', '.join(repr(item.value) for item in list_items) else: items = '' self.add('%s = namedtuple(%s, %s)\n' % (lvalue.name, name, items)) self._state = CLASS def is_type_expression(self, expr: Expression, top_level: bool=True) -> bool: """Return True for things that look like type expressions Used to know if assignments look like typealiases """ # Assignment of TypeVar(...) are passed through if (isinstance(expr, CallExpr) and isinstance(expr.callee, NameExpr) and expr.callee.name == 'TypeVar'): return True elif isinstance(expr, EllipsisExpr): return not top_level elif isinstance(expr, NameExpr): if expr.name in ('True', 'False'): return False elif expr.name == 'None': return not top_level else: return True elif isinstance(expr, IndexExpr) and isinstance(expr.base, NameExpr): if isinstance(expr.index, TupleExpr): indices = expr.index.items else: indices = [expr.index] if expr.base.name == 'Callable' and len(indices) == 2: args, ret = indices if isinstance(args, EllipsisExpr): indices = [ret] elif isinstance(args, ListExpr): indices = args.items + [ret] else: return False return all(self.is_type_expression(i, top_level=False) for i in indices) else: return False def process_typealias(self, lvalue: NameExpr, rvalue: Expression) -> None: p = AliasPrinter(self) self.add("{} = {}\n".format(lvalue.name, rvalue.accept(p))) self.record_name(lvalue.name) self._vars[-1].append(lvalue.name) def visit_if_stmt(self, o: IfStmt) -> None: # Ignore if __name__ == '__main__'. expr = o.expr[0] if (isinstance(expr, ComparisonExpr) and isinstance(expr.operands[0], NameExpr) and isinstance(expr.operands[1], StrExpr) and expr.operands[0].name == '__name__' and '__main__' in expr.operands[1].value): return super().visit_if_stmt(o) def visit_import_all(self, o: ImportAll) -> None: self.add_import_line('from %s%s import *\n' % ('.' * o.relative, o.id)) def visit_import_from(self, o: ImportFrom) -> None: exported_names = set() # type: Set[str] self.import_tracker.add_import_from('.' * o.relative + o.id, o.names) self._vars[-1].extend(alias or name for name, alias in o.names) for name, alias in o.names: self.record_name(alias or name) if self._all_: # Include import froms that import names defined in __all__. names = [name for name, alias in o.names if name in self._all_ and alias is None] exported_names.update(names) else: # Include import from targets that import from a submodule of a package. 
if o.relative: sub_names = [name for name, alias in o.names if alias is None] exported_names.update(sub_names) if o.id: for name in sub_names: self.import_tracker.require_name(name) def visit_import(self, o: Import) -> None: for id, as_id in o.ids: self.import_tracker.add_import(id, as_id) if as_id is None: target_name = id.split('.')[0] else: target_name = as_id self._vars[-1].append(target_name) self.record_name(target_name) def get_init(self, lvalue: str, rvalue: Expression, annotation: Optional[Type] = None) -> Optional[str]: """Return initializer for a variable. Return None if we've generated one already or if the variable is internal. """ if lvalue in self._vars[-1]: # We've generated an initializer already for this variable. return None # TODO: Only do this at module top level. if self.is_private_name(lvalue) or self.is_not_in_all(lvalue): return None self._vars[-1].append(lvalue) if annotation is not None: typename = self.print_annotation(annotation) else: typename = self.get_str_type_of_node(rvalue) has_rhs = not (isinstance(rvalue, TempNode) and rvalue.no_rhs) initializer = " = ..." if has_rhs and not self.is_top_level() else "" return '%s%s: %s%s\n' % (self._indent, lvalue, typename, initializer) def add(self, string: str) -> None: """Add text to generated stub.""" self._output.append(string) def add_typing_import(self, name: str) -> None: """Add a name to be imported from typing, unless it's imported already. The import will be internal to the stub. """ self.import_tracker.require_name(name) def add_import_line(self, line: str) -> None: """Add a line of text to the import section, unless it's already there.""" if line not in self._import_lines: self._import_lines.append(line) def output(self) -> str: """Return the text for the stub.""" imports = '' if self._import_lines: imports += ''.join(self._import_lines) imports += ''.join(self.import_tracker.import_lines()) if imports and self._output: imports += '\n' return imports + ''.join(self._output) def is_not_in_all(self, name: str) -> bool: if self.is_private_name(name): return False if self._all_: return self.is_top_level() and name not in self._all_ return False def is_private_name(self, name: str) -> bool: if self._include_private: return False return name.startswith('_') and (not name.endswith('__') or name in ('__all__', '__author__', '__version__', '__str__', '__repr__', '__getstate__', '__setstate__', '__slots__')) def get_str_type_of_node(self, rvalue: Expression, can_infer_optional: bool = False) -> str: if isinstance(rvalue, IntExpr): return 'int' if isinstance(rvalue, StrExpr): return 'str' if isinstance(rvalue, BytesExpr): return 'bytes' if isinstance(rvalue, FloatExpr): return 'float' if isinstance(rvalue, UnaryExpr) and isinstance(rvalue.expr, IntExpr): return 'int' if isinstance(rvalue, NameExpr) and rvalue.name in ('True', 'False'): return 'bool' if can_infer_optional and \ isinstance(rvalue, NameExpr) and rvalue.name == 'None': self.add_typing_import('Optional') self.add_typing_import('Any') return 'Optional[Any]' self.add_typing_import('Any') return 'Any' def print_annotation(self, t: Type) -> str: printer = AnnotationPrinter(self) return t.accept(printer) def is_top_level(self) -> bool: """Are we processing the top level of a file?""" return self._indent == '' def record_name(self, name: str) -> None: """Mark a name as defined. This only does anything if at the top level of a module. 
""" if self.is_top_level(): self._toplevel_names.append(name) def is_recorded_name(self, name: str) -> bool: """Has this name been recorded previously?""" return self.is_top_level() and name in self._toplevel_names def find_self_initializers(fdef: FuncBase) -> List[Tuple[str, Expression]]: results = [] # type: List[Tuple[str, Expression]] class SelfTraverser(mypy.traverser.TraverserVisitor): def visit_assignment_stmt(self, o: AssignmentStmt) -> None: lvalue = o.lvalues[0] if (isinstance(lvalue, MemberExpr) and isinstance(lvalue.expr, NameExpr) and lvalue.expr.name == 'self'): results.append((lvalue.name, o.rvalue)) fdef.accept(SelfTraverser()) return results def get_qualified_name(o: Expression) -> str: if isinstance(o, NameExpr): return o.name elif isinstance(o, MemberExpr): return '%s.%s' % (get_qualified_name(o.expr), o.name) else: return '' def walk_packages(packages: List[str]) -> Iterator[str]: for package_name in packages: package = __import__(package_name) yield package.__name__ for importer, qualified_name, ispkg in pkgutil.walk_packages(package.__path__, prefix=package.__name__ + ".", onerror=lambda r: None): yield qualified_name def main() -> None: options = parse_options(sys.argv[1:]) if not os.path.isdir(options.output_dir): raise SystemExit('Directory "{}" does not exist'.format(options.output_dir)) if options.recursive and options.no_import: raise SystemExit('recursive stub generation without importing is not currently supported') sigs = {} # type: Any class_sigs = {} # type: Any if options.doc_dir: all_sigs = [] # type: Any all_class_sigs = [] # type: Any for path in glob.glob('%s/*.rst' % options.doc_dir): with open(path) as f: func_sigs, class_sigs = parse_all_signatures(f.readlines()) all_sigs += func_sigs all_class_sigs += class_sigs sigs = dict(find_unique_signatures(all_sigs)) class_sigs = dict(find_unique_signatures(all_class_sigs)) for module in (options.modules if not options.recursive else walk_packages(options.modules)): try: generate_stub_for_module(module, output_dir=options.output_dir, add_header=True, sigs=sigs, class_sigs=class_sigs, pyversion=options.pyversion, no_import=options.no_import, search_path=options.search_path, interpreter=options.interpreter, include_private=options.include_private) except Exception as e: if not options.ignore_errors: raise e else: print("Stub generation failed for", module, file=sys.stderr) def parse_options(args: List[str]) -> Options: # TODO: why not use click and reduce the amount of code to maintain # within this module. 
pyversion = defaults.PYTHON3_VERSION no_import = False recursive = False ignore_errors = False doc_dir = '' search_path = [] # type: List[str] interpreter = '' include_private = False output_dir = 'out' while args and args[0].startswith('-'): if args[0] in '-o': output_dir = args[1] args = args[1:] elif args[0] == '--doc-dir': doc_dir = args[1] args = args[1:] elif args[0] == '--search-path': if not args[1]: usage() search_path = args[1].split(':') args = args[1:] elif args[0] == '-p': interpreter = args[1] args = args[1:] elif args[0] == '--recursive': recursive = True elif args[0] == '--ignore-errors': ignore_errors = True elif args[0] == '--py2': pyversion = defaults.PYTHON2_VERSION elif args[0] == '--no-import': no_import = True elif args[0] == '--include-private': include_private = True elif args[0] in ('-h', '--help'): usage(exit_nonzero=False) else: raise SystemExit('Unrecognized option %s' % args[0]) args = args[1:] if not args: usage() if not interpreter: interpreter = sys.executable if pyversion[0] == 3 else default_python2_interpreter() # Create the output folder if it doesn't already exist. if not os.path.exists(output_dir): os.makedirs(output_dir) return Options(pyversion=pyversion, no_import=no_import, doc_dir=doc_dir, search_path=search_path, interpreter=interpreter, modules=args, ignore_errors=ignore_errors, recursive=recursive, include_private=include_private, output_dir=output_dir) def default_python2_interpreter() -> str: # TODO: Make this do something reasonable in Windows. for candidate in ('/usr/bin/python2', '/usr/bin/python'): if not os.path.exists(candidate): continue output = subprocess.check_output([candidate, '--version'], stderr=subprocess.STDOUT).strip() if b'Python 2' in output: return candidate raise SystemExit("Can't find a Python 2 interpreter -- please use the -p option") def usage(exit_nonzero: bool=True) -> None: usage = textwrap.dedent("""\ usage: stubgen [--py2] [--no-import] [--doc-dir PATH] [--search-path PATH] [-p PATH] [-o PATH] MODULE ... Generate draft stubs for modules. Stubs are generated in directory ./out, to avoid overriding files with manual changes. This directory is assumed to exist. Options: --py2 run in Python 2 mode (default: Python 3 mode) --recursive traverse listed modules to generate inner package modules as well --ignore-errors ignore errors when trying to generate stubs for modules --no-import don't import the modules, just parse and analyze them (doesn't work with C extension modules and doesn't respect __all__) --include-private generate stubs for objects and members considered private (single leading undescore and no trailing underscores) --doc-dir PATH use .rst documentation in PATH (this may result in better stubs in some cases; consider setting this to DIR/Python-X.Y.Z/Doc/library) --search-path PATH specify module search directories, separated by ':' (currently only used if --no-import is given) -p PATH use Python interpreter at PATH (only works for Python 2 right now) -o PATH Change the output folder [default: out] -h, --help print this help message and exit """.rstrip()) if exit_nonzero: # The user made a mistake, so we should return with an error code raise SystemExit(usage) else: # The user asked for help specifically, so we should exit with success print(usage, file=sys.stderr) sys.exit() if __name__ == '__main__': main() mypy-0.560/mypy/stubgenc.py0000644€tŠÔÚ€2›s®0000002034413215007205022054 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub generator for C modules. 
The public interface is via the mypy.stubgen module. """ import importlib import os.path import re from typing import List, Dict, Tuple, Optional from types import ModuleType from mypy.stubutil import ( parse_all_signatures, find_unique_signatures, is_c_module, write_header, infer_sig_from_docstring ) def generate_stub_for_c_module(module_name: str, target: str, add_header: bool = True, sigs: Dict[str, str] = {}, class_sigs: Dict[str, str] = {}, ) -> None: module = importlib.import_module(module_name) assert is_c_module(module), '%s is not a C module' % module_name subdir = os.path.dirname(target) if subdir and not os.path.isdir(subdir): os.makedirs(subdir) functions = [] # type: List[str] done = set() items = sorted(module.__dict__.items(), key=lambda x: x[0]) for name, obj in items: if is_c_function(obj): generate_c_function_stub(module, name, obj, functions, sigs=sigs) done.add(name) types = [] # type: List[str] for name, obj in items: if name.startswith('__') and name.endswith('__'): continue if is_c_type(obj): generate_c_type_stub(module, name, obj, types, sigs=sigs, class_sigs=class_sigs) done.add(name) variables = [] for name, obj in items: if name.startswith('__') and name.endswith('__'): continue if name not in done: type_str = type(obj).__name__ if type_str not in ('int', 'str', 'bytes', 'float', 'bool'): type_str = 'Any' variables.append('%s = ... # type: %s' % (name, type_str)) output = [] for line in variables: output.append(line) if output and functions: output.append('') for line in functions: output.append(line) for line in types: if line.startswith('class') and output and output[-1]: output.append('') output.append(line) output = add_typing_import(output) with open(target, 'w') as file: if add_header: write_header(file, module_name) for line in output: file.write('%s\n' % line) def add_typing_import(output: List[str]) -> List[str]: names = [] for name in ['Any']: if any(re.search(r'\b%s\b' % name, line) for line in output): names.append(name) if names: return ['from typing import %s' % ', '.join(names), ''] + output else: return output[:] def is_c_function(obj: object) -> bool: return type(obj) is type(ord) def is_c_method(obj: object) -> bool: return type(obj) in (type(str.index), type(str.__add__), type(str.__new__)) def is_c_classmethod(obj: object) -> bool: type_str = type(obj).__name__ return type_str == 'classmethod_descriptor' def is_c_type(obj: object) -> bool: return type(obj) is type(int) def generate_c_function_stub(module: ModuleType, name: str, obj: object, output: List[str], self_var: Optional[str] = None, sigs: Dict[str, str] = {}, class_name: Optional[str] = None, class_sigs: Dict[str, str] = {}, ) -> None: if self_var: self_arg = '%s, ' % self_var else: self_arg = '' if (name in ('__new__', '__init__') and name not in sigs and class_name and class_name in class_sigs): sig = class_sigs[class_name] else: docstr = getattr(obj, '__doc__', None) inferred = infer_sig_from_docstring(docstr, name) if inferred: sig = inferred else: if class_name and name not in sigs: sig = infer_method_sig(name) else: sig = sigs.get(name, '(*args, **kwargs)') sig = sig[1:-1] if sig: if sig.split(',', 1)[0] == self_var: self_arg = '' else: self_arg = self_arg.replace(', ', '') output.append('def %s(%s%s): ...' 
% (name, self_arg, sig)) def generate_c_type_stub(module: ModuleType, class_name: str, obj: type, output: List[str], sigs: Dict[str, str] = {}, class_sigs: Dict[str, str] = {}, ) -> None: items = sorted(obj.__dict__.items(), key=lambda x: method_name_sort_key(x[0])) methods = [] done = set() for attr, value in items: if is_c_method(value) or is_c_classmethod(value): done.add(attr) if not is_skipped_attribute(attr): if is_c_classmethod(value): methods.append('@classmethod') self_var = 'cls' else: self_var = 'self' if attr == '__new__': # TODO: We should support __new__. if '__init__' in obj.__dict__: # Avoid duplicate functions if both are present. # But is there any case where .__new__() has a # better signature than __init__() ? continue attr = '__init__' generate_c_function_stub(module, attr, value, methods, self_var, sigs=sigs, class_name=class_name, class_sigs=class_sigs) variables = [] for attr, value in items: if is_skipped_attribute(attr): continue if attr not in done: variables.append('%s = ... # type: Any' % attr) all_bases = obj.mro() if all_bases[-1] is object: # TODO: Is this always object? del all_bases[-1] # remove the class itself all_bases = all_bases[1:] # Remove base classes of other bases as redundant. bases = [] # type: List[type] for base in all_bases: if not any(issubclass(b, base) for b in bases): bases.append(base) if bases: bases_str = '(%s)' % ', '.join(base.__name__ for base in bases) else: bases_str = '' if not methods and not variables: output.append('class %s%s: ...' % (class_name, bases_str)) else: output.append('class %s%s:' % (class_name, bases_str)) for variable in variables: output.append(' %s' % variable) for method in methods: output.append(' %s' % method) def method_name_sort_key(name: str) -> Tuple[int, str]: if name in ('__new__', '__init__'): return (0, name) if name.startswith('__') and name.endswith('__'): return (2, name) return (1, name) def is_skipped_attribute(attr: str) -> bool: return attr in ('__getattribute__', '__str__', '__repr__', '__doc__', '__dict__', '__module__', '__weakref__') # For pickling def infer_method_sig(name: str) -> str: if name.startswith('__') and name.endswith('__'): name = name[2:-2] if name in ('hash', 'iter', 'next', 'sizeof', 'copy', 'deepcopy', 'reduce', 'getinitargs', 'int', 'float', 'trunc', 'complex', 'bool'): return '()' if name == 'getitem': return '(index)' if name == 'setitem': return '(index, object)' if name in ('delattr', 'getattr'): return '(name)' if name == 'setattr': return '(name, value)' if name == 'getstate': return '()' if name == 'setstate': return '(state)' if name in ('eq', 'ne', 'lt', 'le', 'gt', 'ge', 'add', 'radd', 'sub', 'rsub', 'mul', 'rmul', 'mod', 'rmod', 'floordiv', 'rfloordiv', 'truediv', 'rtruediv', 'divmod', 'rdivmod', 'pow', 'rpow'): return '(other)' if name in ('neg', 'pos'): return '()' return '(*args, **kwargs)' mypy-0.560/mypy/stubutil.py0000644€tŠÔÚ€2›s®0000000673713215007205022127 0ustar jukkaDROPBOX\Domain Users00000000000000import re import sys from typing import Any, Optional, Tuple, Sequence, MutableSequence, List, MutableMapping, IO from types import ModuleType # Type Alias for Signatures Sig = Tuple[str, str] def parse_signature(sig: str) -> Optional[Tuple[str, List[str], List[str]]]: m = re.match(r'([.a-zA-Z0-9_]+)\(([^)]*)\)', sig) if not m: return None name = m.group(1) name = name.split('.')[-1] arg_string = m.group(2) if not arg_string.strip(): return (name, [], []) args = [arg.strip() for arg in arg_string.split(',')] fixed = [] optional = [] i = 0 while i < 
len(args): if args[i].startswith('[') or '=' in args[i]: break fixed.append(args[i].rstrip('[')) i += 1 if args[i - 1].endswith('['): break while i < len(args): arg = args[i] arg = arg.strip('[]') arg = arg.split('=')[0] optional.append(arg) i += 1 return (name, fixed, optional) def build_signature(fixed: Sequence[str], optional: Sequence[str]) -> str: args = [] # type: MutableSequence[str] args.extend(fixed) for arg in optional: if arg.startswith('*'): args.append(arg) else: args.append('%s=...' % arg) sig = '(%s)' % ', '.join(args) # Ad-hoc fixes. sig = sig.replace('(self)', '') return sig def parse_all_signatures(lines: Sequence[str]) -> Tuple[List[Sig], List[Sig]]: sigs = [] class_sigs = [] for line in lines: line = line.strip() m = re.match(r'\.\. *(function|method|class) *:: *[a-zA-Z_]', line) if m: sig = line.split('::')[1].strip() parsed = parse_signature(sig) if parsed: name, fixed, optional = parsed if m.group(1) != 'class': sigs.append((name, build_signature(fixed, optional))) else: class_sigs.append((name, build_signature(fixed, optional))) return sorted(sigs), sorted(class_sigs) def find_unique_signatures(sigs: Sequence[Sig]) -> List[Sig]: sig_map = {} # type: MutableMapping[str, List[str]] for name, sig in sigs: sig_map.setdefault(name, []).append(sig) result = [] for name, name_sigs in sig_map.items(): if len(set(name_sigs)) == 1: result.append((name, name_sigs[0])) return sorted(result) def is_c_module(module: ModuleType) -> bool: return '__file__' not in module.__dict__ or module.__dict__['__file__'].endswith('.so') def write_header(file: IO[str], module_name: Optional[str] = None, pyversion: Tuple[int, int] = (3, 5)) -> None: if module_name: if pyversion[0] >= 3: version = '%d.%d' % (sys.version_info.major, sys.version_info.minor) else: version = '2' file.write('# Stubs for %s (Python %s)\n' % (module_name, version)) file.write( '#\n' '# NOTE: This dynamically typed stub was automatically generated by stubgen.\n\n') def infer_sig_from_docstring(docstr: str, name: str) -> Optional[str]: if not docstr: return None docstr = docstr.lstrip() m = re.match(r'%s(\([a-zA-Z0-9_=, ]*\))' % name, docstr) if m: return m.group(1) else: return None mypy-0.560/mypy/subtypes.py0000644€tŠÔÚ€2›s®0000012271313215007205022123 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Optional, Dict, Callable, Tuple, Iterator, Set, Union, cast from contextlib import contextmanager from mypy.types import ( Type, AnyType, UnboundType, TypeVisitor, FormalArgument, NoneTyp, function_type, Instance, TypeVarType, CallableType, TupleType, TypedDictType, UnionType, Overloaded, ErasedType, TypeList, PartialType, DeletedType, UninhabitedType, TypeType, is_named_instance, FunctionLike, TypeOfAny ) import mypy.applytype import mypy.constraints from mypy.erasetype import erase_type # Circular import; done in the function instead. 
# import mypy.solve from mypy import messages, sametypes from mypy.nodes import ( FuncBase, Var, Decorator, OverloadedFuncDef, TypeInfo, CONTRAVARIANT, COVARIANT, ARG_POS, ARG_OPT, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2 ) from mypy.maptype import map_instance_to_supertype from mypy.expandtype import expand_type_by_instance from mypy.sametypes import is_same_type from mypy import experiments # Flags for detected protocol members IS_SETTABLE = 1 IS_CLASSVAR = 2 IS_CLASS_OR_STATIC = 3 TypeParameterChecker = Callable[[Type, Type, int], bool] def check_type_parameter(lefta: Type, righta: Type, variance: int) -> bool: if variance == COVARIANT: return is_subtype(lefta, righta, check_type_parameter) elif variance == CONTRAVARIANT: return is_subtype(righta, lefta, check_type_parameter) else: return is_equivalent(lefta, righta, check_type_parameter) def is_subtype(left: Type, right: Type, type_parameter_checker: TypeParameterChecker = check_type_parameter, *, ignore_pos_arg_names: bool = False, ignore_declared_variance: bool = False) -> bool: """Is 'left' subtype of 'right'? Also consider Any to be a subtype of any type, and vice versa. This recursively applies to components of composite types (List[int] is subtype of List[Any], for example). type_parameter_checker is used to check the type parameters (for example, A with B in is_subtype(C[A], C[B]). The default checks for subtype relation between the type arguments (e.g., A and B), taking the variance of the type var into account. """ if (isinstance(right, AnyType) or isinstance(right, UnboundType) or isinstance(right, ErasedType)): return True elif isinstance(right, UnionType) and not isinstance(left, UnionType): # Normally, when 'left' is not itself a union, the only way # 'left' can be a subtype of the union 'right' is if it is a # subtype of one of the items making up the union. is_subtype_of_item = any(is_subtype(left, item, type_parameter_checker, ignore_pos_arg_names=ignore_pos_arg_names) for item in right.items) # However, if 'left' is a type variable T, T might also have # an upper bound which is itself a union. This case will be # handled below by the SubtypeVisitor. We have to check both # possibilities, to handle both cases like T <: Union[T, U] # and cases like T <: B where B is the upper bound of T and is # a union. (See #2314.) 
if not isinstance(left, TypeVarType): return is_subtype_of_item elif is_subtype_of_item: return True # otherwise, fall through return left.accept(SubtypeVisitor(right, type_parameter_checker, ignore_pos_arg_names=ignore_pos_arg_names, ignore_declared_variance=ignore_declared_variance)) def is_subtype_ignoring_tvars(left: Type, right: Type) -> bool: def ignore_tvars(s: Type, t: Type, v: int) -> bool: return True return is_subtype(left, right, ignore_tvars) def is_equivalent(a: Type, b: Type, type_parameter_checker: TypeParameterChecker = check_type_parameter, *, ignore_pos_arg_names: bool = False ) -> bool: return ( is_subtype(a, b, type_parameter_checker, ignore_pos_arg_names=ignore_pos_arg_names) and is_subtype(b, a, type_parameter_checker, ignore_pos_arg_names=ignore_pos_arg_names)) class SubtypeVisitor(TypeVisitor[bool]): def __init__(self, right: Type, type_parameter_checker: TypeParameterChecker, *, ignore_pos_arg_names: bool = False, ignore_declared_variance: bool = False) -> None: self.right = right self.check_type_parameter = type_parameter_checker self.ignore_pos_arg_names = ignore_pos_arg_names self.ignore_declared_variance = ignore_declared_variance # visit_x(left) means: is left (which is an instance of X) a subtype of # right? def visit_unbound_type(self, left: UnboundType) -> bool: return True def visit_any(self, left: AnyType) -> bool: return True def visit_none_type(self, left: NoneTyp) -> bool: if experiments.STRICT_OPTIONAL: return (isinstance(self.right, NoneTyp) or is_named_instance(self.right, 'builtins.object') or isinstance(self.right, Instance) and self.right.type.is_protocol and not self.right.type.protocol_members) else: return True def visit_uninhabited_type(self, left: UninhabitedType) -> bool: return True def visit_erased_type(self, left: ErasedType) -> bool: return True def visit_deleted_type(self, left: DeletedType) -> bool: return True def visit_instance(self, left: Instance) -> bool: if left.type.fallback_to_any: return True right = self.right if isinstance(right, TupleType) and right.fallback.type.is_enum: return is_subtype(left, right.fallback) if isinstance(right, Instance): if right.type.is_cached_subtype_check(left, right): return True # NOTE: left.type.mro may be None in quick mode if there # was an error somewhere. if left.type.mro is not None: for base in left.type.mro: # TODO: Also pass recursively ignore_declared_variance if base._promote and is_subtype( base._promote, self.right, self.check_type_parameter, ignore_pos_arg_names=self.ignore_pos_arg_names): right.type.record_subtype_cache_entry(left, right) return True rname = right.type.fullname() # Always try a nominal check if possible, # there might be errors that a user wants to silence *once*. if ((left.type.has_base(rname) or rname == 'builtins.object') and not self.ignore_declared_variance): # Map left type to corresponding right instances. 
t = map_instance_to_supertype(left, right.type) nominal = all(self.check_type_parameter(lefta, righta, tvar.variance) for lefta, righta, tvar in zip(t.args, right.args, right.type.defn.type_vars)) if nominal: right.type.record_subtype_cache_entry(left, right) return nominal if right.type.is_protocol and is_protocol_implementation(left, right): return True return False if isinstance(right, TypeType): item = right.item if isinstance(item, TupleType): item = item.fallback if is_named_instance(left, 'builtins.type'): return is_subtype(TypeType(AnyType(TypeOfAny.special_form)), right) if left.type.is_metaclass(): if isinstance(item, AnyType): return True if isinstance(item, Instance): # Special-case enum since we don't have better way of expressing it if (is_named_instance(left, 'enum.EnumMeta') and is_named_instance(item, 'enum.Enum')): return True return is_named_instance(item, 'builtins.object') if isinstance(right, CallableType): # Special case: Instance can be a subtype of Callable. call = find_member('__call__', left, left) if call: return is_subtype(call, right) return False else: return False def visit_type_var(self, left: TypeVarType) -> bool: right = self.right if isinstance(right, TypeVarType) and left.id == right.id: return True return is_subtype(left.upper_bound, self.right) def visit_callable_type(self, left: CallableType) -> bool: right = self.right if isinstance(right, CallableType): return is_callable_subtype( left, right, ignore_pos_arg_names=self.ignore_pos_arg_names) elif isinstance(right, Overloaded): return all(is_subtype(left, item, self.check_type_parameter, ignore_pos_arg_names=self.ignore_pos_arg_names) for item in right.items()) elif isinstance(right, Instance): return is_subtype(left.fallback, right, ignore_pos_arg_names=self.ignore_pos_arg_names) elif isinstance(right, TypeType): # This is unsound, we don't check the __init__ signature. return left.is_type_obj() and is_subtype(left.ret_type, right.item) else: return False def visit_tuple_type(self, left: TupleType) -> bool: right = self.right if isinstance(right, Instance): if is_named_instance(right, 'typing.Sized'): return True elif (is_named_instance(right, 'builtins.tuple') or is_named_instance(right, 'typing.Iterable') or is_named_instance(right, 'typing.Container') or is_named_instance(right, 'typing.Sequence') or is_named_instance(right, 'typing.Reversible')): if right.args: iter_type = right.args[0] else: iter_type = AnyType(TypeOfAny.special_form) return all(is_subtype(li, iter_type) for li in left.items) elif is_subtype(left.fallback, right, self.check_type_parameter): return True return False elif isinstance(right, TupleType): if len(left.items) != len(right.items): return False for l, r in zip(left.items, right.items): if not is_subtype(l, r, self.check_type_parameter): return False if not is_subtype(left.fallback, right.fallback, self.check_type_parameter): return False return True else: return False def visit_typeddict_type(self, left: TypedDictType) -> bool: right = self.right if isinstance(right, Instance): return is_subtype(left.fallback, right, self.check_type_parameter) elif isinstance(right, TypedDictType): if not left.names_are_wider_than(right): return False for name, l, r in left.zip(right): if not is_equivalent(l, r, self.check_type_parameter): return False # Non-required key is not compatible with a required key since # indexing may fail unexpectedly if a required key is missing. 
# Required key is not compatible with a non-required key since # the prior doesn't support 'del' but the latter should support # it. # # NOTE: 'del' support is currently not implemented (#3550). We # don't want to have to change subtyping after 'del' support # lands so here we are anticipating that change. if (name in left.required_keys) != (name in right.required_keys): return False # (NOTE: Fallbacks don't matter.) return True else: return False def visit_overloaded(self, left: Overloaded) -> bool: right = self.right if isinstance(right, Instance): return is_subtype(left.fallback, right) elif isinstance(right, CallableType): for item in left.items(): if is_subtype(item, right, self.check_type_parameter, ignore_pos_arg_names=self.ignore_pos_arg_names): return True return False elif isinstance(right, Overloaded): # TODO: this may be too restrictive if len(left.items()) != len(right.items()): return False for i in range(len(left.items())): if not is_subtype(left.items()[i], right.items()[i], self.check_type_parameter, ignore_pos_arg_names=self.ignore_pos_arg_names): return False return True elif isinstance(right, UnboundType): return True elif isinstance(right, TypeType): # All the items must have the same type object status, so # it's sufficient to query only (any) one of them. # This is unsound, we don't check all the __init__ signatures. return left.is_type_obj() and is_subtype(left.items()[0], right) else: return False def visit_union_type(self, left: UnionType) -> bool: return all(is_subtype(item, self.right, self.check_type_parameter) for item in left.items) def visit_partial_type(self, left: PartialType) -> bool: # This is indeterminate as we don't really know the complete type yet. raise RuntimeError def visit_type_type(self, left: TypeType) -> bool: right = self.right if isinstance(right, TypeType): return is_subtype(left.item, right.item) if isinstance(right, CallableType): # This is unsound, we don't check the __init__ signature. return is_subtype(left.item, right.ret_type) if isinstance(right, Instance): if right.type.fullname() in ['builtins.object', 'builtins.type']: return True item = left.item if isinstance(item, TypeVarType): item = item.upper_bound if isinstance(item, Instance): metaclass = item.type.metaclass_type return metaclass is not None and is_subtype(metaclass, right) return False @contextmanager def pop_on_exit(stack: List[Tuple[Instance, Instance]], left: Instance, right: Instance) -> Iterator[None]: stack.append((left, right)) yield stack.pop() def is_protocol_implementation(left: Instance, right: Instance, proper_subtype: bool = False) -> bool: """Check whether 'left' implements the protocol 'right'. If 'proper_subtype' is True, then check for a proper subtype. Treat recursive protocols by using the 'assuming' structural subtype matrix (in sparse representation, i.e. as a list of pairs (subtype, supertype)), see also comment in nodes.TypeInfo. When we enter a check for classes (A, P), defined as following:: class P(Protocol): def f(self) -> P: ... class A: def f(self) -> A: ... this results in A being a subtype of P without infinite recursion. On every false result, we pop the assumption, thus avoiding an infinite recursion as well. 
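
    (Informally: when checking A against P above, the pair (A, P) is pushed
    onto the assumption stack first; the recursive check of the member types
    "def () -> A" against "def () -> P" then finds that assumption and
    succeeds instead of recursing.)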
""" assert right.type.is_protocol assuming = right.type.assuming_proper if proper_subtype else right.type.assuming for (l, r) in reversed(assuming): if sametypes.is_same_type(l, left) and sametypes.is_same_type(r, right): return True with pop_on_exit(assuming, left, right): for member in right.type.protocol_members: # nominal subtyping currently ignores '__init__' and '__new__' signatures if member in ('__init__', '__new__'): continue # The third argument below indicates to what self type is bound. # We always bind self to the subtype. (Similarly to nominal types). supertype = find_member(member, right, left) assert supertype is not None subtype = find_member(member, left, left) # Useful for debugging: # print(member, 'of', left, 'has type', subtype) # print(member, 'of', right, 'has type', supertype) if not subtype: return False if not proper_subtype: # Nominal check currently ignores arg names is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=True) else: is_compat = is_proper_subtype(subtype, supertype) if not is_compat: return False if isinstance(subtype, NoneTyp) and isinstance(supertype, CallableType): # We want __hash__ = None idiom to work even without --strict-optional return False subflags = get_member_flags(member, left.type) superflags = get_member_flags(member, right.type) if IS_SETTABLE in superflags: # Check opposite direction for settable attributes. if not is_subtype(supertype, subtype): return False if (IS_CLASSVAR in subflags) != (IS_CLASSVAR in superflags): return False if IS_SETTABLE in superflags and IS_SETTABLE not in subflags: return False # This rule is copied from nominal check in checker.py if IS_CLASS_OR_STATIC in superflags and IS_CLASS_OR_STATIC not in subflags: return False right.type.record_subtype_cache_entry(left, right, proper_subtype) return True def find_member(name: str, itype: Instance, subtype: Type) -> Optional[Type]: """Find the type of member by 'name' in 'itype's TypeInfo. Fin the member type after applying type arguments from 'itype', and binding 'self' to 'subtype'. Return None if member was not found. """ # TODO: this code shares some logic with checkmember.analyze_member_access, # consider refactoring. info = itype.type method = info.get_method(name) if method: if method.is_property: assert isinstance(method, OverloadedFuncDef) dec = method.items[0] assert isinstance(dec, Decorator) return find_node_type(dec.var, itype, subtype) return find_node_type(method, itype, subtype) else: # don't have such method, maybe variable or decorator? node = info.get(name) if not node: v = None else: v = node.node if isinstance(v, Decorator): v = v.var if isinstance(v, Var): return find_node_type(v, itype, subtype) if not v and name not in ['__getattr__', '__setattr__', '__getattribute__']: for method_name in ('__getattribute__', '__getattr__'): # Normally, mypy assumes that instances that define __getattr__ have all # attributes with the corresponding return type. If this will produce # many false negatives, then this could be prohibited for # structural subtyping. 
method = info.get_method(method_name) if method and method.info.fullname() != 'builtins.object': getattr_type = find_node_type(method, itype, subtype) if isinstance(getattr_type, CallableType): return getattr_type.ret_type if itype.type.fallback_to_any: return AnyType(TypeOfAny.special_form) return None def get_member_flags(name: str, info: TypeInfo) -> Set[int]: """Detect whether a member 'name' is settable, whether it is an instance or class variable, and whether it is class or static method. The flags are defined as following: * IS_SETTABLE: whether this attribute can be set, not set for methods and non-settable properties; * IS_CLASSVAR: set if the variable is annotated as 'x: ClassVar[t]'; * IS_CLASS_OR_STATIC: set for methods decorated with @classmethod or with @staticmethod. """ method = info.get_method(name) setattr_meth = info.get_method('__setattr__') if method: # this could be settable property if method.is_property: assert isinstance(method, OverloadedFuncDef) dec = method.items[0] assert isinstance(dec, Decorator) if dec.var.is_settable_property or setattr_meth: return {IS_SETTABLE} return set() node = info.get(name) if not node: if setattr_meth: return {IS_SETTABLE} return set() v = node.node if isinstance(v, Decorator): if v.var.is_staticmethod or v.var.is_classmethod: return {IS_CLASS_OR_STATIC} # just a variable if isinstance(v, Var): flags = {IS_SETTABLE} if v.is_classvar: flags.add(IS_CLASSVAR) return flags return set() def find_node_type(node: Union[Var, FuncBase], itype: Instance, subtype: Type) -> Type: """Find type of a variable or method 'node' (maybe also a decorated method). Apply type arguments from 'itype', and bind 'self' to 'subtype'. """ from mypy.checkmember import bind_self if isinstance(node, FuncBase): typ = function_type(node, fallback=Instance(itype.type.mro[-1], [])) # type: Optional[Type] else: typ = node.type if typ is None: return AnyType(TypeOfAny.from_error) # We don't need to bind 'self' for static methods, since there is no 'self'. if isinstance(node, FuncBase) or isinstance(typ, FunctionLike) and not node.is_staticmethod: assert isinstance(typ, FunctionLike) signature = bind_self(typ, subtype) if node.is_property: assert isinstance(signature, CallableType) typ = signature.ret_type else: typ = signature itype = map_instance_to_supertype(itype, node.info) typ = expand_type_by_instance(typ, itype) return typ def non_method_protocol_members(tp: TypeInfo) -> List[str]: """Find all non-callable members of a protocol.""" assert tp.is_protocol result = [] # type: List[str] anytype = AnyType(TypeOfAny.special_form) instance = Instance(tp, [anytype] * len(tp.defn.type_vars)) for member in tp.protocol_members: typ = find_member(member, instance, instance) if not isinstance(typ, CallableType): result.append(member) return result def is_callable_subtype(left: CallableType, right: CallableType, ignore_return: bool = False, ignore_pos_arg_names: bool = False, use_proper_subtype: bool = False) -> bool: """Is left a subtype of right?""" if use_proper_subtype: is_compat = is_proper_subtype else: is_compat = is_subtype # If either function is implicitly typed, ignore positional arg names too if left.implicit or right.implicit: ignore_pos_arg_names = True # Non-type cannot be a subtype of type. if right.is_type_obj() and not left.is_type_obj(): return False # A callable L is a subtype of a generic callable R if L is a # subtype of every type obtained from R by substituting types for # the variables of R. 
We can check this by simply leaving the # generic variables of R as type variables, effectively varying # over all possible values. # It's okay even if these variables share ids with generic # type variables of L, because generating and solving # constraints for the variables of L to make L a subtype of R # (below) treats type variables on the two sides as independent. if left.variables: # Apply generic type variables away in left via type inference. unified = unify_generic_callable(left, right, ignore_return=ignore_return) if unified is None: return False else: left = unified # Check return types. if not ignore_return and not is_compat(left.ret_type, right.ret_type): return False if right.is_ellipsis_args: return True right_star_type = None # type: Optional[Type] right_star2_type = None # type: Optional[Type] # Match up corresponding arguments and check them for compatibility. In # every pair (argL, argR) of corresponding arguments from L and R, argL must # be "more general" than argR if L is to be a subtype of R. # Arguments are corresponding if they either share a name, share a position, # or both. If L's corresponding argument is ambiguous, L is not a subtype of # R. # If left has one corresponding argument by name and another by position, # consider them to be one "merged" argument (and not ambiguous) if they're # both optional, they're name-only and position-only respectively, and they # have the same type. This rule allows functions with (*args, **kwargs) to # properly stand in for the full domain of formal arguments that they're # used for in practice. # Every argument in R must have a corresponding argument in L, and every # required argument in L must have a corresponding argument in R. done_with_positional = False for i in range(len(right.arg_types)): right_kind = right.arg_kinds[i] if right_kind in (ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT): done_with_positional = True right_required = right_kind in (ARG_POS, ARG_NAMED) right_pos = None if done_with_positional else i right_arg = FormalArgument( right.arg_names[i], right_pos, right.arg_types[i], right_required) if right_kind == ARG_STAR: right_star_type = right_arg.typ # Right has an infinite series of optional positional arguments # here. Get all further positional arguments of left, and make sure # they're more general than their corresponding member in this # series. Also make sure left has its own inifite series of # optional positional arguments. if not left.is_var_arg: return False j = i while j < len(left.arg_kinds) and left.arg_kinds[j] in (ARG_POS, ARG_OPT): left_by_position = left.argument_by_position(j) assert left_by_position is not None # This fetches the synthetic argument that's from the *args right_by_position = right.argument_by_position(j) assert right_by_position is not None if not are_args_compatible(left_by_position, right_by_position, ignore_pos_arg_names, use_proper_subtype): return False j += 1 continue if right_kind == ARG_STAR2: right_star2_type = right_arg.typ # Right has an infinite set of optional named arguments here. Get # all further named arguments of left and make sure they're more # general than their corresponding member in this set. Also make # sure left has its own infinite set of optional named arguments. 
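            # Hedged sketch of this rule with illustrative signatures:
            #     left  = def (*, verbose: int = ..., **kwargs: int) -> None
            #     right = def (**kwargs: int) -> None
            # Any keyword that 'right' accepts must be accepted by 'left'
            # with an equal or more general type, and 'left' must itself
            # take **kwargs; both hold here, so this branch passes.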
if not left.is_kw_arg: return False left_names = {name for name in left.arg_names if name is not None} right_names = {name for name in right.arg_names if name is not None} left_only_names = left_names - right_names for name in left_only_names: left_by_name = left.argument_by_name(name) assert left_by_name is not None # This fetches the synthetic argument that's from the **kwargs right_by_name = right.argument_by_name(name) assert right_by_name is not None if not are_args_compatible(left_by_name, right_by_name, ignore_pos_arg_names, use_proper_subtype): return False continue # Left must have some kind of corresponding argument. left_arg = left.corresponding_argument(right_arg) if left_arg is None: return False if not are_args_compatible(left_arg, right_arg, ignore_pos_arg_names, use_proper_subtype): return False done_with_positional = False for i in range(len(left.arg_types)): left_kind = left.arg_kinds[i] if left_kind in (ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT): done_with_positional = True left_arg = FormalArgument( left.arg_names[i], None if done_with_positional else i, left.arg_types[i], left_kind in (ARG_POS, ARG_NAMED)) # Check that *args and **kwargs types match in this loop if left_kind == ARG_STAR: if right_star_type is not None and not is_compat(right_star_type, left_arg.typ): return False continue elif left_kind == ARG_STAR2: if right_star2_type is not None and not is_compat(right_star2_type, left_arg.typ): return False continue right_by_name = (right.argument_by_name(left_arg.name) if left_arg.name is not None else None) right_by_pos = (right.argument_by_position(left_arg.pos) if left_arg.pos is not None else None) # If the left hand argument corresponds to two right-hand arguments, # neither of them can be required. if (right_by_name is not None and right_by_pos is not None and right_by_name != right_by_pos and (right_by_pos.required or right_by_name.required)): return False # All *required* left-hand arguments must have a corresponding # right-hand argument. Optional args it does not matter. if left_arg.required and right_by_pos is None and right_by_name is None: return False return True def are_args_compatible( left: FormalArgument, right: FormalArgument, ignore_pos_arg_names: bool, use_proper_subtype: bool) -> bool: # If right has a specific name it wants this argument to be, left must # have the same. if right.name is not None and left.name != right.name: # But pay attention to whether we're ignoring positional arg names if not ignore_pos_arg_names or right.pos is None: return False # If right is at a specific position, left must have the same: if right.pos is not None and left.pos != right.pos: return False # Left must have a more general type if use_proper_subtype: if not is_proper_subtype(right.typ, left.typ): return False else: if not is_subtype(right.typ, left.typ): return False # If right's argument is optional, left's must also be. if not right.required and left.required: return False return True def unify_generic_callable(type: CallableType, target: CallableType, ignore_return: bool) -> Optional[CallableType]: """Try to unify a generic callable type with another callable type. Return unified CallableType if successful; otherwise, return None. 
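
    Informal example: unifying the generic callable "def [T] (x: T) -> T"
    against the target "def (x: int) -> int" infers T = int and returns
    "def (x: int) -> int"; if no consistent assignment exists, None is
    returned.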
""" import mypy.solve constraints = [] # type: List[mypy.constraints.Constraint] for arg_type, target_arg_type in zip(type.arg_types, target.arg_types): c = mypy.constraints.infer_constraints( arg_type, target_arg_type, mypy.constraints.SUPERTYPE_OF) constraints.extend(c) if not ignore_return: c = mypy.constraints.infer_constraints( type.ret_type, target.ret_type, mypy.constraints.SUBTYPE_OF) constraints.extend(c) type_var_ids = [tvar.id for tvar in type.variables] inferred_vars = mypy.solve.solve_constraints(type_var_ids, constraints) if None in inferred_vars: return None non_none_inferred_vars = cast(List[Type], inferred_vars) msg = messages.temp_message_builder() applied = mypy.applytype.apply_generic_arguments(type, non_none_inferred_vars, msg, context=target) if msg.is_errors(): return None return applied def restrict_subtype_away(t: Type, s: Type) -> Type: """Return t minus s. If we can't determine a precise result, return a supertype of the ideal result (just t is a valid result). This is used for type inference of runtime type checks such as isinstance. Currently this just removes elements of a union type. """ if isinstance(t, UnionType): # Since runtime type checks will ignore type arguments, erase the types. erased_s = erase_type(s) # TODO: Implement more robust support for runtime isinstance() checks, # see issue #3827 new_items = [item for item in t.relevant_items() if (not (is_proper_subtype(erase_type(item), erased_s) or is_proper_subtype(item, erased_s)) or isinstance(item, AnyType))] return UnionType.make_union(new_items) else: return t def is_proper_subtype(left: Type, right: Type) -> bool: """Is left a proper subtype of right? For proper subtypes, there's no need to rely on compatibility due to Any types. Every usable type is a proper subtype of itself. """ if isinstance(right, UnionType) and not isinstance(left, UnionType): return any([is_proper_subtype(left, item) for item in right.items]) return left.accept(ProperSubtypeVisitor(right)) class ProperSubtypeVisitor(TypeVisitor[bool]): def __init__(self, right: Type) -> None: self.right = right def visit_unbound_type(self, left: UnboundType) -> bool: # This can be called if there is a bad type annotation. The result probably # doesn't matter much but by returning True we simplify these bad types away # from unions, which could filter out some bogus messages. return True def visit_any(self, left: AnyType) -> bool: return isinstance(self.right, AnyType) def visit_none_type(self, left: NoneTyp) -> bool: if experiments.STRICT_OPTIONAL: return (isinstance(self.right, NoneTyp) or is_named_instance(self.right, 'builtins.object')) return True def visit_uninhabited_type(self, left: UninhabitedType) -> bool: return True def visit_erased_type(self, left: ErasedType) -> bool: # This may be encountered during type inference. The result probably doesn't # matter much. 
return True def visit_deleted_type(self, left: DeletedType) -> bool: return True def visit_instance(self, left: Instance) -> bool: right = self.right if isinstance(right, Instance): if right.type.is_cached_subtype_check(left, right, proper_subtype=True): return True for base in left.type.mro: if base._promote and is_proper_subtype(base._promote, right): right.type.record_subtype_cache_entry(left, right, proper_subtype=True) return True if left.type.has_base(right.type.fullname()): def check_argument(leftarg: Type, rightarg: Type, variance: int) -> bool: if variance == COVARIANT: return is_proper_subtype(leftarg, rightarg) elif variance == CONTRAVARIANT: return is_proper_subtype(rightarg, leftarg) else: return sametypes.is_same_type(leftarg, rightarg) # Map left type to corresponding right instances. left = map_instance_to_supertype(left, right.type) nominal = all(check_argument(ta, ra, tvar.variance) for ta, ra, tvar in zip(left.args, right.args, right.type.defn.type_vars)) if nominal: right.type.record_subtype_cache_entry(left, right, proper_subtype=True) return nominal if (right.type.is_protocol and is_protocol_implementation(left, right, proper_subtype=True)): return True return False if isinstance(right, CallableType): call = find_member('__call__', left, left) if call: return is_proper_subtype(call, right) return False return False def visit_type_var(self, left: TypeVarType) -> bool: if isinstance(self.right, TypeVarType) and left.id == self.right.id: return True # TODO: Value restrictions return is_proper_subtype(left.upper_bound, self.right) def visit_callable_type(self, left: CallableType) -> bool: right = self.right if isinstance(right, CallableType): return is_callable_subtype( left, right, ignore_pos_arg_names=False, use_proper_subtype=True) elif isinstance(right, Overloaded): return all(is_proper_subtype(left, item) for item in right.items()) elif isinstance(right, Instance): return is_proper_subtype(left.fallback, right) elif isinstance(right, TypeType): # This is unsound, we don't check the __init__ signature. return left.is_type_obj() and is_proper_subtype(left.ret_type, right.item) return False def visit_tuple_type(self, left: TupleType) -> bool: right = self.right if isinstance(right, Instance): if (is_named_instance(right, 'builtins.tuple') or is_named_instance(right, 'typing.Iterable') or is_named_instance(right, 'typing.Container') or is_named_instance(right, 'typing.Sequence') or is_named_instance(right, 'typing.Reversible')): if not right.args: return False iter_type = right.args[0] if is_named_instance(right, 'builtins.tuple') and isinstance(iter_type, AnyType): # TODO: We shouldn't need this special case. This is currently needed # for isinstance(x, tuple), though it's unclear why. 
return True return all(is_proper_subtype(li, iter_type) for li in left.items) return is_proper_subtype(left.fallback, right) elif isinstance(right, TupleType): if len(left.items) != len(right.items): return False for l, r in zip(left.items, right.items): if not is_proper_subtype(l, r): return False return is_proper_subtype(left.fallback, right.fallback) return False def visit_typeddict_type(self, left: TypedDictType) -> bool: right = self.right if isinstance(right, TypedDictType): for name, typ in left.items.items(): if name in right.items and not is_same_type(typ, right.items[name]): return False for name, typ in right.items.items(): if name not in left.items: return False return True return is_proper_subtype(left.fallback, right) def visit_overloaded(self, left: Overloaded) -> bool: # TODO: What's the right thing to do here? return False def visit_union_type(self, left: UnionType) -> bool: return all([is_proper_subtype(item, self.right) for item in left.items]) def visit_partial_type(self, left: PartialType) -> bool: # TODO: What's the right thing to do here? return False def visit_type_type(self, left: TypeType) -> bool: # TODO: Handle metaclasses? right = self.right if isinstance(right, TypeType): # This is unsound, we don't check the __init__ signature. return is_proper_subtype(left.item, right.item) if isinstance(right, CallableType): # This is also unsound because of __init__. return right.is_type_obj() and is_proper_subtype(left.item, right.ret_type) if isinstance(right, Instance): if right.type.fullname() == 'builtins.type': # TODO: Strictly speaking, the type builtins.type is considered equivalent to # Type[Any]. However, this would break the is_proper_subtype check in # conditional_type_map for cases like isinstance(x, type) when the type # of x is Type[int]. It's unclear what's the right way to address this. return True if right.type.fullname() == 'builtins.object': return True return False def is_more_precise(left: Type, right: Type) -> bool: """Check if left is a more precise type than right. A left is a proper subtype of right, left is also more precise than right. Also, if right is Any, left is more precise than right, for any left. """ # TODO Should List[int] be more precise than List[Any]? if isinstance(right, AnyType): return True return is_proper_subtype(left, right) mypy-0.560/mypy/test/0000755€tŠÔÚ€2›s®0000000000013215007242020645 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/mypy/test/__init__.py0000644€tŠÔÚ€2›s®0000000000013215007205022743 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/mypy/test/collect.py0000644€tŠÔÚ€2›s®0000000000013215007205022631 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/mypy/test/config.py0000644€tŠÔÚ€2›s®0000000120113215007206022456 0ustar jukkaDROPBOX\Domain Users00000000000000import os import os.path import typing this_file_dir = os.path.dirname(os.path.realpath(__file__)) PREFIX = os.path.dirname(os.path.dirname(this_file_dir)) # Location of test data files such as test case descriptions. test_data_prefix = os.path.join(PREFIX, 'test-data', 'unit') assert os.path.isdir(test_data_prefix), \ 'Test data prefix ({}) not set correctly'.format(test_data_prefix) # Temp directory used for the temp files created when running test cases. # This is *within* the tempfile.TemporaryDirectory that is chroot'ed per testcase. # It is also hard-coded in numerous places, so don't change it. 
test_temp_dir = 'tmp' mypy-0.560/mypy/test/data.py0000644€tŠÔÚ€2›s®0000005603213215007206022136 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utilities for processing .test files containing test case descriptions.""" import os.path import os import posixpath import re from os import remove, rmdir import shutil import pytest # type: ignore # no pytest in typeshed from typing import Callable, List, Tuple, Set, Optional, Iterator, Any, Dict, NamedTuple, Union from mypy.myunit import TestCase, SkipTestCaseException from mypy.test.config import test_temp_dir root_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..')) # File modify/create operation: copy module contents from source_path. UpdateFile = NamedTuple('UpdateFile', [('module', str), ('source_path', str), ('target_path', str)]) # File delete operation: delete module file. DeleteFile = NamedTuple('DeleteFile', [('module', str), ('path', str)]) FileOperation = Union[UpdateFile, DeleteFile] def parse_test_cases( path: str, perform: Optional[Callable[['DataDrivenTestCase'], None]], base_path: str = '.', optional_out: bool = False, include_path: Optional[str] = None, native_sep: bool = False) -> List['DataDrivenTestCase']: """Parse a file with test case descriptions. Return an array of test cases. NB this function and DataDrivenTestCase are shared between the myunit and pytest codepaths -- if something looks redundant, that's likely the reason. """ if native_sep: join = os.path.join else: join = posixpath.join # type: ignore if not include_path: include_path = os.path.dirname(path) with open(path, encoding='utf-8') as f: lst = f.readlines() for i in range(len(lst)): lst[i] = lst[i].rstrip('\n') p = parse_test_data(lst, path) out = [] # type: List[DataDrivenTestCase] # Process the parsed items. Each item has a header of form [id args], # optionally followed by lines of text. i = 0 while i < len(p): ok = False i0 = i if p[i].id == 'case': i += 1 files = [] # type: List[Tuple[str, str]] # path and contents output_files = [] # type: List[Tuple[str, str]] # path and contents for output files tcout = [] # type: List[str] # Regular output errors tcout2 = {} # type: Dict[int, List[str]] # Output errors for incremental, runs 2+ deleted_paths = {} # type: Dict[int, Set[str]] # from run number of paths stale_modules = {} # type: Dict[int, Set[str]] # from run number to module names rechecked_modules = {} # type: Dict[ int, Set[str]] # from run number module names while i < len(p) and p[i].id != 'case': if p[i].id == 'file' or p[i].id == 'outfile': # Record an extra file needed for the test case. arg = p[i].arg assert arg is not None contents = '\n'.join(p[i].data) contents = expand_variables(contents) file_entry = (join(base_path, arg), contents) if p[i].id == 'file': files.append(file_entry) elif p[i].id == 'outfile': output_files.append(file_entry) elif p[i].id in ('builtins', 'builtins_py2'): # Use an alternative stub file for the builtins module. arg = p[i].arg assert arg is not None mpath = join(os.path.dirname(path), arg) if p[i].id == 'builtins': fnam = 'builtins.pyi' else: # Python 2 fnam = '__builtin__.pyi' with open(mpath) as f: files.append((join(base_path, fnam), f.read())) elif p[i].id == 'typing': # Use an alternative stub file for the typing module. 
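                    # Illustrative (hypothetical) section in a .test file:
                    #     [typing fixtures/typing-full.pyi]
                    # The named stub, relative to the .test file's directory,
                    # is made available as typing.pyi in the test's temp
                    # directory.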
arg = p[i].arg assert arg is not None src_path = join(os.path.dirname(path), arg) with open(src_path) as f: files.append((join(base_path, 'typing.pyi'), f.read())) elif re.match(r'stale[0-9]*$', p[i].id): if p[i].id == 'stale': passnum = 1 else: passnum = int(p[i].id[len('stale'):]) assert passnum > 0 arg = p[i].arg if arg is None: stale_modules[passnum] = set() else: stale_modules[passnum] = {item.strip() for item in arg.split(',')} elif re.match(r'rechecked[0-9]*$', p[i].id): if p[i].id == 'rechecked': passnum = 1 else: passnum = int(p[i].id[len('rechecked'):]) arg = p[i].arg if arg is None: rechecked_modules[passnum] = set() else: rechecked_modules[passnum] = {item.strip() for item in arg.split(',')} elif p[i].id == 'delete': # File to delete during a multi-step test case arg = p[i].arg assert arg is not None m = re.match(r'(.*)\.([0-9]+)$', arg) assert m, 'Invalid delete section: {}'.format(arg) num = int(m.group(2)) assert num >= 2, "Can't delete during step {}".format(num) full = join(base_path, m.group(1)) deleted_paths.setdefault(num, set()).add(full) elif p[i].id == 'out' or p[i].id == 'out1': tcout = p[i].data tcout = [expand_variables(line) for line in tcout] if os.path.sep == '\\': tcout = [fix_win_path(line) for line in tcout] ok = True elif re.match(r'out[0-9]*$', p[i].id): passnum = int(p[i].id[3:]) assert passnum > 1 output = p[i].data output = [expand_variables(line) for line in output] if native_sep and os.path.sep == '\\': output = [fix_win_path(line) for line in output] tcout2[passnum] = output ok = True else: raise ValueError( 'Invalid section header {} in {} at line {}'.format( p[i].id, path, p[i].line)) i += 1 for passnum in stale_modules.keys(): if passnum not in rechecked_modules: # If the set of rechecked modules isn't specified, make it the same as the set # of modules with a stale public interface. 
rechecked_modules[passnum] = stale_modules[passnum] if (passnum in stale_modules and passnum in rechecked_modules and not stale_modules[passnum].issubset(rechecked_modules[passnum])): raise ValueError( ('Stale modules after pass {} must be a subset of rechecked ' 'modules ({}:{})').format(passnum, path, p[i0].line)) if optional_out: ok = True if ok: input = expand_includes(p[i0].data, include_path) expand_errors(input, tcout, 'main') for file_path, contents in files: expand_errors(contents.split('\n'), tcout, file_path) lastline = p[i].line if i < len(p) else p[i - 1].line + 9999 arg0 = p[i0].arg assert arg0 is not None tc = DataDrivenTestCase(arg0, input, tcout, tcout2, path, p[i0].line, lastline, perform, files, output_files, stale_modules, rechecked_modules, deleted_paths, native_sep) out.append(tc) if not ok: raise ValueError( '{}, line {}: Error in test case description'.format( path, p[i0].line)) return out class DataDrivenTestCase(TestCase): input = None # type: List[str] output = None # type: List[str] # Output for the first pass output2 = None # type: Dict[int, List[str]] # Output for runs 2+, indexed by run number file = '' line = 0 # (file path, file content) tuples files = None # type: List[Tuple[str, str]] expected_stale_modules = None # type: Dict[int, Set[str]] expected_rechecked_modules = None # type: Dict[int, Set[str]] # Files/directories to clean up after test case; (is directory, path) tuples clean_up = None # type: List[Tuple[bool, str]] def __init__(self, name: str, input: List[str], output: List[str], output2: Dict[int, List[str]], file: str, line: int, lastline: int, perform: Optional[Callable[['DataDrivenTestCase'], None]], files: List[Tuple[str, str]], output_files: List[Tuple[str, str]], expected_stale_modules: Dict[int, Set[str]], expected_rechecked_modules: Dict[int, Set[str]], deleted_paths: Dict[int, Set[str]], native_sep: bool = False, ) -> None: super().__init__(name) self.input = input self.output = output self.output2 = output2 self.lastline = lastline self.file = file self.line = line self.perform = perform self.files = files self.output_files = output_files self.expected_stale_modules = expected_stale_modules self.expected_rechecked_modules = expected_rechecked_modules self.deleted_paths = deleted_paths self.native_sep = native_sep def set_up(self) -> None: super().set_up() encountered_files = set() self.clean_up = [] for paths in self.deleted_paths.values(): for path in paths: self.clean_up.append((False, path)) encountered_files.add(path) for path, content in self.files: dir = os.path.dirname(path) for d in self.add_dirs(dir): self.clean_up.append((True, d)) with open(path, 'w') as f: f.write(content) if path not in encountered_files: self.clean_up.append((False, path)) encountered_files.add(path) if re.search(r'\.[2-9]$', path): # Make sure new files introduced in the second and later runs are accounted for renamed_path = path[:-2] if renamed_path not in encountered_files: encountered_files.add(renamed_path) self.clean_up.append((False, renamed_path)) for path, _ in self.output_files: # Create directories for expected output and mark them to be cleaned up at the end # of the test case. dir = os.path.dirname(path) for d in self.add_dirs(dir): self.clean_up.append((True, d)) self.clean_up.append((False, path)) def add_dirs(self, dir: str) -> List[str]: """Add all subdirectories required to create dir. Return an array of the created directories in the order of creation. 
""" if dir == '' or os.path.isdir(dir): return [] else: dirs = self.add_dirs(os.path.dirname(dir)) + [dir] os.mkdir(dir) return dirs def run(self) -> None: if self.name.endswith('-skip'): raise SkipTestCaseException() else: assert self.perform is not None, 'Tests without `perform` should not be `run`' self.perform(self) def tear_down(self) -> None: # First remove files. for is_dir, path in reversed(self.clean_up): if not is_dir: try: remove(path) except FileNotFoundError: # Breaking early using Ctrl+C may happen before file creation. Also, some # files may be deleted by a test case. pass # Then remove directories. for is_dir, path in reversed(self.clean_up): if is_dir: pycache = os.path.join(path, '__pycache__') if os.path.isdir(pycache): shutil.rmtree(pycache) try: rmdir(path) except OSError as error: print(' ** Error removing directory %s -- contents:' % path) for item in os.listdir(path): print(' ', item) # Most likely, there are some files in the # directory. Use rmtree to nuke the directory, but # fail the test case anyway, since this seems like # a bug in a test case -- we shouldn't leave # garbage lying around. By nuking the directory, # the next test run hopefully passes. path = error.filename # Be defensive -- only call rmtree if we're sure we aren't removing anything # valuable. if path.startswith(test_temp_dir + '/') and os.path.isdir(path): shutil.rmtree(path) raise super().tear_down() def find_steps(self) -> List[List[FileOperation]]: """Return a list of descriptions of file operations for each incremental step. The first list item corresponds to the first incremental step, the second for the second step, etc. Each operation can either be a file modification/creation (UpdateFile) or deletion (DeleteFile). """ steps = {} # type: Dict[int, List[FileOperation]] for path, _ in self.files: m = re.match(r'.*\.([0-9]+)$', path) if m: num = int(m.group(1)) assert num >= 2 target_path = re.sub(r'\.[0-9]+$', '', path) module = module_from_path(target_path) operation = UpdateFile(module, path, target_path) steps.setdefault(num, []).append(operation) for num, paths in self.deleted_paths.items(): assert num >= 2 for path in paths: module = module_from_path(path) steps.setdefault(num, []).append(DeleteFile(module, path)) max_step = max(steps) return [steps[num] for num in range(2, max_step + 1)] def module_from_path(path: str) -> str: path = re.sub(r'\.py$', '', path) # We can have a mix of Unix-style and Windows-style separators. parts = re.split(r'[/\\]', path) assert parts[0] == test_temp_dir del parts[0] module = '.'.join(parts) module = re.sub(r'\.__init__$', '', module) return module class TestItem: """Parsed test caseitem. An item is of the form [id arg] .. data .. 
""" id = '' arg = '' # type: Optional[str] # Text data, array of 8-bit strings data = None # type: List[str] file = '' line = 0 # Line number in file def __init__(self, id: str, arg: Optional[str], data: List[str], file: str, line: int) -> None: self.id = id self.arg = arg self.data = data self.file = file self.line = line def parse_test_data(l: List[str], fnam: str) -> List[TestItem]: """Parse a list of lines that represent a sequence of test items.""" ret = [] # type: List[TestItem] data = [] # type: List[str] id = None # type: Optional[str] arg = None # type: Optional[str] i = 0 i0 = 0 while i < len(l): s = l[i].strip() if l[i].startswith('[') and s.endswith(']') and not s.startswith('[['): if id: data = collapse_line_continuation(data) data = strip_list(data) ret.append(TestItem(id, arg, strip_list(data), fnam, i0 + 1)) i0 = i id = s[1:-1] arg = None if ' ' in id: arg = id[id.index(' ') + 1:] id = id[:id.index(' ')] data = [] elif l[i].startswith('[['): data.append(l[i][1:]) elif not l[i].startswith('--'): data.append(l[i]) elif l[i].startswith('----'): data.append(l[i][2:]) i += 1 # Process the last item. if id: data = collapse_line_continuation(data) data = strip_list(data) ret.append(TestItem(id, arg, data, fnam, i0 + 1)) return ret def strip_list(l: List[str]) -> List[str]: """Return a stripped copy of l. Strip whitespace at the end of all lines, and strip all empty lines from the end of the array. """ r = [] # type: List[str] for s in l: # Strip spaces at end of line r.append(re.sub(r'\s+$', '', s)) while len(r) > 0 and r[-1] == '': r.pop() return r def collapse_line_continuation(l: List[str]) -> List[str]: r = [] # type: List[str] cont = False for s in l: ss = re.sub(r'\\$', '', s) if cont: r[-1] += re.sub('^ +', '', ss) else: r.append(ss) cont = s.endswith('\\') return r def expand_includes(a: List[str], base_path: str) -> List[str]: """Expand @includes within a list of lines. Replace all lies starting with @include with the contents of the file name following the prefix. Look for the files in base_path. """ res = [] # type: List[str] for s in a: if s.startswith('@include '): fn = s.split(' ', 1)[1].strip() with open(os.path.join(base_path, fn)) as f: res.extend(f.readlines()) else: res.append(s) return res def expand_variables(s: str) -> str: return s.replace('', root_dir) def expand_errors(input: List[str], output: List[str], fnam: str) -> None: """Transform comments such as '# E: message' or '# E:3: message' in input. The result is lines like 'fnam:line: error: message'. """ for i in range(len(input)): # The first in the split things isn't a comment for possible_err_comment in input[i].split(' # ')[1:]: m = re.search( '^([ENW]):((?P\d+):)? (?P.*)$', possible_err_comment.strip()) if m: if m.group(1) == 'E': severity = 'error' elif m.group(1) == 'N': severity = 'note' elif m.group(1) == 'W': severity = 'warning' col = m.group('col') if col is None: output.append( '{}:{}: {}: {}'.format(fnam, i + 1, severity, m.group('message'))) else: output.append('{}:{}:{}: {}: {}'.format( fnam, i + 1, col, severity, m.group('message'))) def fix_win_path(line: str) -> str: r"""Changes Windows paths to Linux paths in error messages. E.g. foo\bar.py -> foo/bar.py. 
""" line = line.replace(root_dir, root_dir.replace('\\', '/')) m = re.match(r'^([\S/]+):(\d+:)?(\s+.*)', line) if not m: return line else: filename, lineno, message = m.groups() return '{}:{}{}'.format(filename.replace('\\', '/'), lineno or '', message) def fix_cobertura_filename(line: str) -> str: r"""Changes filename paths to Linux paths in Cobertura output files. E.g. filename="pkg\subpkg\a.py" -> filename="pkg/subpkg/a.py". """ m = re.search(r' None: group = parser.getgroup('mypy') group.addoption('--update-data', action='store_true', default=False, help='Update test data to reflect actual output' ' (supported only for certain tests)') # This function name is special to pytest. See # http://doc.pytest.org/en/latest/writing_plugins.html#collection-hooks def pytest_pycollect_makeitem(collector: Any, name: str, obj: Any) -> Any: if not isinstance(obj, type) or not issubclass(obj, DataSuite): return None return MypyDataSuite(name, parent=collector) class MypyDataSuite(pytest.Class): # type: ignore # inheriting from Any def collect(self) -> Iterator['MypyDataCase']: for case in self.obj.cases(): yield MypyDataCase(case.name, self, case) class MypyDataCase(pytest.Item): # type: ignore # inheriting from Any def __init__(self, name: str, parent: MypyDataSuite, obj: DataDrivenTestCase) -> None: self.skip = False if name.endswith('-skip'): self.skip = True name = name[:-len('-skip')] super().__init__(name, parent) self.obj = obj def runtest(self) -> None: if self.skip: pytest.skip() update_data = self.config.getoption('--update-data', False) self.parent.obj(update_data=update_data).run_case(self.obj) def setup(self) -> None: self.obj.set_up() def teardown(self) -> None: self.obj.tear_down() def reportinfo(self) -> Tuple[str, int, str]: return self.obj.file, self.obj.line, self.obj.name def repr_failure(self, excinfo: Any) -> str: if excinfo.errisinstance(SystemExit): # We assume that before doing exit() (which raises SystemExit) we've printed # enough context about what happened so that a stack trace is not useful. # In particular, uncaught exceptions during semantic analysis or type checking # call exit() and they already print out a stack trace. excrepr = excinfo.exconly() else: self.parent._prunetraceback(excinfo) excrepr = excinfo.getrepr(style='short') return "data: {}:{}:\n{}".format(self.obj.file, self.obj.line, excrepr) class DataSuite: def __init__(self, *, update_data: bool) -> None: self.update_data = update_data @classmethod def cases(cls) -> List[DataDrivenTestCase]: return [] def run_case(self, testcase: DataDrivenTestCase) -> None: raise NotImplementedError mypy-0.560/mypy/test/helpers.py0000644€tŠÔÚ€2›s®0000002355413215007205022671 0ustar jukkaDROPBOX\Domain Users00000000000000import os import re import sys import time from typing import List, Dict, Tuple, Callable, Any from mypy import defaults from mypy.myunit import AssertionFailure from mypy.test.data import DataDrivenTestCase # AssertStringArraysEqual displays special line alignment helper messages if # the first different line has at least this many characters, MIN_LINE_LENGTH_FOR_ALIGNMENT = 5 def assert_string_arrays_equal(expected: List[str], actual: List[str], msg: str) -> None: """Assert that two string arrays are equal. Display any differences in a human-readable form. 
""" actual = clean_up(actual) if actual != expected: num_skip_start = num_skipped_prefix_lines(expected, actual) num_skip_end = num_skipped_suffix_lines(expected, actual) sys.stderr.write('Expected:\n') # If omit some lines at the beginning, indicate it by displaying a line # with '...'. if num_skip_start > 0: sys.stderr.write(' ...\n') # Keep track of the first different line. first_diff = -1 # Display only this many first characters of identical lines. width = 75 for i in range(num_skip_start, len(expected) - num_skip_end): if i >= len(actual) or expected[i] != actual[i]: if first_diff < 0: first_diff = i sys.stderr.write(' {:<45} (diff)'.format(expected[i])) else: e = expected[i] sys.stderr.write(' ' + e[:width]) if len(e) > width: sys.stderr.write('...') sys.stderr.write('\n') if num_skip_end > 0: sys.stderr.write(' ...\n') sys.stderr.write('Actual:\n') if num_skip_start > 0: sys.stderr.write(' ...\n') for j in range(num_skip_start, len(actual) - num_skip_end): if j >= len(expected) or expected[j] != actual[j]: sys.stderr.write(' {:<45} (diff)'.format(actual[j])) else: a = actual[j] sys.stderr.write(' ' + a[:width]) if len(a) > width: sys.stderr.write('...') sys.stderr.write('\n') if actual == []: sys.stderr.write(' (empty)\n') if num_skip_end > 0: sys.stderr.write(' ...\n') sys.stderr.write('\n') if first_diff >= 0 and first_diff < len(actual) and ( len(expected[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT or len(actual[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT): # Display message that helps visualize the differences between two # long lines. show_align_message(expected[first_diff], actual[first_diff]) raise AssertionFailure(msg) def update_testcase_output(testcase: DataDrivenTestCase, output: List[str]) -> None: assert testcase.old_cwd is not None, "test was not properly set up" testcase_path = os.path.join(testcase.old_cwd, testcase.file) with open(testcase_path) as f: data_lines = f.read().splitlines() test = '\n'.join(data_lines[testcase.line:testcase.lastline]) mapping = {} # type: Dict[str, List[str]] for old, new in zip(testcase.output, output): PREFIX = 'error:' ind = old.find(PREFIX) if ind != -1 and old[:ind] == new[:ind]: old, new = old[ind + len(PREFIX):], new[ind + len(PREFIX):] mapping.setdefault(old, []).append(new) for old in mapping: if test.count(old) == len(mapping[old]): betweens = test.split(old) # Interleave betweens and mapping[old] from itertools import chain interleaved = [betweens[0]] + \ list(chain.from_iterable(zip(mapping[old], betweens[1:]))) test = ''.join(interleaved) data_lines[testcase.line:testcase.lastline] = [test] data = '\n'.join(data_lines) with open(testcase_path, 'w') as f: print(data, file=f) def show_align_message(s1: str, s2: str) -> None: """Align s1 and s2 so that the their first difference is highlighted. For example, if s1 is 'foobar' and s2 is 'fobar', display the following lines: E: foobar A: fobar ^ If s1 and s2 are long, only display a fragment of the strings around the first difference. If s1 is very short, do nothing. """ # Seeing what went wrong is trivial even without alignment if the expected # string is very short. In this case do nothing to simplify output. if len(s1) < 4: return maxw = 72 # Maximum number of characters shown sys.stderr.write('Alignment of first line difference:\n') trunc = False while s1[:30] == s2[:30]: s1 = s1[10:] s2 = s2[10:] trunc = True if trunc: s1 = '...' + s1 s2 = '...' + s2 max_len = max(len(s1), len(s2)) extra = '' if max_len > maxw: extra = '...' # Write a chunk of both lines, aligned. 
sys.stderr.write(' E: {}{}\n'.format(s1[:maxw], extra)) sys.stderr.write(' A: {}{}\n'.format(s2[:maxw], extra)) # Write an indicator character under the different columns. sys.stderr.write(' ') for j in range(min(maxw, max(len(s1), len(s2)))): if s1[j:j + 1] != s2[j:j + 1]: sys.stderr.write('^') # Difference break else: sys.stderr.write(' ') # Equal sys.stderr.write('\n') def assert_string_arrays_equal_wildcards(expected: List[str], actual: List[str], msg: str) -> None: # Like above, but let a line with only '...' in expected match any number # of lines in actual. actual = clean_up(actual) while actual != [] and actual[-1] == '': actual = actual[:-1] # Expand "..." wildcards away. expected = match_array(expected, actual) assert_string_arrays_equal(expected, actual, msg) def clean_up(a: List[str]) -> List[str]: """Remove common directory prefix from all strings in a. This uses a naive string replace; it seems to work well enough. Also remove trailing carriage returns. """ res = [] for s in a: prefix = os.sep ss = s for p in prefix, prefix.replace(os.sep, '/'): if p != '/' and p != '//' and p != '\\' and p != '\\\\': ss = ss.replace(p, '') # Ignore spaces at end of line. ss = re.sub(' +$', '', ss) res.append(re.sub('\\r$', '', ss)) return res def match_array(pattern: List[str], target: List[str]) -> List[str]: """Expand '...' wildcards in pattern by matching against target.""" res = [] # type: List[str] i = 0 j = 0 while i < len(pattern): if pattern[i] == '...': # Wildcard in pattern. if i + 1 == len(pattern): # Wildcard at end of pattern; match the rest of target. res.extend(target[j:]) # Finished. break else: # Must find the instance of the next pattern line in target. jj = j while jj < len(target): if target[jj] == pattern[i + 1]: break jj += 1 if jj == len(target): # No match. Get out. res.extend(pattern[i:]) break res.extend(target[j:jj]) i += 1 j = jj elif (j < len(target) and (pattern[i] == target[j] or (i + 1 < len(pattern) and j + 1 < len(target) and pattern[i + 1] == target[j + 1]))): # In sync; advance one line. The above condition keeps sync also if # only a single line is different, but loses it if two consecutive # lines fail to match. res.append(pattern[i]) i += 1 j += 1 else: # Out of sync. Get out. res.extend(pattern[i:]) break return res def num_skipped_prefix_lines(a1: List[str], a2: List[str]) -> int: num_eq = 0 while num_eq < min(len(a1), len(a2)) and a1[num_eq] == a2[num_eq]: num_eq += 1 return max(0, num_eq - 4) def num_skipped_suffix_lines(a1: List[str], a2: List[str]) -> int: num_eq = 0 while (num_eq < min(len(a1), len(a2)) and a1[-num_eq - 1] == a2[-num_eq - 1]): num_eq += 1 return max(0, num_eq - 4) def testfile_pyversion(path: str) -> Tuple[int, int]: if path.endswith('python2.test'): return defaults.PYTHON2_VERSION else: return defaults.PYTHON3_VERSION def testcase_pyversion(path: str, testcase_name: str) -> Tuple[int, int]: if testcase_name.endswith('python2'): return defaults.PYTHON2_VERSION else: return testfile_pyversion(path) def normalize_error_messages(messages: List[str]) -> List[str]: """Translate an array of error messages to use / as path separator.""" a = [] for m in messages: a.append(m.replace(os.sep, '/')) return a def retry_on_error(func: Callable[[], Any], max_wait: float = 1.0) -> None: """Retry callback with exponential backoff when it raises OSError. If the function still generates an error after max_wait seconds, propagate the exception. This can be effective against random file system operation failures on Windows. 
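
    For example, the checker tests wrap flaky file operations as
    retry_on_error(lambda: shutil.copy(full, target)) or
    retry_on_error(lambda: os.remove(path)).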
""" t0 = time.time() wait_time = 0.01 while True: try: func() return except OSError: wait_time = min(wait_time * 2, t0 + max_wait - time.time()) if wait_time <= 0.01: # Done enough waiting, the error seems persistent. raise time.sleep(wait_time) mypy-0.560/mypy/test/testargs.py0000644€tŠÔÚ€2›s®0000000111113215007206023045 0ustar jukkaDROPBOX\Domain Users00000000000000"""Ensure the argparse parser and Options class are in sync. In particular, verify that the argparse defaults are the same as the Options defaults, and that argparse doesn't assign any new members to the Options object it creates. """ import typing from mypy.myunit import Suite, assert_equal from mypy.options import Options, BuildType from mypy.main import process_options class ArgSuite(Suite): def test_coherence(self) -> None: options = Options() _, parsed_options = process_options([], require_targets=False) assert_equal(options, parsed_options) mypy-0.560/mypy/test/testcheck.py0000644€tŠÔÚ€2›s®0000003635713215007206023212 0ustar jukkaDROPBOX\Domain Users00000000000000"""Type checker test cases""" import os import re import shutil import sys import time import typed_ast from typing import Dict, List, Optional, Set, Tuple from mypy import build, defaults from mypy.main import process_options from mypy.build import BuildSource, find_module_clear_caches from mypy.myunit import AssertionFailure from mypy.test.config import test_temp_dir, test_data_prefix from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite from mypy.test.helpers import ( assert_string_arrays_equal, normalize_error_messages, retry_on_error, testcase_pyversion, update_testcase_output, ) from mypy.errors import CompileError from mypy.options import Options from mypy import experiments # List of files that contain test case descriptions. 
files = [ 'check-basic.test', 'check-callable.test', 'check-classes.test', 'check-statements.test', 'check-generics.test', 'check-dynamic-typing.test', 'check-inference.test', 'check-inference-context.test', 'check-kwargs.test', 'check-overloading.test', 'check-type-checks.test', 'check-abstract.test', 'check-multiple-inheritance.test', 'check-super.test', 'check-modules.test', 'check-typevar-values.test', 'check-unsupported.test', 'check-unreachable-code.test', 'check-unions.test', 'check-isinstance.test', 'check-lists.test', 'check-namedtuple.test', 'check-typeddict.test', 'check-type-aliases.test', 'check-ignore.test', 'check-type-promotion.test', 'check-semanal-error.test', 'check-flags.test', 'check-incremental.test', 'check-serialize.test', 'check-bound.test', 'check-optional.test', 'check-fastparse.test', 'check-warnings.test', 'check-async-await.test', 'check-newtype.test', 'check-class-namedtuple.test', 'check-selftype.test', 'check-python2.test', 'check-columns.test', 'check-functions.test', 'check-tuples.test', 'check-expressions.test', 'check-generic-subtyping.test', 'check-varargs.test', 'check-newsyntax.test', 'check-protocols.test', 'check-underscores.test', 'check-classvar.test', 'check-enum.test', 'check-incomplete-fixture.test', 'check-custom-plugin.test', 'check-default-plugin.test', ] class TypeCheckSuite(DataSuite): @classmethod def cases(cls) -> List[DataDrivenTestCase]: c = [] # type: List[DataDrivenTestCase] for f in files: c += parse_test_cases(os.path.join(test_data_prefix, f), None, test_temp_dir, True) return c def run_case(self, testcase: DataDrivenTestCase) -> None: incremental = ('incremental' in testcase.name.lower() or 'incremental' in testcase.file or 'serialize' in testcase.file) optional = 'optional' in testcase.file old_strict_optional = experiments.STRICT_OPTIONAL try: if incremental: # Incremental tests are run once with a cold cache, once with a warm cache. # Expect success on first run, errors from testcase.output (if any) on second run. # We briefly sleep to make sure file timestamps are distinct. self.clear_cache() num_steps = max([2] + list(testcase.output2.keys())) # Check that there are no file changes beyond the last run (they would be ignored). for dn, dirs, files in os.walk(os.curdir): for file in files: m = re.search(r'\.([2-9])$', file) if m and int(m.group(1)) > num_steps: raise ValueError( 'Output file {} exists though test case only has {} runs'.format( file, num_steps)) for step in range(1, num_steps + 1): self.run_case_once(testcase, step) elif optional: experiments.STRICT_OPTIONAL = True self.run_case_once(testcase) else: self.run_case_once(testcase) finally: experiments.STRICT_OPTIONAL = old_strict_optional def clear_cache(self) -> None: dn = defaults.CACHE_DIR if os.path.exists(dn): shutil.rmtree(dn) def run_case_once(self, testcase: DataDrivenTestCase, incremental_step: int = 0) -> None: find_module_clear_caches() original_program_text = '\n'.join(testcase.input) module_data = self.parse_module(original_program_text, incremental_step) if incremental_step: if incremental_step == 1: # In run 1, copy program text to program file. for module_name, program_path, program_text in module_data: if module_name == '__main__': with open(program_path, 'w') as f: f.write(program_text) break elif incremental_step > 1: # In runs 2+, copy *.[num] files to * files. for dn, dirs, files in os.walk(os.curdir): for file in files: if file.endswith('.' 
+ str(incremental_step)): full = os.path.join(dn, file) target = full[:-2] # Use retries to work around potential flakiness on Windows (AppVeyor). retry_on_error(lambda: shutil.copy(full, target)) # In some systems, mtime has a resolution of 1 second which can cause # annoying-to-debug issues when a file has the same size after a # change. We manually set the mtime to circumvent this. new_time = os.stat(target).st_mtime + 1 os.utime(target, times=(new_time, new_time)) # Delete files scheduled to be deleted in [delete .num] sections. for path in testcase.deleted_paths.get(incremental_step, set()): # Use retries to work around potential flakiness on Windows (AppVeyor). retry_on_error(lambda: os.remove(path)) # Parse options after moving files (in case mypy.ini is being moved). options = self.parse_options(original_program_text, testcase, incremental_step) options.use_builtins_fixtures = True options.show_traceback = True if 'optional' in testcase.file: options.strict_optional = True if incremental_step: options.incremental = True else: options.cache_dir = os.devnull # Don't waste time writing cache sources = [] for module_name, program_path, program_text in module_data: # Always set to none so we're forced to reread the module in incremental mode sources.append(BuildSource(program_path, module_name, None if incremental_step else program_text)) res = None try: res = build.build(sources=sources, options=options, alt_lib_path=test_temp_dir) a = res.errors except CompileError as e: a = e.messages a = normalize_error_messages(a) # Make sure error messages match if incremental_step == 0: # Not incremental msg = 'Unexpected type checker output ({}, line {})' output = testcase.output elif incremental_step == 1: msg = 'Unexpected type checker output in incremental, run 1 ({}, line {})' output = testcase.output elif incremental_step > 1: msg = ('Unexpected type checker output in incremental, run {}'.format( incremental_step) + ' ({}, line {})') output = testcase.output2.get(incremental_step, []) else: raise AssertionError() if output != a and self.update_data: update_testcase_output(testcase, a) assert_string_arrays_equal(output, a, msg.format(testcase.file, testcase.line)) if incremental_step and res: if options.follow_imports == 'normal' and testcase.output is None: self.verify_cache(module_data, a, res.manager) if incremental_step > 1: suffix = '' if incremental_step == 2 else str(incremental_step - 1) self.check_module_equivalence( 'rechecked' + suffix, testcase.expected_rechecked_modules.get(incremental_step - 1), res.manager.rechecked_modules) self.check_module_equivalence( 'stale' + suffix, testcase.expected_stale_modules.get(incremental_step - 1), res.manager.stale_modules) def check_module_equivalence(self, name: str, expected: Optional[Set[str]], actual: Set[str]) -> None: if expected is not None: expected_normalized = sorted(expected) actual_normalized = sorted(actual.difference({"__main__"})) assert_string_arrays_equal( expected_normalized, actual_normalized, ('Actual modules ({}) do not match expected modules ({}) ' 'for "[{} ...]"').format( ', '.join(actual_normalized), ', '.join(expected_normalized), name)) def verify_cache(self, module_data: List[Tuple[str, str, str]], a: List[str], manager: build.BuildManager) -> None: # There should be valid cache metadata for each module except # those in error_paths; for those there should not be. # # NOTE: When A imports B and there's an error in B, the cache # data for B is invalidated, but the cache data for A remains. 
# However build.process_graphs() will ignore A's cache data. # # Also note that when A imports B, and there's an error in A # _due to a valid change in B_, the cache data for B will be # invalidated and updated, but the old cache data for A will # remain unchanged. As before, build.process_graphs() will # ignore A's (old) cache data. error_paths = self.find_error_paths(a) modules = self.find_module_files() modules.update({module_name: path for module_name, path, text in module_data}) missing_paths = self.find_missing_cache_files(modules, manager) if not missing_paths.issubset(error_paths): raise AssertionFailure("cache data discrepancy %s != %s" % (missing_paths, error_paths)) def find_error_paths(self, a: List[str]) -> Set[str]: hits = set() for line in a: m = re.match(r'([^\s:]+):\d+: error:', line) if m: # Normalize to Linux paths. p = m.group(1).replace(os.path.sep, '/') hits.add(p) return hits def find_module_files(self) -> Dict[str, str]: modules = {} for dn, dirs, files in os.walk(test_temp_dir): dnparts = dn.split(os.sep) assert dnparts[0] == test_temp_dir del dnparts[0] for file in files: if file.endswith('.py'): if file == "__init__.py": # If the file path is `a/b/__init__.py`, exclude the file name # and make sure the module id is just `a.b`, not `a.b.__init__`. id = '.'.join(dnparts) else: base, ext = os.path.splitext(file) id = '.'.join(dnparts + [base]) modules[id] = os.path.join(dn, file) return modules def find_missing_cache_files(self, modules: Dict[str, str], manager: build.BuildManager) -> Set[str]: ignore_errors = True missing = {} for id, path in modules.items(): meta = build.find_cache_meta(id, path, manager) if not build.validate_meta(meta, id, path, ignore_errors, manager): missing[id] = path return set(missing.values()) def parse_module(self, program_text: str, incremental_step: int = 0) -> List[Tuple[str, str, str]]: """Return the module and program names for a test case. Normally, the unit tests will parse the default ('__main__') module and follow all the imports listed there. You can override this behavior and instruct the tests to check multiple modules by using a comment like this in the test case input: # cmd: mypy -m foo.bar foo.baz You can also use `# cmdN:` to have a different cmd for incremental step N (2, 3, ...). Return a list of tuples (module name, file name, program text). """ m = re.search('# cmd: mypy -m ([a-zA-Z0-9_. ]+)$', program_text, flags=re.MULTILINE) regex = '# cmd{}: mypy -m ([a-zA-Z0-9_. ]+)$'.format(incremental_step) alt_m = re.search(regex, program_text, flags=re.MULTILINE) if alt_m is not None and incremental_step > 1: # Optionally return a different command if in a later step # of incremental mode, otherwise default to reusing the # original cmd. m = alt_m if m: # The test case wants to use a non-default main # module. Look up the module and give it as the thing to # analyze. 
module_names = m.group(1) out = [] for module_name in module_names.split(' '): path = build.find_module(module_name, [test_temp_dir]) assert path is not None, "Can't find ad hoc case file" with open(path) as f: program_text = f.read() out.append((module_name, path, program_text)) return out else: return [('__main__', 'main', program_text)] def parse_options(self, program_text: str, testcase: DataDrivenTestCase, incremental_step: int) -> Options: options = Options() flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE) if incremental_step > 1: flags2 = re.search('# flags{}: (.*)$'.format(incremental_step), program_text, flags=re.MULTILINE) if flags2: flags = flags2 flag_list = None if flags: flag_list = flags.group(1).split() targets, options = process_options(flag_list, require_targets=False) if targets: # TODO: support specifying targets via the flags pragma raise RuntimeError('Specifying targets via the flags pragma is not supported.') else: options = Options() # Allow custom python version to override testcase_pyversion if (not flag_list or all(flag not in flag_list for flag in ['--python-version', '-2', '--py2'])): options.python_version = testcase_pyversion(testcase.file, testcase.name) return options mypy-0.560/mypy/test/testcmdline.py0000644€tŠÔÚ€2›s®0000001065413215007206023540 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for the command line. To begin we test that "mypy [/]" always recurses down the whole tree. """ import os import re import subprocess import sys from typing import Tuple, List, Dict, Set from mypy.myunit import Suite, SkipTestCaseException, AssertionFailure from mypy.test.config import test_data_prefix, test_temp_dir from mypy.test.data import fix_cobertura_filename from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages from mypy.version import __version__, base_version # Path to Python 3 interpreter python3_path = sys.executable # Files containing test case descriptions. cmdline_files = [ 'cmdline.test', 'reports.test', ] class PythonEvaluationSuite(DataSuite): @classmethod def cases(cls) -> List[DataDrivenTestCase]: c = [] # type: List[DataDrivenTestCase] for f in cmdline_files: c += parse_test_cases(os.path.join(test_data_prefix, f), test_python_evaluation, base_path=test_temp_dir, optional_out=True, native_sep=True) return c def run_case(self, testcase: DataDrivenTestCase) -> None: test_python_evaluation(testcase) def test_python_evaluation(testcase: DataDrivenTestCase) -> None: assert testcase.old_cwd is not None, "test was not properly set up" # Write the program to a file. program = '_program.py' program_path = os.path.join(test_temp_dir, program) with open(program_path, 'w') as file: for s in testcase.input: file.write('{}\n'.format(s)) args = parse_args(testcase.input[0]) args.append('--show-traceback') # Type check the program. fixed = [python3_path, os.path.join(testcase.old_cwd, 'scripts', 'mypy')] process = subprocess.Popen(fixed + args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=test_temp_dir) outb = process.stdout.read() # Split output into lines. out = [s.rstrip('\n\r') for s in str(outb, 'utf8').splitlines()] # Remove temp file. os.remove(program_path) # Compare actual output to expected. 
if testcase.output_files: for path, expected_content in testcase.output_files: if not os.path.exists(path): raise AssertionFailure( 'Expected file {} was not produced by test case'.format(path)) with open(path, 'r') as output_file: actual_output_content = output_file.read().splitlines() normalized_output = normalize_file_output(actual_output_content, os.path.abspath(test_temp_dir)) if testcase.native_sep and os.path.sep == '\\': normalized_output = [fix_cobertura_filename(line) for line in normalized_output] normalized_output = normalize_error_messages(normalized_output) assert_string_arrays_equal(expected_content.splitlines(), normalized_output, 'Output file {} did not match its expected output'.format( path)) else: out = normalize_error_messages(out) assert_string_arrays_equal(testcase.output, out, 'Invalid output ({}, line {})'.format( testcase.file, testcase.line)) def parse_args(line: str) -> List[str]: """Parse the first line of the program for the command line. This should have the form # cmd: mypy For example: # cmd: mypy pkg/ """ m = re.match('# cmd: mypy (.*)$', line) if not m: return [] # No args; mypy will spit out an error. return m.group(1).split() def normalize_file_output(content: List[str], current_abs_path: str) -> List[str]: """Normalize file output for comparison.""" timestamp_regex = re.compile('\d{10}') result = [x.replace(current_abs_path, '$PWD') for x in content] result = [re.sub(r'\b' + re.escape(__version__) + r'\b', '$VERSION', x) for x in result] result = [re.sub(r'\b' + re.escape(base_version) + r'\b', '$VERSION', x) for x in result] result = [timestamp_regex.sub('$TIMESTAMP', x) for x in result] return result mypy-0.560/mypy/test/testdeps.py0000644€tŠÔÚ€2›s®0000000574513215007206023065 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for generating node-level dependencies (for fine-grained incremental checking)""" import os from typing import List, Tuple, Dict, Optional from mypy import build, defaults from mypy.build import BuildSource from mypy.errors import CompileError from mypy.nodes import MypyFile, Expression from mypy.options import Options from mypy.server.deps import get_dependencies from mypy.test.config import test_temp_dir, test_data_prefix from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal from mypy.types import Type files = [ 'deps.test', 'deps-types.test', 'deps-generics.test', 'deps-expressions.test', 'deps-statements.test', 'deps-classes.test', ] class GetDependenciesSuite(DataSuite): @classmethod def cases(cls) -> List[DataDrivenTestCase]: c = [] # type: List[DataDrivenTestCase] for f in files: c += parse_test_cases(os.path.join(test_data_prefix, f), None, test_temp_dir, True) return c def run_case(self, testcase: DataDrivenTestCase) -> None: src = '\n'.join(testcase.input) if testcase.name.endswith('python2'): python_version = defaults.PYTHON2_VERSION else: python_version = defaults.PYTHON3_VERSION messages, files, type_map = self.build(src, python_version) a = messages if files is None or type_map is None: if not a: a = ['Unknown compile error (likely syntax error in test case or fixture)'] else: deps = get_dependencies(files['__main__'], type_map, python_version) for source, targets in sorted(deps.items()): line = '%s -> %s' % (source, ', '.join(sorted(targets))) # Clean up output a bit line = line.replace('__main__', 'm') a.append(line) assert_string_arrays_equal( testcase.output, a, 'Invalid output ({}, line {})'.format(testcase.file, 
testcase.line)) def build(self, source: str, python_version: Tuple[int, int]) -> Tuple[List[str], Optional[Dict[str, MypyFile]], Optional[Dict[Expression, Type]]]: options = Options() options.use_builtins_fixtures = True options.show_traceback = True options.cache_dir = os.devnull options.python_version = python_version try: result = build.build(sources=[BuildSource('main', None, source)], options=options, alt_lib_path=test_temp_dir) except CompileError as e: # TODO: Should perhaps not return None here. return e.messages, None, None return result.errors, result.files, result.types mypy-0.560/mypy/test/testdiff.py0000644€tŠÔÚ€2›s®0000000516713215007206023040 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for AST diff (used for fine-grained incremental checking)""" import os from typing import List, Tuple, Dict, Optional from mypy import build from mypy.build import BuildSource from mypy.errors import CompileError from mypy.nodes import MypyFile from mypy.options import Options from mypy.server.astdiff import snapshot_symbol_table, compare_symbol_table_snapshots from mypy.test.config import test_temp_dir, test_data_prefix from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal files = [ 'diff.test' ] class ASTDiffSuite(DataSuite): @classmethod def cases(cls) -> List[DataDrivenTestCase]: c = [] # type: List[DataDrivenTestCase] for f in files: c += parse_test_cases(os.path.join(test_data_prefix, f), None, test_temp_dir, True) return c def run_case(self, testcase: DataDrivenTestCase) -> None: first_src = '\n'.join(testcase.input) files_dict = dict(testcase.files) second_src = files_dict['tmp/next.py'] messages1, files1 = self.build(first_src) messages2, files2 = self.build(second_src) a = [] if messages1: a.extend(messages1) if messages2: a.append('== next ==') a.extend(messages2) assert files1 is not None and files2 is not None, ('cases where CompileError' ' occurred should not be run') prefix = '__main__' snapshot1 = snapshot_symbol_table(prefix, files1['__main__'].names) snapshot2 = snapshot_symbol_table(prefix, files2['__main__'].names) diff = compare_symbol_table_snapshots(prefix, snapshot1, snapshot2) for trigger in sorted(diff): a.append(trigger) assert_string_arrays_equal( testcase.output, a, 'Invalid output ({}, line {})'.format(testcase.file, testcase.line)) def build(self, source: str) -> Tuple[List[str], Optional[Dict[str, MypyFile]]]: options = Options() options.use_builtins_fixtures = True options.show_traceback = True options.cache_dir = os.devnull try: result = build.build(sources=[BuildSource('main', None, source)], options=options, alt_lib_path=test_temp_dir) except CompileError as e: # TODO: Is it okay to return None? 
return e.messages, None return result.errors, result.files mypy-0.560/mypy/test/testdmypy.py0000644€tŠÔÚ€2›s®0000003304413215007206023265 0ustar jukkaDROPBOX\Domain Users00000000000000"""Type checker test cases""" import os import re import shutil import sys import time import typed_ast from typing import Dict, List, Optional, Set, Tuple from mypy import build from mypy import defaults from mypy.main import process_options from mypy.myunit import AssertionFailure from mypy.test.config import test_temp_dir, test_data_prefix from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite from mypy.test.helpers import ( assert_string_arrays_equal, normalize_error_messages, retry_on_error, testcase_pyversion, update_testcase_output, ) from mypy.errors import CompileError from mypy.options import Options from mypy import experiments from mypy import dmypy from mypy import dmypy_server # List of files that contain test case descriptions. files = [ 'check-enum.test', 'check-incremental.test', 'check-newtype.test', ] class TypeCheckSuite(DataSuite): @classmethod def cases(cls) -> List[DataDrivenTestCase]: if sys.platform == 'win32': return [] # Nothing here works on Windows. c = [] # type: List[DataDrivenTestCase] for f in files: tc = parse_test_cases(os.path.join(test_data_prefix, f), None, test_temp_dir, True) c += [case for case in tc if cls.has_stable_flags(case) and cls.is_incremental(case)] return c def run_case(self, testcase: DataDrivenTestCase) -> None: assert self.is_incremental(testcase), "Testcase is not incremental" assert self.has_stable_flags(testcase), "Testcase has varying flags" # All tests run once with a cold cache, then at least once # with a warm cache and maybe changed files. Expected output # is specified separately for each run. self.clear_cache() num_steps = max([2] + list(testcase.output2.keys())) # Check that there are no file changes beyond the last run (they would be ignored). for dn, dirs, files in os.walk(os.curdir): for file in files: m = re.search(r'\.([2-9])$', file) if m and int(m.group(1)) > num_steps: raise ValueError( 'Output file {} exists though test case only has {} runs'.format( file, num_steps)) self.server = None # type: Optional[dmypy_server.Server] for step in range(1, num_steps + 1): self.run_case_once(testcase, step) @classmethod def is_incremental(cls, testcase: DataDrivenTestCase) -> bool: return 'incremental' in testcase.name.lower() or 'incremental' in testcase.file @classmethod def has_stable_flags(cls, testcase: DataDrivenTestCase) -> bool: if any(re.match(r'# flags[2-9]:', line) for line in testcase.input): return False for filename, contents in testcase.files: if os.path.basename(filename).startswith('mypy.ini.'): return False return True def clear_cache(self) -> None: dn = defaults.CACHE_DIR if os.path.exists(dn): shutil.rmtree(dn) def run_case_once(self, testcase: DataDrivenTestCase, incremental_step: int) -> None: assert incremental_step >= 1 build.find_module_clear_caches() original_program_text = '\n'.join(testcase.input) module_data = self.parse_module(original_program_text, incremental_step) if incremental_step == 1: # In run 1, copy program text to program file. for module_name, program_path, program_text in module_data: if module_name == '__main__': with open(program_path, 'w') as f: f.write(program_text) break elif incremental_step > 1: # In runs 2+, copy *.[num] files to * files. for dn, dirs, files in os.walk(os.curdir): for file in files: if file.endswith('.' 
+ str(incremental_step)): full = os.path.join(dn, file) target = full[:-2] # Use retries to work around potential flakiness on Windows (AppVeyor). retry_on_error(lambda: shutil.copy(full, target)) # In some systems, mtime has a resolution of 1 second which can cause # annoying-to-debug issues when a file has the same size after a # change. We manually set the mtime to circumvent this. new_time = os.stat(target).st_mtime + 1 os.utime(target, times=(new_time, new_time)) # Delete files scheduled to be deleted in [delete .num] sections. for path in testcase.deleted_paths.get(incremental_step, set()): # Use retries to work around potential flakiness on Windows (AppVeyor). retry_on_error(lambda: os.remove(path)) # Parse options after moving files (in case mypy.ini is being moved). options = self.parse_options(original_program_text, testcase, incremental_step) if incremental_step == 1: self.server = dmypy_server.Server([]) # TODO: Fix ugly API self.server.options = options assert self.server is not None # Set in step 1 and survives into next steps sources = [] for module_name, program_path, program_text in module_data: # Always set to none so we're forced to reread the module in incremental mode sources.append(build.BuildSource(program_path, module_name, None)) response = self.server.check(sources, alt_lib_path=test_temp_dir) a = (response['out'] or response['err']).splitlines() a = normalize_error_messages(a) # Make sure error messages match if incremental_step == 1: msg = 'Unexpected type checker output in incremental, run 1 ({}, line {})' output = testcase.output elif incremental_step > 1: msg = ('Unexpected type checker output in incremental, run {}'.format( incremental_step) + ' ({}, line {})') output = testcase.output2.get(incremental_step, []) else: raise AssertionError() if output != a and self.update_data: update_testcase_output(testcase, a) assert_string_arrays_equal(output, a, msg.format(testcase.file, testcase.line)) manager = self.server.last_manager if manager is not None: if options.follow_imports == 'normal' and testcase.output is None: self.verify_cache(module_data, a, manager) if incremental_step > 1: suffix = '' if incremental_step == 2 else str(incremental_step - 1) self.check_module_equivalence( 'rechecked' + suffix, testcase.expected_rechecked_modules.get(incremental_step - 1), manager.rechecked_modules) self.check_module_equivalence( 'stale' + suffix, testcase.expected_stale_modules.get(incremental_step - 1), manager.stale_modules) def check_module_equivalence(self, name: str, expected: Optional[Set[str]], actual: Set[str]) -> None: if expected is not None: expected_normalized = sorted(expected) actual_normalized = sorted(actual.difference({"__main__"})) assert_string_arrays_equal( expected_normalized, actual_normalized, ('Actual modules ({}) do not match expected modules ({}) ' 'for "[{} ...]"').format( ', '.join(actual_normalized), ', '.join(expected_normalized), name)) def verify_cache(self, module_data: List[Tuple[str, str, str]], a: List[str], manager: build.BuildManager) -> None: # There should be valid cache metadata for each module except # those in error_paths; for those there should not be. # # NOTE: When A imports B and there's an error in B, the cache # data for B is invalidated, but the cache data for A remains. # However build.process_graphs() will ignore A's cache data. 
# # Also note that when A imports B, and there's an error in A # _due to a valid change in B_, the cache data for B will be # invalidated and updated, but the old cache data for A will # remain unchanged. As before, build.process_graphs() will # ignore A's (old) cache data. error_paths = self.find_error_paths(a) modules = self.find_module_files() modules.update({module_name: path for module_name, path, text in module_data}) missing_paths = self.find_missing_cache_files(modules, manager) if not missing_paths.issubset(error_paths): raise AssertionFailure("cache data discrepancy %s != %s" % (missing_paths, error_paths)) def find_error_paths(self, a: List[str]) -> Set[str]: hits = set() for line in a: m = re.match(r'([^\s:]+):\d+: error:', line) if m: # Normalize to Linux paths. p = m.group(1).replace(os.path.sep, '/') hits.add(p) return hits def find_module_files(self) -> Dict[str, str]: modules = {} for dn, dirs, files in os.walk(test_temp_dir): dnparts = dn.split(os.sep) assert dnparts[0] == test_temp_dir del dnparts[0] for file in files: if file.endswith('.py'): if file == "__init__.py": # If the file path is `a/b/__init__.py`, exclude the file name # and make sure the module id is just `a.b`, not `a.b.__init__`. id = '.'.join(dnparts) else: base, ext = os.path.splitext(file) id = '.'.join(dnparts + [base]) modules[id] = os.path.join(dn, file) return modules def find_missing_cache_files(self, modules: Dict[str, str], manager: build.BuildManager) -> Set[str]: ignore_errors = True missing = {} for id, path in modules.items(): meta = build.find_cache_meta(id, path, manager) if not build.validate_meta(meta, id, path, ignore_errors, manager): missing[id] = path return set(missing.values()) def parse_module(self, program_text: str, incremental_step: int) -> List[Tuple[str, str, str]]: """Return the module and program names for a test case. Normally, the unit tests will parse the default ('__main__') module and follow all the imports listed there. You can override this behavior and instruct the tests to check multiple modules by using a comment like this in the test case input: # cmd: mypy -m foo.bar foo.baz You can also use `# cmdN:` to have a different cmd for incremental step N (2, 3, ...). Return a list of tuples (module name, file name, program text). """ m = re.search('# cmd: mypy -m ([a-zA-Z0-9_. ]+)$', program_text, flags=re.MULTILINE) regex = '# cmd{}: mypy -m ([a-zA-Z0-9_. ]+)$'.format(incremental_step) alt_m = re.search(regex, program_text, flags=re.MULTILINE) if alt_m is not None and incremental_step > 1: # Optionally return a different command if in a later step # of incremental mode, otherwise default to reusing the # original cmd. m = alt_m if m: # The test case wants to use a non-default main # module. Look up the module and give it as the thing to # analyze. 
module_names = m.group(1) out = [] for module_name in module_names.split(' '): path = build.find_module(module_name, [test_temp_dir]) assert path is not None, "Can't find ad hoc case file" with open(path) as f: program_text = f.read() out.append((module_name, path, program_text)) return out else: return [('__main__', 'main', program_text)] def parse_options(self, program_text: str, testcase: DataDrivenTestCase, incremental_step: int) -> Options: options = Options() flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE) if incremental_step > 1: flags2 = re.search('# flags{}: (.*)$'.format(incremental_step), program_text, flags=re.MULTILINE) if flags2: flags = flags2 flag_list = None if flags: flag_list = flags.group(1).split() targets, options = process_options(flag_list, require_targets=False) if targets: raise RuntimeError('Specifying targets via the flags pragma is not supported.') else: options = Options() # Allow custom python version to override testcase_pyversion if (not flag_list or all(flag not in flag_list for flag in ['--python-version', '-2', '--py2'])): options.python_version = testcase_pyversion(testcase.file, testcase.name) options.use_builtins_fixtures = True options.show_traceback = True options.incremental = True return options mypy-0.560/mypy/test/testextensions.py0000644€tŠÔÚ€2›s®0000001241013215007205024313 0ustar jukkaDROPBOX\Domain Users00000000000000import sys import pickle import typing try: import collections.abc as collections_abc except ImportError: import collections as collections_abc # type: ignore # PY32 and earlier from unittest import TestCase, main, skipUnless sys.path[0:0] = ['extensions'] from mypy_extensions import TypedDict class BaseTestCase(TestCase): def assertIsSubclass(self, cls, class_or_tuple, msg=None): if not issubclass(cls, class_or_tuple): message = '%r is not a subclass of %r' % (cls, class_or_tuple) if msg is not None: message += ' : %s' % msg raise self.failureException(message) def assertNotIsSubclass(self, cls, class_or_tuple, msg=None): if issubclass(cls, class_or_tuple): message = '%r is a subclass of %r' % (cls, class_or_tuple) if msg is not None: message += ' : %s' % msg raise self.failureException(message) PY36 = sys.version_info[:2] >= (3, 6) PY36_TESTS = """ Label = TypedDict('Label', [('label', str)]) class Point2D(TypedDict): x: int y: int class LabelPoint2D(Point2D, Label): ... 
class Options(TypedDict, total=False): log_level: int log_path: str """ if PY36: exec(PY36_TESTS) class TypedDictTests(BaseTestCase): def test_basics_iterable_syntax(self): Emp = TypedDict('Emp', {'name': str, 'id': int}) self.assertIsSubclass(Emp, dict) self.assertIsSubclass(Emp, typing.MutableMapping) self.assertNotIsSubclass(Emp, collections_abc.Sequence) jim = Emp(name='Jim', id=1) self.assertIs(type(jim), dict) self.assertEqual(jim['name'], 'Jim') self.assertEqual(jim['id'], 1) self.assertEqual(Emp.__name__, 'Emp') self.assertEqual(Emp.__module__, 'mypy.test.testextensions') self.assertEqual(Emp.__bases__, (dict,)) self.assertEqual(Emp.__annotations__, {'name': str, 'id': int}) self.assertEqual(Emp.__total__, True) def test_basics_keywords_syntax(self): Emp = TypedDict('Emp', name=str, id=int) self.assertIsSubclass(Emp, dict) self.assertIsSubclass(Emp, typing.MutableMapping) self.assertNotIsSubclass(Emp, collections_abc.Sequence) jim = Emp(name='Jim', id=1) # type: ignore # mypy doesn't support keyword syntax yet self.assertIs(type(jim), dict) self.assertEqual(jim['name'], 'Jim') self.assertEqual(jim['id'], 1) self.assertEqual(Emp.__name__, 'Emp') self.assertEqual(Emp.__module__, 'mypy.test.testextensions') self.assertEqual(Emp.__bases__, (dict,)) self.assertEqual(Emp.__annotations__, {'name': str, 'id': int}) self.assertEqual(Emp.__total__, True) def test_typeddict_errors(self): Emp = TypedDict('Emp', {'name': str, 'id': int}) self.assertEqual(TypedDict.__module__, 'mypy_extensions') jim = Emp(name='Jim', id=1) with self.assertRaises(TypeError): isinstance({}, Emp) # type: ignore with self.assertRaises(TypeError): isinstance(jim, Emp) # type: ignore with self.assertRaises(TypeError): issubclass(dict, Emp) # type: ignore with self.assertRaises(TypeError): TypedDict('Hi', x=1) with self.assertRaises(TypeError): TypedDict('Hi', [('x', int), ('y', 1)]) with self.assertRaises(TypeError): TypedDict('Hi', [('x', int)], y=int) @skipUnless(PY36, 'Python 3.6 required') def test_py36_class_syntax_usage(self): self.assertEqual(LabelPoint2D.__annotations__, {'x': int, 'y': int, 'label': str}) # noqa self.assertEqual(LabelPoint2D.__bases__, (dict,)) # noqa self.assertEqual(LabelPoint2D.__total__, True) # noqa self.assertNotIsSubclass(LabelPoint2D, typing.Sequence) # noqa not_origin = Point2D(x=0, y=1) # noqa self.assertEqual(not_origin['x'], 0) self.assertEqual(not_origin['y'], 1) other = LabelPoint2D(x=0, y=1, label='hi') # noqa self.assertEqual(other['label'], 'hi') def test_pickle(self): global EmpD # pickle wants to reference the class by name EmpD = TypedDict('EmpD', name=str, id=int) jane = EmpD({'name': 'jane', 'id': 37}) for proto in range(pickle.HIGHEST_PROTOCOL + 1): z = pickle.dumps(jane, proto) jane2 = pickle.loads(z) self.assertEqual(jane2, jane) self.assertEqual(jane2, {'name': 'jane', 'id': 37}) ZZ = pickle.dumps(EmpD, proto) EmpDnew = pickle.loads(ZZ) self.assertEqual(EmpDnew({'name': 'jane', 'id': 37}), jane) def test_optional(self): EmpD = TypedDict('EmpD', name=str, id=int) self.assertEqual(typing.Optional[EmpD], typing.Union[None, EmpD]) self.assertNotEqual(typing.List[EmpD], typing.Tuple[EmpD]) def test_total(self): D = TypedDict('D', {'x': int}, total=False) self.assertEqual(D(), {}) self.assertEqual(D(x=1), {'x': 1}) self.assertEqual(D.__total__, False) if PY36: self.assertEqual(Options(), {}) # noqa self.assertEqual(Options(log_level=2), {'log_level': 2}) # noqa self.assertEqual(Options.__total__, False) # noqa if __name__ == '__main__': main() 
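A minimal usage sketch (not part of the mypy-0.560 sources) of the two TypedDict definition styles that testextensions.py above exercises. It assumes mypy_extensions is importable; the Movie and RunOptions names are illustrative only, and the runtime behavior shown (plain-dict values, the __total__ flag) is exactly what the tests above assert.

from mypy_extensions import TypedDict

# Functional syntax with a field dict -- works on all supported Python versions.
Movie = TypedDict('Movie', {'title': str, 'year': int})

# total=False makes every key optional, mirroring test_total above.
RunOptions = TypedDict('RunOptions', {'log_level': int, 'log_path': str}, total=False)

m = Movie(title='Blade Runner', year=1982)
assert type(m) is dict          # at runtime a TypedDict value is an ordinary dict
assert Movie.__total__          # all keys required by default
assert not RunOptions.__total__
assert RunOptions() == {}       # every key may be omitted when total=False
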
mypy-0.560/mypy/test/testfinegrained.py0000644€tŠÔÚ€2›s®0000000746013215007206024401 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for fine-grained incremental checking. Each test cases runs a batch build followed by one or more fine-grained incremental steps. We verify that each step produces the expected output. See the comment at the top of test-data/unit/fine-grained.test for more information. """ import os import re import shutil from typing import List, Tuple, Dict, Optional, Set from mypy import build from mypy.build import BuildManager, BuildSource, Graph from mypy.errors import Errors, CompileError from mypy.nodes import Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression from mypy.options import Options from mypy.server.astmerge import merge_asts from mypy.server.subexpr import get_subexpressions from mypy.server.update import FineGrainedBuildManager from mypy.strconv import StrConv, indent from mypy.test.config import test_temp_dir, test_data_prefix from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite, UpdateFile from mypy.test.helpers import assert_string_arrays_equal from mypy.test.testtypegen import ignore_node from mypy.types import TypeStrVisitor, Type from mypy.util import short_type files = [ 'fine-grained.test', 'fine-grained-cycles.test', 'fine-grained-blockers.test', 'fine-grained-modules.test', ] class FineGrainedSuite(DataSuite): @classmethod def cases(cls) -> List[DataDrivenTestCase]: c = [] # type: List[DataDrivenTestCase] for f in files: c += parse_test_cases(os.path.join(test_data_prefix, f), None, test_temp_dir, True) return c def run_case(self, testcase: DataDrivenTestCase) -> None: main_src = '\n'.join(testcase.input) messages, manager, graph = self.build(main_src) a = [] if messages: a.extend(normalize_messages(messages)) fine_grained_manager = FineGrainedBuildManager(manager, graph) steps = testcase.find_steps() for operations in steps: modules = [] for op in operations: if isinstance(op, UpdateFile): # Modify/create file shutil.copy(op.source_path, op.target_path) modules.append((op.module, op.target_path)) else: # Delete file os.remove(op.path) modules.append((op.module, op.path)) new_messages = fine_grained_manager.update(modules) new_messages = normalize_messages(new_messages) a.append('==') a.extend(new_messages) # Normalize paths in test output (for Windows). 
a = [line.replace('\\', '/') for line in a] assert_string_arrays_equal( testcase.output, a, 'Invalid output ({}, line {})'.format(testcase.file, testcase.line)) def build(self, source: str) -> Tuple[List[str], BuildManager, Graph]: options = Options() options.incremental = True options.use_builtins_fixtures = True options.show_traceback = True main_path = os.path.join(test_temp_dir, 'main') with open(main_path, 'w') as f: f.write(source) try: result = build.build(sources=[BuildSource(main_path, None, None)], options=options, alt_lib_path=test_temp_dir) except CompileError as e: # TODO: We need a manager and a graph in this case as well assert False, str('\n'.join(e.messages)) return e.messages, None, None return result.errors, result.manager, result.graph def normalize_messages(messages: List[str]) -> List[str]: return [re.sub('^tmp' + re.escape(os.sep), '', message) for message in messages] mypy-0.560/mypy/test/testgraph.py0000644€tŠÔÚ€2›s®0000000537513215007205023231 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for graph processing code in build.py.""" from typing import AbstractSet, Dict, Set, List from mypy.myunit import Suite, assert_equal from mypy.build import BuildManager, State, BuildSourceSet from mypy.build import topsort, strongly_connected_components, sorted_components, order_ascc from mypy.version import __version__ from mypy.options import Options from mypy.report import Reports from mypy.plugin import Plugin from mypy import defaults from mypy.errors import Errors class GraphSuite(Suite): def test_topsort(self) -> None: a = frozenset({'A'}) b = frozenset({'B'}) c = frozenset({'C'}) d = frozenset({'D'}) data = {a: {b, c}, b: {d}, c: {d}} # type: Dict[AbstractSet[str], Set[AbstractSet[str]]] res = list(topsort(data)) assert_equal(res, [{d}, {b, c}, {a}]) def test_scc(self) -> None: vertices = {'A', 'B', 'C', 'D'} edges = {'A': ['B', 'C'], 'B': ['C'], 'C': ['B', 'D'], 'D': []} # type: Dict[str, List[str]] sccs = set(frozenset(x) for x in strongly_connected_components(vertices, edges)) assert_equal(sccs, {frozenset({'A'}), frozenset({'B', 'C'}), frozenset({'D'})}) def _make_manager(self) -> BuildManager: errors = Errors() options = Options() manager = BuildManager( data_dir='', lib_path=[], ignore_prefix='', source_set=BuildSourceSet([]), reports=Reports('', {}), options=options, version_id=__version__, plugin=Plugin(options), errors=errors, ) return manager def test_sorted_components(self) -> None: manager = self._make_manager() graph = {'a': State('a', None, 'import b, c', manager), 'd': State('d', None, 'pass', manager), 'b': State('b', None, 'import c', manager), 'c': State('c', None, 'import b, d', manager), } res = sorted_components(graph) assert_equal(res, [frozenset({'d'}), frozenset({'c', 'b'}), frozenset({'a'})]) def test_order_ascc(self) -> None: manager = self._make_manager() graph = {'a': State('a', None, 'import b, c', manager), 'd': State('d', None, 'def f(): import a', manager), 'b': State('b', None, 'import c', manager), 'c': State('c', None, 'import b, d', manager), } res = sorted_components(graph) assert_equal(res, [frozenset({'a', 'd', 'c', 'b'})]) ascc = res[0] scc = order_ascc(graph, ascc) assert_equal(scc, ['d', 'c', 'b', 'a']) mypy-0.560/mypy/test/testinfer.py0000644€tŠÔÚ€2›s®0000001572613215007205023234 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for type inference helper functions.""" from typing import List, Optional, Tuple, Union from mypy.myunit import Suite, assert_equal, assert_true from mypy.checkexpr import 
map_actuals_to_formals from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED from mypy.types import AnyType, TupleType, Type, TypeOfAny from mypy.test.typefixture import TypeFixture class MapActualsToFormalsSuite(Suite): """Test cases for checkexpr.map_actuals_to_formals.""" def test_basic(self) -> None: self.assert_map([], [], []) def test_positional_only(self) -> None: self.assert_map([ARG_POS], [ARG_POS], [[0]]) self.assert_map([ARG_POS, ARG_POS], [ARG_POS, ARG_POS], [[0], [1]]) def test_optional(self) -> None: self.assert_map([], [ARG_OPT], [[]]) self.assert_map([ARG_POS], [ARG_OPT], [[0]]) self.assert_map([ARG_POS], [ARG_OPT, ARG_OPT], [[0], []]) def test_callee_star(self) -> None: self.assert_map([], [ARG_STAR], [[]]) self.assert_map([ARG_POS], [ARG_STAR], [[0]]) self.assert_map([ARG_POS, ARG_POS], [ARG_STAR], [[0, 1]]) def test_caller_star(self) -> None: self.assert_map([ARG_STAR], [ARG_STAR], [[0]]) self.assert_map([ARG_POS, ARG_STAR], [ARG_STAR], [[0, 1]]) self.assert_map([ARG_STAR], [ARG_POS, ARG_STAR], [[0], [0]]) self.assert_map([ARG_STAR], [ARG_OPT, ARG_STAR], [[0], [0]]) def test_too_many_caller_args(self) -> None: self.assert_map([ARG_POS], [], []) self.assert_map([ARG_STAR], [], []) self.assert_map([ARG_STAR], [ARG_POS], [[0]]) def test_tuple_star(self) -> None: any_type = AnyType(TypeOfAny.special_form) self.assert_vararg_map( [ARG_STAR], [ARG_POS], [[0]], self.tuple(any_type)) self.assert_vararg_map( [ARG_STAR], [ARG_POS, ARG_POS], [[0], [0]], self.tuple(any_type, any_type)) self.assert_vararg_map( [ARG_STAR], [ARG_POS, ARG_OPT, ARG_OPT], [[0], [0], []], self.tuple(any_type, any_type)) def tuple(self, *args: Type) -> TupleType: return TupleType(list(args), TypeFixture().std_tuple) def test_named_args(self) -> None: self.assert_map( ['x'], [(ARG_POS, 'x')], [[0]]) self.assert_map( ['y', 'x'], [(ARG_POS, 'x'), (ARG_POS, 'y')], [[1], [0]]) def test_some_named_args(self) -> None: self.assert_map( ['y'], [(ARG_OPT, 'x'), (ARG_OPT, 'y'), (ARG_OPT, 'z')], [[], [0], []]) def test_missing_named_arg(self) -> None: self.assert_map( ['y'], [(ARG_OPT, 'x')], [[]]) def test_duplicate_named_arg(self) -> None: self.assert_map( ['x', 'x'], [(ARG_OPT, 'x')], [[0, 1]]) def test_varargs_and_bare_asterisk(self) -> None: self.assert_map( [ARG_STAR], [ARG_STAR, (ARG_NAMED, 'x')], [[0], []]) self.assert_map( [ARG_STAR, 'x'], [ARG_STAR, (ARG_NAMED, 'x')], [[0], [1]]) def test_keyword_varargs(self) -> None: self.assert_map( ['x'], [ARG_STAR2], [[0]]) self.assert_map( ['x', ARG_STAR2], [ARG_STAR2], [[0, 1]]) self.assert_map( ['x', ARG_STAR2], [(ARG_POS, 'x'), ARG_STAR2], [[0], [1]]) self.assert_map( [ARG_POS, ARG_STAR2], [(ARG_POS, 'x'), ARG_STAR2], [[0], [1]]) def test_both_kinds_of_varargs(self) -> None: self.assert_map( [ARG_STAR, ARG_STAR2], [(ARG_POS, 'x'), (ARG_POS, 'y')], [[0, 1], [0, 1]]) def test_special_cases(self) -> None: self.assert_map([ARG_STAR], [ARG_STAR, ARG_STAR2], [[0], []]) self.assert_map([ARG_STAR, ARG_STAR2], [ARG_STAR, ARG_STAR2], [[0], [1]]) self.assert_map([ARG_STAR2], [(ARG_POS, 'x'), ARG_STAR2], [[0], [0]]) self.assert_map([ARG_STAR2], [ARG_STAR2], [[0]]) def assert_map(self, caller_kinds_: List[Union[int, str]], callee_kinds_: List[Union[int, Tuple[int, str]]], expected: List[List[int]], ) -> None: caller_kinds, caller_names = expand_caller_kinds(caller_kinds_) callee_kinds, callee_names = expand_callee_kinds(callee_kinds_) result = map_actuals_to_formals( caller_kinds, caller_names, callee_kinds, callee_names, lambda i: 
AnyType(TypeOfAny.special_form)) assert_equal(result, expected) def assert_vararg_map(self, caller_kinds: List[int], callee_kinds: List[int], expected: List[List[int]], vararg_type: Type, ) -> None: result = map_actuals_to_formals( caller_kinds, [], callee_kinds, [], lambda i: vararg_type) assert_equal(result, expected) def expand_caller_kinds(kinds_or_names: List[Union[int, str]] ) -> Tuple[List[int], List[Optional[str]]]: kinds = [] names = [] # type: List[Optional[str]] for k in kinds_or_names: if isinstance(k, str): kinds.append(ARG_NAMED) names.append(k) else: kinds.append(k) names.append(None) return kinds, names def expand_callee_kinds(kinds_and_names: List[Union[int, Tuple[int, str]]] ) -> Tuple[List[int], List[Optional[str]]]: kinds = [] names = [] # type: List[Optional[str]] for v in kinds_and_names: if isinstance(v, tuple): kinds.append(v[0]) names.append(v[1]) else: kinds.append(v) names.append(None) return kinds, names mypy-0.560/mypy/test/testmerge.py0000644€tŠÔÚ€2›s®0000002072213215007206023221 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for AST merge (used for fine-grained incremental checking)""" import os import shutil from typing import List, Tuple, Dict, Optional from mypy import build from mypy.build import BuildManager, BuildSource, State from mypy.errors import Errors, CompileError from mypy.nodes import ( Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression, UNBOUND_IMPORTED ) from mypy.options import Options from mypy.server.astmerge import merge_asts from mypy.server.subexpr import get_subexpressions from mypy.server.update import FineGrainedBuildManager from mypy.strconv import StrConv, indent from mypy.test.config import test_temp_dir, test_data_prefix from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages from mypy.test.testtypegen import ignore_node from mypy.types import TypeStrVisitor, Type from mypy.util import short_type, IdMapper files = [ 'merge.test' ] # Which data structures to dump in a test case? SYMTABLE = 'SYMTABLE' TYPEINFO = ' TYPEINFO' TYPES = 'TYPES' AST = 'AST' NOT_DUMPED_MODULES = ('builtins', 'typing', 'abc') class ASTMergeSuite(DataSuite): def __init__(self, *, update_data: bool) -> None: super().__init__(update_data=update_data) self.str_conv = StrConv(show_ids=True) assert self.str_conv.id_mapper is not None self.id_mapper = self.str_conv.id_mapper # type: IdMapper self.type_str_conv = TypeStrVisitor(self.id_mapper) @classmethod def cases(cls) -> List[DataDrivenTestCase]: c = [] # type: List[DataDrivenTestCase] for f in files: c += parse_test_cases(os.path.join(test_data_prefix, f), None, test_temp_dir, True) return c def run_case(self, testcase: DataDrivenTestCase) -> None: name = testcase.name # We use the test case name to decide which data structures to dump. # Dumping everything would result in very verbose test cases. 
if name.endswith('_symtable'): kind = SYMTABLE elif name.endswith('_typeinfo'): kind = TYPEINFO elif name.endswith('_types'): kind = TYPES else: kind = AST main_src = '\n'.join(testcase.input) messages, manager, graph = self.build(main_src) assert manager is not None, 'cases where CompileError occurred should not be run' fine_grained_manager = FineGrainedBuildManager(manager, graph) a = [] if messages: a.extend(messages) target_path = os.path.join(test_temp_dir, 'target.py') shutil.copy(os.path.join(test_temp_dir, 'target.py.next'), target_path) a.extend(self.dump(manager, kind)) old_subexpr = get_subexpressions(manager.modules['target']) a.append('==>') new_file, new_types = self.build_increment(fine_grained_manager, 'target', target_path) a.extend(self.dump(manager, kind)) for expr in old_subexpr: # Verify that old AST nodes are removed from the expression type map. assert expr not in new_types a = normalize_error_messages(a) assert_string_arrays_equal( testcase.output, a, 'Invalid output ({}, line {})'.format(testcase.file, testcase.line)) def build(self, source: str) -> Tuple[List[str], Optional[BuildManager], Dict[str, State]]: options = Options() options.incremental = True options.use_builtins_fixtures = True options.show_traceback = True main_path = os.path.join(test_temp_dir, 'main') with open(main_path, 'w') as f: f.write(source) try: result = build.build(sources=[BuildSource(main_path, None, None)], options=options, alt_lib_path=test_temp_dir) except CompileError as e: # TODO: Is it okay to return None? return e.messages, None, {} return result.errors, result.manager, result.graph def build_increment(self, manager: FineGrainedBuildManager, module_id: str, path: str) -> Tuple[MypyFile, Dict[Expression, Type]]: manager.update([(module_id, path)]) module = manager.manager.modules[module_id] type_map = manager.type_maps[module_id] return module, type_map def dump(self, manager: BuildManager, kind: str) -> List[str]: modules = manager.modules if kind == AST: return self.dump_asts(modules) elif kind == TYPEINFO: return self.dump_typeinfos(modules) elif kind == SYMTABLE: return self.dump_symbol_tables(modules) elif kind == TYPES: return self.dump_types(manager) assert False, 'Invalid kind %s' % kind def dump_asts(self, modules: Dict[str, MypyFile]) -> List[str]: a = [] for m in sorted(modules): if m in NOT_DUMPED_MODULES: # We don't support incremental checking of changes to builtins, etc. continue s = modules[m].accept(self.str_conv) a.extend(s.splitlines()) return a def dump_symbol_tables(self, modules: Dict[str, MypyFile]) -> List[str]: a = [] for id in sorted(modules): if not is_dumped_module(id): # We don't support incremental checking of changes to builtins, etc. continue a.extend(self.dump_symbol_table(id, modules[id].names)) return a def dump_symbol_table(self, module_id: str, symtable: SymbolTable) -> List[str]: a = ['{}:'.format(module_id)] for name in sorted(symtable): if name.startswith('__'): continue a.append(' {}: {}'.format(name, self.format_symbol_table_node(symtable[name]))) return a def format_symbol_table_node(self, node: SymbolTableNode) -> str: if node.node is None: if node.kind == UNBOUND_IMPORTED: return 'UNBOUND_IMPORTED' return 'None' if isinstance(node.node, Node): s = '{}<{}>'.format(str(type(node.node).__name__), self.id_mapper.id(node.node)) else: s = '? 
({})'.format(type(node.node)) if node.type_override: override = self.format_type(node.type_override) s += '(type_override={})'.format(override) return s def dump_typeinfos(self, modules: Dict[str, MypyFile]) -> List[str]: a = [] for id in sorted(modules): if not is_dumped_module(id): continue a.extend(self.dump_typeinfos_recursive(modules[id].names)) return a def dump_typeinfos_recursive(self, names: SymbolTable) -> List[str]: a = [] for name, node in sorted(names.items(), key=lambda x: x[0]): if isinstance(node.node, TypeInfo): a.extend(self.dump_typeinfo(node.node)) a.extend(self.dump_typeinfos_recursive(node.node.names)) return a def dump_typeinfo(self, info: TypeInfo) -> List[str]: s = info.dump(str_conv=self.str_conv, type_str_conv=self.type_str_conv) return s.splitlines() def dump_types(self, manager: BuildManager) -> List[str]: a = [] # To make the results repeatable, we try to generate unique and # deterministic sort keys. for module_id in sorted(manager.modules): if not is_dumped_module(module_id): continue type_map = manager.saved_cache[module_id][2] if type_map: a.append('## {}'.format(module_id)) for expr in sorted(type_map, key=lambda n: (n.line, short_type(n), str(n) + str(type_map[n]))): typ = type_map[expr] a.append('{}:{}: {}'.format(short_type(expr), expr.line, self.format_type(typ))) return a def format_type(self, typ: Type) -> str: return typ.accept(self.type_str_conv) def is_dumped_module(id: str) -> bool: return id not in NOT_DUMPED_MODULES and (not id.startswith('_') or id == '__main__') mypy-0.560/mypy/test/testmoduleinfo.py0000644€tŠÔÚ€2›s®0000000121413215007205024255 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy import moduleinfo from mypy.myunit import ( Suite, assert_equal, assert_true, assert_false ) class ModuleInfoSuite(Suite): def test_is_in_module_collection(self) -> None: assert_true(moduleinfo.is_in_module_collection({'foo'}, 'foo')) assert_true(moduleinfo.is_in_module_collection({'foo'}, 'foo.bar')) assert_false(moduleinfo.is_in_module_collection({'foo'}, 'fo')) assert_true(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo.bar')) assert_true(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo.bar.zar')) assert_false(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo')) mypy-0.560/mypy/test/testparse.py0000644€tŠÔÚ€2›s®0000000607613215007206023242 0ustar jukkaDROPBOX\Domain Users00000000000000"""Tests for the mypy parser.""" import os.path from typing import List from mypy import defaults from mypy.myunit import Suite, AssertionFailure from mypy.test.helpers import assert_string_arrays_equal from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite from mypy.test import config from mypy.parse import parse from mypy.errors import CompileError from mypy.options import Options class ParserSuite(DataSuite): parse_files = ['parse.test', 'parse-python2.test'] @classmethod def cases(cls) -> List[DataDrivenTestCase]: # The test case descriptions are stored in data files. c = [] # type: List[DataDrivenTestCase] for f in cls.parse_files: c += parse_test_cases( os.path.join(config.test_data_prefix, f), test_parser) return c def run_case(self, testcase: DataDrivenTestCase) -> None: test_parser(testcase) def test_parser(testcase: DataDrivenTestCase) -> None: """Perform a single parser test case. The argument contains the description of the test case. 
""" options = Options() if testcase.file.endswith('python2.test'): options.python_version = defaults.PYTHON2_VERSION else: options.python_version = defaults.PYTHON3_VERSION try: n = parse(bytes('\n'.join(testcase.input), 'ascii'), fnam='main', module='__main__', errors=None, options=options) a = str(n).split('\n') except CompileError as e: a = e.messages assert_string_arrays_equal(testcase.output, a, 'Invalid parser output ({}, line {})'.format( testcase.file, testcase.line)) # The file name shown in test case output. This is displayed in error # messages, and must match the file name in the test case descriptions. INPUT_FILE_NAME = 'file' class ParseErrorSuite(DataSuite): @classmethod def cases(cls) -> List[DataDrivenTestCase]: # Test case descriptions are in an external file. return parse_test_cases(os.path.join(config.test_data_prefix, 'parse-errors.test'), test_parse_error) def run_case(self, testcase: DataDrivenTestCase) -> None: test_parse_error(testcase) def test_parse_error(testcase: DataDrivenTestCase) -> None: try: # Compile temporary file. The test file contains non-ASCII characters. parse(bytes('\n'.join(testcase.input), 'utf-8'), INPUT_FILE_NAME, '__main__', None, Options()) raise AssertionFailure('No errors reported') except CompileError as e: assert e.module_with_blocker == '__main__' # Verify that there was a compile error and that the error messages # are equivalent. assert_string_arrays_equal( testcase.output, e.messages, 'Invalid compiler output ({}, line {})'.format(testcase.file, testcase.line)) mypy-0.560/mypy/test/testpythoneval.py0000644€tŠÔÚ€2›s®0000001115413215007206024312 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for running mypy programs using a Python interpreter. Each test case type checks a program then runs it using Python. The output (stdout) of the program is compared to expected output. Type checking uses full builtins and other stubs. Note: Currently Python interpreter paths are hard coded. Note: These test cases are *not* included in the main test suite, as including this suite would slow down the main suite too much. """ import os import os.path import re import subprocess import sys import pytest # type: ignore # no pytest in typeshed from typing import Dict, List, Tuple, Optional from mypy.test.config import test_data_prefix, test_temp_dir from mypy.test.data import DataDrivenTestCase, parse_test_cases, DataSuite from mypy.test.helpers import assert_string_arrays_equal from mypy.util import try_find_python2_interpreter from mypy import api # Files which contain test case descriptions. python_eval_files = ['pythoneval.test', 'python2eval.test'] python_34_eval_files = ['pythoneval-asyncio.test'] # Path to Python 3 interpreter python3_path = sys.executable program_re = re.compile(r'\b_program.py\b') class PythonEvaluationSuite(DataSuite): @classmethod def cases(cls) -> List[DataDrivenTestCase]: c = [] # type: List[DataDrivenTestCase] for f in python_eval_files: c += parse_test_cases(os.path.join(test_data_prefix, f), test_python_evaluation, test_temp_dir, True) if sys.version_info.major == 3 and sys.version_info.minor >= 4: for f in python_34_eval_files: c += parse_test_cases(os.path.join(test_data_prefix, f), test_python_evaluation, test_temp_dir, True) return c def run_case(self, testcase: DataDrivenTestCase) -> None: test_python_evaluation(testcase) def test_python_evaluation(testcase: DataDrivenTestCase) -> None: """Runs Mypy in a subprocess. If this passes without errors, executes the script again with a given Python version. 
""" assert testcase.old_cwd is not None, "test was not properly set up" mypy_cmdline = ['--show-traceback'] py2 = testcase.name.lower().endswith('python2') if py2: mypy_cmdline.append('--py2') interpreter = try_find_python2_interpreter() if interpreter is None: # Skip, can't find a Python 2 interpreter. pytest.skip() # placate the type checker return else: interpreter = python3_path # Write the program to a file. program = '_' + testcase.name + '.py' program_path = os.path.join(test_temp_dir, program) mypy_cmdline.append(program_path) with open(program_path, 'w') as file: for s in testcase.input: file.write('{}\n'.format(s)) output = [] # Type check the program. out, err, returncode = api.run(mypy_cmdline) # split lines, remove newlines, and remove directory of test case for line in (out + err).splitlines(): if line.startswith(test_temp_dir + os.sep): output.append(line[len(test_temp_dir + os.sep):].rstrip("\r\n")) else: output.append(line.rstrip("\r\n")) if returncode == 0: # Execute the program. returncode, interp_out = run([interpreter, program]) output.extend(interp_out) # Remove temp file. os.remove(program_path) assert_string_arrays_equal(adapt_output(testcase), output, 'Invalid output ({}, line {})'.format( testcase.file, testcase.line)) def split_lines(*streams: bytes) -> List[str]: """Returns a single list of string lines from the byte streams in args.""" return [ s.rstrip('\n\r') for stream in streams for s in str(stream, 'utf8').splitlines() ] def adapt_output(testcase: DataDrivenTestCase) -> List[str]: """Translates the generic _program.py into the actual filename.""" program = '_' + testcase.name + '.py' return [program_re.sub(program, line) for line in testcase.output] def run( cmdline: List[str], *, env: Optional[Dict[str, str]] = None, timeout: int = 300 ) -> Tuple[int, List[str]]: """A poor man's subprocess.run() for 3.3 and 3.4 compatibility.""" process = subprocess.Popen( cmdline, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=test_temp_dir, ) try: out, err = process.communicate(timeout=timeout) except subprocess.TimeoutExpired: out = err = b'' process.kill() return process.returncode, split_lines(out, err) mypy-0.560/mypy/test/testreports.py0000644€tŠÔÚ€2›s®0000000260013215007205023612 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for reports generated by mypy.""" import textwrap from mypy.myunit import Suite, assert_equal from mypy.report import CoberturaPackage, get_line_rate import lxml.etree as etree # type: ignore class CoberturaReportSuite(Suite): def test_get_line_rate(self) -> None: assert_equal('1.0', get_line_rate(0, 0)) assert_equal('0.3333', get_line_rate(1, 3)) def test_as_xml(self) -> None: cobertura_package = CoberturaPackage('foobar') cobertura_package.covered_lines = 21 cobertura_package.total_lines = 42 child_package = CoberturaPackage('raz') child_package.covered_lines = 10 child_package.total_lines = 10 child_package.classes['class'] = etree.Element('class') cobertura_package.packages['raz'] = child_package expected_output = textwrap.dedent('''\ ''').encode('ascii') assert_equal(expected_output, etree.tostring(cobertura_package.as_xml(), pretty_print=True)) mypy-0.560/mypy/test/testsemanal.py0000644€tŠÔÚ€2›s®0000002102613215007206023540 0ustar jukkaDROPBOX\Domain Users00000000000000"""Semantic analyzer test cases""" import os.path from typing import Dict, List from mypy import build from mypy.build import BuildSource from mypy.test.helpers import ( assert_string_arrays_equal, normalize_error_messages, testfile_pyversion, ) 
from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite from mypy.test.config import test_data_prefix, test_temp_dir from mypy.errors import CompileError from mypy.nodes import TypeInfo from mypy.options import Options # Semantic analyzer test cases: dump parse tree # Semantic analysis test case description files. semanal_files = ['semanal-basic.test', 'semanal-expressions.test', 'semanal-classes.test', 'semanal-types.test', 'semanal-typealiases.test', 'semanal-modules.test', 'semanal-statements.test', 'semanal-abstractclasses.test', 'semanal-namedtuple.test', 'semanal-typeddict.test', 'semanal-classvar.test', 'semanal-python2.test'] def get_semanal_options() -> Options: options = Options() options.use_builtins_fixtures = True options.semantic_analysis_only = True options.show_traceback = True return options class SemAnalSuite(DataSuite): @classmethod def cases(cls) -> List[DataDrivenTestCase]: c = [] # type: List[DataDrivenTestCase] for f in semanal_files: c += parse_test_cases(os.path.join(test_data_prefix, f), test_semanal, base_path=test_temp_dir, optional_out=True, native_sep=True) return c def run_case(self, testcase: DataDrivenTestCase) -> None: test_semanal(testcase) def test_semanal(testcase: DataDrivenTestCase) -> None: """Perform a semantic analysis test case. The testcase argument contains a description of the test case (inputs and output). """ try: src = '\n'.join(testcase.input) options = get_semanal_options() options.python_version = testfile_pyversion(testcase.file) result = build.build(sources=[BuildSource('main', None, src)], options=options, alt_lib_path=test_temp_dir) a = result.errors if a: raise CompileError(a) # Include string representations of the source files in the actual # output. for fnam in sorted(result.files.keys()): f = result.files[fnam] # Omit the builtins module and files with a special marker in the # path. # TODO the test is not reliable if (not f.path.endswith((os.sep + 'builtins.pyi', 'typing.pyi', 'mypy_extensions.pyi', 'abc.pyi', 'collections.pyi')) and not os.path.basename(f.path).startswith('_') and not os.path.splitext( os.path.basename(f.path))[0].endswith('_')): a += str(f).split('\n') except CompileError as e: a = e.messages a = normalize_error_messages(a) assert_string_arrays_equal( testcase.output, a, 'Invalid semantic analyzer output ({}, line {})'.format(testcase.file, testcase.line)) # Semantic analyzer error test cases # Paths to files containing test case descriptions. semanal_error_files = ['semanal-errors.test'] class SemAnalErrorSuite(DataSuite): @classmethod def cases(cls) -> List[DataDrivenTestCase]: # Read test cases from test case description files. c = [] # type: List[DataDrivenTestCase] for f in semanal_error_files: c += parse_test_cases(os.path.join(test_data_prefix, f), test_semanal_error, test_temp_dir, optional_out=True) return c def run_case(self, testcase: DataDrivenTestCase) -> None: test_semanal_error(testcase) def test_semanal_error(testcase: DataDrivenTestCase) -> None: """Perform a test case.""" try: src = '\n'.join(testcase.input) res = build.build(sources=[BuildSource('main', None, src)], options=get_semanal_options(), alt_lib_path=test_temp_dir) a = res.errors assert a, 'No errors reported in {}, line {}'.format(testcase.file, testcase.line) except CompileError as e: # Verify that there was a compile error and that the error messages # are equivalent. 
a = e.messages assert_string_arrays_equal( testcase.output, normalize_error_messages(a), 'Invalid compiler output ({}, line {})'.format(testcase.file, testcase.line)) # SymbolNode table export test cases # Test case descriptions semanal_symtable_files = ['semanal-symtable.test'] class SemAnalSymtableSuite(DataSuite): @classmethod def cases(cls) -> List[DataDrivenTestCase]: c = [] # type: List[DataDrivenTestCase] for f in semanal_symtable_files: c += parse_test_cases(os.path.join(test_data_prefix, f), None, test_temp_dir) return c def run_case(self, testcase: DataDrivenTestCase) -> None: """Perform a test case.""" try: # Build test case input. src = '\n'.join(testcase.input) result = build.build(sources=[BuildSource('main', None, src)], options=get_semanal_options(), alt_lib_path=test_temp_dir) # The output is the symbol table converted into a string. a = result.errors if a: raise CompileError(a) for f in sorted(result.files.keys()): if f not in ('builtins', 'typing', 'abc'): a.append('{}:'.format(f)) for s in str(result.files[f].names).split('\n'): a.append(' ' + s) except CompileError as e: a = e.messages assert_string_arrays_equal( testcase.output, a, 'Invalid semantic analyzer output ({}, line {})'.format( testcase.file, testcase.line)) # Type info export test cases semanal_typeinfo_files = ['semanal-typeinfo.test'] class SemAnalTypeInfoSuite(DataSuite): @classmethod def cases(cls) -> List[DataDrivenTestCase]: """Test case descriptions""" c = [] # type: List[DataDrivenTestCase] for f in semanal_typeinfo_files: c += parse_test_cases(os.path.join(test_data_prefix, f), None, test_temp_dir) return c def run_case(self, testcase: DataDrivenTestCase) -> None: """Perform a test case.""" try: # Build test case input. src = '\n'.join(testcase.input) result = build.build(sources=[BuildSource('main', None, src)], options=get_semanal_options(), alt_lib_path=test_temp_dir) a = result.errors if a: raise CompileError(a) # Collect all TypeInfos in top-level modules. typeinfos = TypeInfoMap() for f in result.files.values(): for n in f.names.values(): if isinstance(n.node, TypeInfo): assert n.fullname is not None typeinfos[n.fullname] = n.node # The output is the symbol table converted into a string. 
a = str(typeinfos).split('\n') except CompileError as e: a = e.messages assert_string_arrays_equal( testcase.output, a, 'Invalid semantic analyzer output ({}, line {})'.format( testcase.file, testcase.line)) class TypeInfoMap(Dict[str, TypeInfo]): def __str__(self) -> str: a = ['TypeInfoMap('] # type: List[str] for x, y in sorted(self.items()): if isinstance(x, str) and (not x.startswith('builtins.') and not x.startswith('typing.') and not x.startswith('abc.')): ti = ('\n' + ' ').join(str(y).split('\n')) a.append(' {} : {}'.format(x, ti)) a[-1] += ')' return '\n'.join(a) mypy-0.560/mypy/test/testsolve.py0000644€tŠÔÚ€2›s®0000001345713215007205023260 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for the constraint solver used in type inference.""" from typing import List, Union, Tuple, Optional from mypy.myunit import Suite, assert_equal from mypy.constraints import SUPERTYPE_OF, SUBTYPE_OF, Constraint from mypy.solve import solve_constraints from mypy.test.typefixture import TypeFixture from mypy.types import Type, TypeVarType, TypeVarId class SolveSuite(Suite): def __init__(self) -> None: super().__init__() self.fx = TypeFixture() def test_empty_input(self) -> None: self.assert_solve([], [], []) def test_simple_supertype_constraints(self) -> None: self.assert_solve([self.fx.t.id], [self.supc(self.fx.t, self.fx.a)], [(self.fx.a, self.fx.o)]) self.assert_solve([self.fx.t.id], [self.supc(self.fx.t, self.fx.a), self.supc(self.fx.t, self.fx.b)], [(self.fx.a, self.fx.o)]) def test_simple_subtype_constraints(self) -> None: self.assert_solve([self.fx.t.id], [self.subc(self.fx.t, self.fx.a)], [self.fx.a]) self.assert_solve([self.fx.t.id], [self.subc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.b)], [self.fx.b]) def test_both_kinds_of_constraints(self) -> None: self.assert_solve([self.fx.t.id], [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.a)], [(self.fx.b, self.fx.a)]) def test_unsatisfiable_constraints(self) -> None: # The constraints are impossible to satisfy. 
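        # Concretely: the call below asks for a T with A <: T (supertype constraint)
        # and T <: B (subtype constraint). Since B is a proper subtype of A in the
        # fixture, no such T exists, so the expected solution is None.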
self.assert_solve([self.fx.t.id], [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.b)], [None]) def test_exactly_specified_result(self) -> None: self.assert_solve([self.fx.t.id], [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.b)], [(self.fx.b, self.fx.b)]) def test_multiple_variables(self) -> None: self.assert_solve([self.fx.t.id, self.fx.s.id], [self.supc(self.fx.t, self.fx.b), self.supc(self.fx.s, self.fx.c), self.subc(self.fx.t, self.fx.a)], [(self.fx.b, self.fx.a), (self.fx.c, self.fx.o)]) def test_no_constraints_for_var(self) -> None: self.assert_solve([self.fx.t.id], [], [self.fx.uninhabited]) self.assert_solve([self.fx.t.id, self.fx.s.id], [], [self.fx.uninhabited, self.fx.uninhabited]) self.assert_solve([self.fx.t.id, self.fx.s.id], [self.supc(self.fx.s, self.fx.a)], [self.fx.uninhabited, (self.fx.a, self.fx.o)]) def test_simple_constraints_with_dynamic_type(self) -> None: self.assert_solve([self.fx.t.id], [self.supc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)]) self.assert_solve([self.fx.t.id], [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)]) self.assert_solve([self.fx.t.id], [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.a)], [(self.fx.anyt, self.fx.anyt)]) self.assert_solve([self.fx.t.id], [self.subc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)]) self.assert_solve([self.fx.t.id], [self.subc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)]) # self.assert_solve([self.fx.t.id], # [self.subc(self.fx.t, self.fx.anyt), # self.subc(self.fx.t, self.fx.a)], # [(self.fx.anyt, self.fx.anyt)]) # TODO: figure out what this should be after changes to meet(any, X) def test_both_normal_and_any_types_in_results(self) -> None: # If one of the bounds is any, we promote the other bound to # any as well, since otherwise the type range does not make sense. 
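        # Concretely: combining A <: T with T <: Any (in either order) widens both
        # bounds, so each call below is expected to solve to the range (Any, Any).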
self.assert_solve([self.fx.t.id], [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)]) self.assert_solve([self.fx.t.id], [self.supc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.a)], [(self.fx.anyt, self.fx.anyt)]) def assert_solve(self, vars: List[TypeVarId], constraints: List[Constraint], results: List[Union[None, Type, Tuple[Type, Type]]], ) -> None: res = [] # type: List[Optional[Type]] for r in results: if isinstance(r, tuple): res.append(r[0]) else: res.append(r) actual = solve_constraints(vars, constraints) assert_equal(str(actual), str(res)) def supc(self, type_var: TypeVarType, bound: Type) -> Constraint: return Constraint(type_var.id, SUPERTYPE_OF, bound) def subc(self, type_var: TypeVarType, bound: Type) -> Constraint: return Constraint(type_var.id, SUBTYPE_OF, bound) mypy-0.560/mypy/test/teststubgen.py0000644€tŠÔÚ€2›s®0000002021613215007206023567 0ustar jukkaDROPBOX\Domain Users00000000000000import glob import importlib import os.path import random import shutil import sys import tempfile import time import re from types import ModuleType from typing import List, Tuple from mypy.myunit import Suite, AssertionFailure, assert_equal from mypy.test.helpers import assert_string_arrays_equal from mypy.test.data import DataSuite, parse_test_cases, DataDrivenTestCase from mypy.test import config from mypy.parse import parse from mypy.errors import CompileError from mypy.stubgen import generate_stub, generate_stub_for_module, parse_options, Options from mypy.stubgenc import generate_c_type_stub, infer_method_sig from mypy.stubutil import ( parse_signature, parse_all_signatures, build_signature, find_unique_signatures, infer_sig_from_docstring ) class StubgenUtilSuite(Suite): def test_parse_signature(self) -> None: self.assert_parse_signature('func()', ('func', [], [])) def test_parse_signature_with_args(self) -> None: self.assert_parse_signature('func(arg)', ('func', ['arg'], [])) self.assert_parse_signature('do(arg, arg2)', ('do', ['arg', 'arg2'], [])) def test_parse_signature_with_optional_args(self) -> None: self.assert_parse_signature('func([arg])', ('func', [], ['arg'])) self.assert_parse_signature('func(arg[, arg2])', ('func', ['arg'], ['arg2'])) self.assert_parse_signature('func([arg[, arg2]])', ('func', [], ['arg', 'arg2'])) def test_parse_signature_with_default_arg(self) -> None: self.assert_parse_signature('func(arg=None)', ('func', [], ['arg'])) self.assert_parse_signature('func(arg, arg2=None)', ('func', ['arg'], ['arg2'])) self.assert_parse_signature('func(arg=1, arg2="")', ('func', [], ['arg', 'arg2'])) def test_parse_signature_with_qualified_function(self) -> None: self.assert_parse_signature('ClassName.func(arg)', ('func', ['arg'], [])) def test_parse_signature_with_kw_only_arg(self) -> None: self.assert_parse_signature('ClassName.func(arg, *, arg2=1)', ('func', ['arg', '*'], ['arg2'])) def test_parse_signature_with_star_arg(self) -> None: self.assert_parse_signature('ClassName.func(arg, *args)', ('func', ['arg', '*args'], [])) def test_parse_signature_with_star_star_arg(self) -> None: self.assert_parse_signature('ClassName.func(arg, **args)', ('func', ['arg', '**args'], [])) def assert_parse_signature(self, sig: str, result: Tuple[str, List[str], List[str]]) -> None: assert_equal(parse_signature(sig), result) def test_build_signature(self) -> None: assert_equal(build_signature([], []), '()') assert_equal(build_signature(['arg'], []), '(arg)') assert_equal(build_signature(['arg', 'arg2'], []), '(arg, arg2)') 
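        # Arguments passed in the second list are optional; build_signature renders
        # them with an elided default, e.g. 'arg2=...':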
assert_equal(build_signature(['arg'], ['arg2']), '(arg, arg2=...)') assert_equal(build_signature(['arg'], ['arg2', '**x']), '(arg, arg2=..., **x)') def test_parse_all_signatures(self) -> None: assert_equal(parse_all_signatures(['random text', '.. function:: fn(arg', '.. function:: fn()', ' .. method:: fn2(arg)']), ([('fn', '()'), ('fn2', '(arg)')], [])) def test_find_unique_signatures(self) -> None: assert_equal(find_unique_signatures( [('func', '()'), ('func', '()'), ('func2', '()'), ('func2', '(arg)'), ('func3', '(arg, arg2)')]), [('func', '()'), ('func3', '(arg, arg2)')]) def test_infer_sig_from_docstring(self) -> None: assert_equal(infer_sig_from_docstring('\nfunc(x) - y', 'func'), '(x)') assert_equal(infer_sig_from_docstring('\nfunc(x, Y_a=None)', 'func'), '(x, Y_a=None)') assert_equal(infer_sig_from_docstring('\nafunc(x) - y', 'func'), None) assert_equal(infer_sig_from_docstring('\nfunc(x, y', 'func'), None) assert_equal(infer_sig_from_docstring('\nfunc(x=z(y))', 'func'), None) assert_equal(infer_sig_from_docstring('\nfunc x', 'func'), None) class StubgenPythonSuite(DataSuite): test_data_files = ['stubgen.test'] @classmethod def cases(cls) -> List[DataDrivenTestCase]: c = [] # type: List[DataDrivenTestCase] for path in cls.test_data_files: c += parse_test_cases(os.path.join(config.test_data_prefix, path), test_stubgen) return c def run_case(self, testcase: DataDrivenTestCase) -> None: test_stubgen(testcase) def parse_flags(program_text: str) -> Options: flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE) if flags: flag_list = flags.group(1).split() else: flag_list = [] return parse_options(flag_list + ['dummy.py']) def test_stubgen(testcase: DataDrivenTestCase) -> None: if 'stubgen-test-path' not in sys.path: sys.path.insert(0, 'stubgen-test-path') os.mkdir('stubgen-test-path') source = '\n'.join(testcase.input) options = parse_flags(source) handle = tempfile.NamedTemporaryFile(prefix='prog_', suffix='.py', dir='stubgen-test-path', delete=False) assert os.path.isabs(handle.name) path = os.path.basename(handle.name) name = path[:-3] path = os.path.join('stubgen-test-path', path) out_dir = '_out' os.mkdir(out_dir) try: handle.write(bytes(source, 'ascii')) handle.close() # Without this we may sometimes be unable to import the module below, as importlib # caches os.listdir() results in Python 3.3+ (Guido explained this to me). 
reset_importlib_caches() try: if testcase.name.endswith('_import'): generate_stub_for_module(name, out_dir, quiet=True, no_import=options.no_import, include_private=options.include_private) else: generate_stub(path, out_dir, include_private=options.include_private) a = load_output(out_dir) except CompileError as e: a = e.messages assert_string_arrays_equal(testcase.output, a, 'Invalid output ({}, line {})'.format( testcase.file, testcase.line)) finally: handle.close() os.unlink(handle.name) shutil.rmtree(out_dir) def reset_importlib_caches() -> None: try: importlib.invalidate_caches() except (ImportError, AttributeError): pass def load_output(dirname: str) -> List[str]: result = [] # type: List[str] entries = glob.glob('%s/*' % dirname) assert entries, 'No files generated' if len(entries) == 1: add_file(entries[0], result) else: for entry in entries: result.append('## %s ##' % entry) add_file(entry, result) return result def add_file(path: str, result: List[str]) -> None: with open(path) as file: result.extend(file.read().splitlines()) class StubgencSuite(Suite): def test_infer_hash_sig(self) -> None: assert_equal(infer_method_sig('__hash__'), '()') def test_infer_getitem_sig(self) -> None: assert_equal(infer_method_sig('__getitem__'), '(index)') def test_infer_setitem_sig(self) -> None: assert_equal(infer_method_sig('__setitem__'), '(index, object)') def test_infer_binary_op_sig(self) -> None: for op in ('eq', 'ne', 'lt', 'le', 'gt', 'ge', 'add', 'radd', 'sub', 'rsub', 'mul', 'rmul'): assert_equal(infer_method_sig('__%s__' % op), '(other)') def test_infer_unary_op_sig(self) -> None: for op in ('neg', 'pos'): assert_equal(infer_method_sig('__%s__' % op), '()') def test_generate_c_type_stub_no_crash_for_object(self) -> None: output = [] # type: List[str] mod = ModuleType('module', '') # any module is fine generate_c_type_stub(mod, 'alias', object, output) assert_equal(output[0], 'class alias:') mypy-0.560/mypy/test/testsubtypes.py0000644€tŠÔÚ€2›s®0000002121213215007206023773 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.myunit import Suite, assert_true from mypy.nodes import CONTRAVARIANT, INVARIANT, COVARIANT from mypy.subtypes import is_subtype from mypy.test.typefixture import TypeFixture, InterfaceTypeFixture from mypy.types import Type class SubtypingSuite(Suite): def set_up(self) -> None: self.fx = TypeFixture(INVARIANT) self.fx_contra = TypeFixture(CONTRAVARIANT) self.fx_co = TypeFixture(COVARIANT) def test_trivial_cases(self) -> None: for simple in self.fx_co.a, self.fx_co.o, self.fx_co.b: self.assert_subtype(simple, simple) def test_instance_subtyping(self) -> None: self.assert_strict_subtype(self.fx.a, self.fx.o) self.assert_strict_subtype(self.fx.b, self.fx.o) self.assert_strict_subtype(self.fx.b, self.fx.a) self.assert_not_subtype(self.fx.a, self.fx.d) self.assert_not_subtype(self.fx.b, self.fx.c) def test_simple_generic_instance_subtyping_invariant(self) -> None: self.assert_subtype(self.fx.ga, self.fx.ga) self.assert_subtype(self.fx.hab, self.fx.hab) self.assert_not_subtype(self.fx.ga, self.fx.g2a) self.assert_not_subtype(self.fx.ga, self.fx.gb) self.assert_not_subtype(self.fx.gb, self.fx.ga) def test_simple_generic_instance_subtyping_covariant(self) -> None: self.assert_subtype(self.fx_co.ga, self.fx_co.ga) self.assert_subtype(self.fx_co.hab, self.fx_co.hab) self.assert_not_subtype(self.fx_co.ga, self.fx_co.g2a) self.assert_not_subtype(self.fx_co.ga, self.fx_co.gb) self.assert_subtype(self.fx_co.gb, self.fx_co.ga) def 
test_simple_generic_instance_subtyping_contravariant(self) -> None: self.assert_subtype(self.fx_contra.ga, self.fx_contra.ga) self.assert_subtype(self.fx_contra.hab, self.fx_contra.hab) self.assert_not_subtype(self.fx_contra.ga, self.fx_contra.g2a) self.assert_subtype(self.fx_contra.ga, self.fx_contra.gb) self.assert_not_subtype(self.fx_contra.gb, self.fx_contra.ga) def test_generic_subtyping_with_inheritance_invariant(self) -> None: self.assert_subtype(self.fx.gsab, self.fx.gb) self.assert_not_subtype(self.fx.gsab, self.fx.ga) self.assert_not_subtype(self.fx.gsaa, self.fx.gb) def test_generic_subtyping_with_inheritance_covariant(self) -> None: self.assert_subtype(self.fx_co.gsab, self.fx_co.gb) self.assert_subtype(self.fx_co.gsab, self.fx_co.ga) self.assert_not_subtype(self.fx_co.gsaa, self.fx_co.gb) def test_generic_subtyping_with_inheritance_contravariant(self) -> None: self.assert_subtype(self.fx_contra.gsab, self.fx_contra.gb) self.assert_not_subtype(self.fx_contra.gsab, self.fx_contra.ga) self.assert_subtype(self.fx_contra.gsaa, self.fx_contra.gb) def test_interface_subtyping(self) -> None: self.assert_subtype(self.fx.e, self.fx.f) self.assert_equivalent(self.fx.f, self.fx.f) self.assert_not_subtype(self.fx.a, self.fx.f) def test_generic_interface_subtyping(self) -> None: # TODO make this work self.skip() fx2 = InterfaceTypeFixture() self.assert_subtype(fx2.m1, fx2.gfa) self.assert_not_subtype(fx2.m1, fx2.gfb) self.assert_equivalent(fx2.gfa, fx2.gfa) def test_basic_callable_subtyping(self) -> None: self.assert_strict_subtype(self.fx.callable(self.fx.o, self.fx.d), self.fx.callable(self.fx.a, self.fx.d)) self.assert_strict_subtype(self.fx.callable(self.fx.d, self.fx.b), self.fx.callable(self.fx.d, self.fx.a)) self.assert_strict_subtype(self.fx.callable(self.fx.a, self.fx.nonet), self.fx.callable(self.fx.a, self.fx.a)) self.assert_unrelated( self.fx.callable(self.fx.a, self.fx.a, self.fx.a), self.fx.callable(self.fx.a, self.fx.a)) def test_default_arg_callable_subtyping(self) -> None: self.assert_strict_subtype( self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a), self.fx.callable(self.fx.a, self.fx.d, self.fx.a)) self.assert_strict_subtype( self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a), self.fx.callable(self.fx.a, self.fx.a)) self.assert_strict_subtype( self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a), self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a)) self.assert_unrelated( self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a), self.fx.callable(self.fx.d, self.fx.d, self.fx.a)) self.assert_unrelated( self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a), self.fx.callable_default(1, self.fx.a, self.fx.a, self.fx.a)) self.assert_unrelated( self.fx.callable_default(1, self.fx.a, self.fx.a), self.fx.callable(self.fx.a, self.fx.a, self.fx.a)) def test_var_arg_callable_subtyping_1(self) -> None: self.assert_strict_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.a), self.fx.callable_var_arg(0, self.fx.b, self.fx.a)) def test_var_arg_callable_subtyping_2(self) -> None: self.assert_strict_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.a), self.fx.callable(self.fx.b, self.fx.a)) def test_var_arg_callable_subtyping_3(self) -> None: self.assert_strict_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.a), self.fx.callable(self.fx.a)) def test_var_arg_callable_subtyping_4(self) -> None: self.assert_strict_subtype( self.fx.callable_var_arg(1, self.fx.a, self.fx.d, self.fx.a), self.fx.callable(self.fx.b, self.fx.a)) 
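    # Note: callable_var_arg is a TypeFixture helper (defined in typefixture.py,
    # outside this excerpt); judging by its use in these tests, its first argument
    # is presumably the number of mandatory positional parameters and the type
    # before the return type is a *args type. The remaining cases exercise how
    # such variadic callables relate to fixed-arity ones.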
def test_var_arg_callable_subtyping_5(self) -> None: self.assert_strict_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.d, self.fx.a), self.fx.callable(self.fx.b, self.fx.a)) def test_var_arg_callable_subtyping_6(self) -> None: self.assert_strict_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.f, self.fx.d), self.fx.callable_var_arg(0, self.fx.b, self.fx.e, self.fx.d)) def test_var_arg_callable_subtyping_7(self) -> None: self.assert_not_subtype( self.fx.callable_var_arg(0, self.fx.b, self.fx.d), self.fx.callable(self.fx.a, self.fx.d)) def test_var_arg_callable_subtyping_8(self) -> None: self.assert_not_subtype( self.fx.callable_var_arg(0, self.fx.b, self.fx.d), self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d)) self.assert_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.d), self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d)) def test_var_arg_callable_subtyping_9(self) -> None: self.assert_not_subtype( self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d), self.fx.callable_var_arg(0, self.fx.a, self.fx.d)) self.assert_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d), self.fx.callable_var_arg(0, self.fx.b, self.fx.d)) def test_type_callable_subtyping(self) -> None: self.assert_subtype( self.fx.callable_type(self.fx.d, self.fx.a), self.fx.type_type) self.assert_strict_subtype( self.fx.callable_type(self.fx.d, self.fx.b), self.fx.callable(self.fx.d, self.fx.a)) self.assert_strict_subtype(self.fx.callable_type(self.fx.a, self.fx.b), self.fx.callable(self.fx.a, self.fx.b)) # IDEA: Maybe add these test cases (they are tested pretty well in type # checker tests already): # * more interface subtyping test cases # * more generic interface subtyping test cases # * type variables # * tuple types # * None type # * any type # * generic function types def assert_subtype(self, s: Type, t: Type) -> None: assert_true(is_subtype(s, t), '{} not subtype of {}'.format(s, t)) def assert_not_subtype(self, s: Type, t: Type) -> None: assert_true(not is_subtype(s, t), '{} subtype of {}'.format(s, t)) def assert_strict_subtype(self, s: Type, t: Type) -> None: self.assert_subtype(s, t) self.assert_not_subtype(t, s) def assert_equivalent(self, s: Type, t: Type) -> None: self.assert_subtype(s, t) self.assert_subtype(t, s) def assert_unrelated(self, s: Type, t: Type) -> None: self.assert_not_subtype(s, t) self.assert_not_subtype(t, s) mypy-0.560/mypy/test/testtransform.py0000644€tŠÔÚ€2›s®0000000644213215007206024140 0ustar jukkaDROPBOX\Domain Users00000000000000"""Identity AST transform test cases""" import os.path from typing import Dict, List from mypy import build from mypy.build import BuildSource from mypy.test.helpers import ( assert_string_arrays_equal, testfile_pyversion, normalize_error_messages ) from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite from mypy.test.config import test_data_prefix, test_temp_dir from mypy.errors import CompileError from mypy.treetransform import TransformVisitor from mypy.types import Type from mypy.options import Options class TransformSuite(DataSuite): # Reuse semantic analysis test cases. 
transform_files = ['semanal-basic.test', 'semanal-expressions.test', 'semanal-classes.test', 'semanal-types.test', 'semanal-modules.test', 'semanal-statements.test', 'semanal-abstractclasses.test', 'semanal-python2.test'] @classmethod def cases(cls) -> List[DataDrivenTestCase]: c = [] # type: List[DataDrivenTestCase] for f in cls.transform_files: c += parse_test_cases(os.path.join(test_data_prefix, f), test_transform, base_path=test_temp_dir, native_sep=True) return c def run_case(self, testcase: DataDrivenTestCase) -> None: test_transform(testcase) def test_transform(testcase: DataDrivenTestCase) -> None: """Perform an identity transform test case.""" try: src = '\n'.join(testcase.input) options = Options() options.use_builtins_fixtures = True options.semantic_analysis_only = True options.show_traceback = True options.python_version = testfile_pyversion(testcase.file) result = build.build(sources=[BuildSource('main', None, src)], options=options, alt_lib_path=test_temp_dir) a = result.errors if a: raise CompileError(a) # Include string representations of the source files in the actual # output. for fnam in sorted(result.files.keys()): f = result.files[fnam] # Omit the builtins module and files with a special marker in the # path. # TODO the test is not reliable if (not f.path.endswith((os.sep + 'builtins.pyi', 'typing.pyi', 'abc.pyi')) and not os.path.basename(f.path).startswith('_') and not os.path.splitext( os.path.basename(f.path))[0].endswith('_')): t = TestTransformVisitor() f = t.mypyfile(f) a += str(f).split('\n') except CompileError as e: a = e.messages a = normalize_error_messages(a) assert_string_arrays_equal( testcase.output, a, 'Invalid semantic analyzer output ({}, line {})'.format(testcase.file, testcase.line)) class TestTransformVisitor(TransformVisitor): def type(self, type: Type) -> Type: assert type is not None return type mypy-0.560/mypy/test/testtypegen.py0000644€tŠÔÚ€2›s®0000001135213215007206023574 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for the type checker: exporting inferred types""" import os.path import re from typing import Set, List from mypy import build from mypy.build import BuildSource from mypy.test import config from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal from mypy.util import short_type from mypy.nodes import ( NameExpr, TypeVarExpr, CallExpr, Expression, MypyFile, AssignmentStmt, IntExpr ) from mypy.traverser import TraverserVisitor from mypy.errors import CompileError from mypy.options import Options class TypeExportSuite(DataSuite): # List of files that contain test case descriptions. files = ['typexport-basic.test'] @classmethod def cases(cls) -> List[DataDrivenTestCase]: c = [] # type: List[DataDrivenTestCase] for f in cls.files: c += parse_test_cases(os.path.join(config.test_data_prefix, f), None, config.test_temp_dir) return c def run_case(self, testcase: DataDrivenTestCase) -> None: try: line = testcase.input[0] mask = '' if line.startswith('##'): mask = '(' + line[2:].strip() + ')$' src = '\n'.join(testcase.input) options = Options() options.use_builtins_fixtures = True options.show_traceback = True result = build.build(sources=[BuildSource('main', None, src)], options=options, alt_lib_path=config.test_temp_dir) a = result.errors map = result.types nodes = map.keys() # Ignore NameExpr nodes of variables with explicit (trivial) types # to simplify output. 
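        # SkippedNodeSearcher (defined further below in this file) records those
        # nodes so they can be excluded from the expected-type output.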
searcher = SkippedNodeSearcher() for file in result.files.values(): file.accept(searcher) ignored = searcher.nodes # Filter nodes that should be included in the output. keys = [] for node in nodes: if node.line is not None and node.line != -1 and map[node]: if ignore_node(node) or node in ignored: continue if (re.match(mask, short_type(node)) or (isinstance(node, NameExpr) and re.match(mask, node.name))): # Include node in output. keys.append(node) for key in sorted(keys, key=lambda n: (n.line, short_type(n), str(n) + str(map[n]))): ts = str(map[key]).replace('*', '') # Remove erased tags ts = ts.replace('__main__.', '') a.append('{}({}) : {}'.format(short_type(key), key.line, ts)) except CompileError as e: a = e.messages assert_string_arrays_equal( testcase.output, a, 'Invalid type checker output ({}, line {})'.format(testcase.file, testcase.line)) class SkippedNodeSearcher(TraverserVisitor): def __init__(self) -> None: self.nodes = set() # type: Set[Expression] self.is_typing = False def visit_mypy_file(self, f: MypyFile) -> None: self.is_typing = f.fullname() == 'typing' or f.fullname() == 'builtins' super().visit_mypy_file(f) def visit_assignment_stmt(self, s: AssignmentStmt) -> None: if s.type or ignore_node(s.rvalue): for lvalue in s.lvalues: if isinstance(lvalue, NameExpr): self.nodes.add(lvalue) super().visit_assignment_stmt(s) def visit_name_expr(self, n: NameExpr) -> None: self.skip_if_typing(n) def visit_int_expr(self, n: IntExpr) -> None: self.skip_if_typing(n) def skip_if_typing(self, n: Expression) -> None: if self.is_typing: self.nodes.add(n) def ignore_node(node: Expression) -> bool: """Return True if node is to be omitted from test case output.""" # We want to get rid of object() expressions in the typing module stub # and also TypeVar(...) expressions. Since detecting whether a node comes # from the typing module is not easy, we just to strip them all away. 
if isinstance(node, TypeVarExpr): return True if isinstance(node, NameExpr) and node.fullname == 'builtins.object': return True if isinstance(node, NameExpr) and node.fullname == 'builtins.None': return True if isinstance(node, CallExpr) and (ignore_node(node.callee) or node.analyzed): return True return False mypy-0.560/mypy/test/testtypes.py0000644€tŠÔÚ€2›s®0000010245213215007206023267 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for mypy types and type operations.""" from typing import List, Tuple from mypy.myunit import ( Suite, assert_equal, assert_true, assert_false, assert_type ) from mypy.erasetype import erase_type from mypy.expandtype import expand_type from mypy.join import join_types, join_simple from mypy.meet import meet_types from mypy.types import ( UnboundType, AnyType, CallableType, TupleType, TypeVarDef, Type, Instance, NoneTyp, Overloaded, TypeType, UnionType, UninhabitedType, true_only, false_only, TypeVarId, TypeOfAny ) from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, CONTRAVARIANT, INVARIANT, COVARIANT from mypy.subtypes import is_subtype, is_more_precise, is_proper_subtype from mypy.test.typefixture import TypeFixture, InterfaceTypeFixture class TypesSuite(Suite): def __init__(self) -> None: super().__init__() self.x = UnboundType('X') # Helpers self.y = UnboundType('Y') self.fx = TypeFixture() self.function = self.fx.function def test_any(self) -> None: assert_equal(str(AnyType(TypeOfAny.special_form)), 'Any') def test_simple_unbound_type(self) -> None: u = UnboundType('Foo') assert_equal(str(u), 'Foo?') def test_generic_unbound_type(self) -> None: u = UnboundType('Foo', [UnboundType('T'), AnyType(TypeOfAny.special_form)]) assert_equal(str(u), 'Foo?[T?, Any]') def test_callable_type(self) -> None: c = CallableType([self.x, self.y], [ARG_POS, ARG_POS], [None, None], AnyType(TypeOfAny.special_form), self.function) assert_equal(str(c), 'def (X?, Y?) -> Any') c2 = CallableType([], [], [], NoneTyp(), self.fx.function) assert_equal(str(c2), 'def ()') def test_callable_type_with_default_args(self) -> None: c = CallableType([self.x, self.y], [ARG_POS, ARG_OPT], [None, None], AnyType(TypeOfAny.special_form), self.function) assert_equal(str(c), 'def (X?, Y? =) -> Any') c2 = CallableType([self.x, self.y], [ARG_OPT, ARG_OPT], [None, None], AnyType(TypeOfAny.special_form), self.function) assert_equal(str(c2), 'def (X? =, Y? =) -> Any') def test_callable_type_with_var_args(self) -> None: c = CallableType([self.x], [ARG_STAR], [None], AnyType(TypeOfAny.special_form), self.function) assert_equal(str(c), 'def (*X?) -> Any') c2 = CallableType([self.x, self.y], [ARG_POS, ARG_STAR], [None, None], AnyType(TypeOfAny.special_form), self.function) assert_equal(str(c2), 'def (X?, *Y?) -> Any') c3 = CallableType([self.x, self.y], [ARG_OPT, ARG_STAR], [None, None], AnyType(TypeOfAny.special_form), self.function) assert_equal(str(c3), 'def (X? =, *Y?) 
-> Any') def test_tuple_type(self) -> None: assert_equal(str(TupleType([], self.fx.std_tuple)), 'Tuple[]') assert_equal(str(TupleType([self.x], self.fx.std_tuple)), 'Tuple[X?]') assert_equal(str(TupleType([self.x, AnyType(TypeOfAny.special_form)], self.fx.std_tuple)), 'Tuple[X?, Any]') def test_type_variable_binding(self) -> None: assert_equal(str(TypeVarDef('X', 'X', 1, [], self.fx.o)), 'X') assert_equal(str(TypeVarDef('X', 'X', 1, [self.x, self.y], self.fx.o)), 'X in (X?, Y?)') def test_generic_function_type(self) -> None: c = CallableType([self.x, self.y], [ARG_POS, ARG_POS], [None, None], self.y, self.function, name=None, variables=[TypeVarDef('X', 'X', -1, [], self.fx.o)]) assert_equal(str(c), 'def [X] (X?, Y?) -> Y?') v = [TypeVarDef('Y', 'Y', -1, [], self.fx.o), TypeVarDef('X', 'X', -2, [], self.fx.o)] c2 = CallableType([], [], [], NoneTyp(), self.function, name=None, variables=v) assert_equal(str(c2), 'def [Y, X] ()') class TypeOpsSuite(Suite): def set_up(self) -> None: self.fx = TypeFixture(INVARIANT) self.fx_co = TypeFixture(COVARIANT) self.fx_contra = TypeFixture(CONTRAVARIANT) # expand_type def test_trivial_expand(self) -> None: for t in (self.fx.a, self.fx.o, self.fx.t, self.fx.nonet, self.tuple(self.fx.a), self.callable([], self.fx.a, self.fx.a), self.fx.anyt): self.assert_expand(t, [], t) self.assert_expand(t, [], t) self.assert_expand(t, [], t) def test_expand_naked_type_var(self) -> None: self.assert_expand(self.fx.t, [(self.fx.t.id, self.fx.a)], self.fx.a) self.assert_expand(self.fx.t, [(self.fx.s.id, self.fx.a)], self.fx.t) def test_expand_basic_generic_types(self) -> None: self.assert_expand(self.fx.gt, [(self.fx.t.id, self.fx.a)], self.fx.ga) # IDEA: Add test cases for # tuple types # callable types # multiple arguments def assert_expand(self, orig: Type, map_items: List[Tuple[TypeVarId, Type]], result: Type, ) -> None: lower_bounds = {} for id, t in map_items: lower_bounds[id] = t exp = expand_type(orig, lower_bounds) # Remove erased tags (asterisks). 
assert_equal(str(exp).replace('*', ''), str(result)) # erase_type def test_trivial_erase(self) -> None: for t in (self.fx.a, self.fx.o, self.fx.nonet, self.fx.anyt): self.assert_erase(t, t) def test_erase_with_type_variable(self) -> None: self.assert_erase(self.fx.t, self.fx.anyt) def test_erase_with_generic_type(self) -> None: self.assert_erase(self.fx.ga, self.fx.gdyn) self.assert_erase(self.fx.hab, Instance(self.fx.hi, [self.fx.anyt, self.fx.anyt])) def test_erase_with_tuple_type(self) -> None: self.assert_erase(self.tuple(self.fx.a), self.fx.std_tuple) def test_erase_with_function_type(self) -> None: self.assert_erase(self.fx.callable(self.fx.a, self.fx.b), self.fx.callable_type(self.fx.nonet)) def test_erase_with_type_object(self) -> None: self.assert_erase(self.fx.callable_type(self.fx.a, self.fx.b), self.fx.callable_type(self.fx.nonet)) def test_erase_with_type_type(self) -> None: self.assert_erase(self.fx.type_a, self.fx.type_a) self.assert_erase(self.fx.type_t, self.fx.type_any) def assert_erase(self, orig: Type, result: Type) -> None: assert_equal(str(erase_type(orig)), str(result)) # is_more_precise def test_is_more_precise(self) -> None: fx = self.fx assert_true(is_more_precise(fx.b, fx.a)) assert_true(is_more_precise(fx.b, fx.b)) assert_true(is_more_precise(fx.b, fx.b)) assert_true(is_more_precise(fx.b, fx.anyt)) assert_true(is_more_precise(self.tuple(fx.b, fx.a), self.tuple(fx.b, fx.a))) assert_true(is_more_precise(self.tuple(fx.b, fx.b), self.tuple(fx.b, fx.a))) assert_false(is_more_precise(fx.a, fx.b)) assert_false(is_more_precise(fx.anyt, fx.b)) # is_proper_subtype def test_is_proper_subtype(self) -> None: fx = self.fx assert_true(is_proper_subtype(fx.a, fx.a)) assert_true(is_proper_subtype(fx.b, fx.a)) assert_true(is_proper_subtype(fx.b, fx.o)) assert_true(is_proper_subtype(fx.b, fx.o)) assert_false(is_proper_subtype(fx.a, fx.b)) assert_false(is_proper_subtype(fx.o, fx.b)) assert_true(is_proper_subtype(fx.anyt, fx.anyt)) assert_false(is_proper_subtype(fx.a, fx.anyt)) assert_false(is_proper_subtype(fx.anyt, fx.a)) assert_true(is_proper_subtype(fx.ga, fx.ga)) assert_true(is_proper_subtype(fx.gdyn, fx.gdyn)) assert_false(is_proper_subtype(fx.ga, fx.gdyn)) assert_false(is_proper_subtype(fx.gdyn, fx.ga)) assert_true(is_proper_subtype(fx.t, fx.t)) assert_false(is_proper_subtype(fx.t, fx.s)) assert_true(is_proper_subtype(fx.a, UnionType([fx.a, fx.b]))) assert_true(is_proper_subtype(UnionType([fx.a, fx.b]), UnionType([fx.a, fx.b, fx.c]))) assert_false(is_proper_subtype(UnionType([fx.a, fx.b]), UnionType([fx.b, fx.c]))) def test_is_proper_subtype_covariance(self) -> None: fx_co = self.fx_co assert_true(is_proper_subtype(fx_co.gsab, fx_co.gb)) assert_true(is_proper_subtype(fx_co.gsab, fx_co.ga)) assert_false(is_proper_subtype(fx_co.gsaa, fx_co.gb)) assert_true(is_proper_subtype(fx_co.gb, fx_co.ga)) assert_false(is_proper_subtype(fx_co.ga, fx_co.gb)) def test_is_proper_subtype_contravariance(self) -> None: fx_contra = self.fx_contra assert_true(is_proper_subtype(fx_contra.gsab, fx_contra.gb)) assert_false(is_proper_subtype(fx_contra.gsab, fx_contra.ga)) assert_true(is_proper_subtype(fx_contra.gsaa, fx_contra.gb)) assert_false(is_proper_subtype(fx_contra.gb, fx_contra.ga)) assert_true(is_proper_subtype(fx_contra.ga, fx_contra.gb)) def test_is_proper_subtype_invariance(self) -> None: fx = self.fx assert_true(is_proper_subtype(fx.gsab, fx.gb)) assert_false(is_proper_subtype(fx.gsab, fx.ga)) assert_false(is_proper_subtype(fx.gsaa, fx.gb)) assert_false(is_proper_subtype(fx.gb, fx.ga)) 
assert_false(is_proper_subtype(fx.ga, fx.gb)) # can_be_true / can_be_false def test_empty_tuple_always_false(self) -> None: tuple_type = self.tuple() assert_true(tuple_type.can_be_false) assert_false(tuple_type.can_be_true) def test_nonempty_tuple_always_true(self) -> None: tuple_type = self.tuple(AnyType(TypeOfAny.special_form), AnyType(TypeOfAny.special_form)) assert_true(tuple_type.can_be_true) assert_false(tuple_type.can_be_false) def test_union_can_be_true_if_any_true(self) -> None: union_type = UnionType([self.fx.a, self.tuple()]) assert_true(union_type.can_be_true) def test_union_can_not_be_true_if_none_true(self) -> None: union_type = UnionType([self.tuple(), self.tuple()]) assert_false(union_type.can_be_true) def test_union_can_be_false_if_any_false(self) -> None: union_type = UnionType([self.fx.a, self.tuple()]) assert_true(union_type.can_be_false) def test_union_can_not_be_false_if_none_false(self) -> None: union_type = UnionType([self.tuple(self.fx.a), self.tuple(self.fx.d)]) assert_false(union_type.can_be_false) # true_only / false_only def test_true_only_of_false_type_is_uninhabited(self) -> None: to = true_only(NoneTyp()) assert_type(UninhabitedType, to) def test_true_only_of_true_type_is_idempotent(self) -> None: always_true = self.tuple(AnyType(TypeOfAny.special_form)) to = true_only(always_true) assert_true(always_true is to) def test_true_only_of_instance(self) -> None: to = true_only(self.fx.a) assert_equal(str(to), "A") assert_true(to.can_be_true) assert_false(to.can_be_false) assert_type(Instance, to) # The original class still can be false assert_true(self.fx.a.can_be_false) def test_true_only_of_union(self) -> None: tup_type = self.tuple(AnyType(TypeOfAny.special_form)) # Union of something that is unknown, something that is always true, something # that is always false union_type = UnionType([self.fx.a, tup_type, self.tuple()]) to = true_only(union_type) assert isinstance(to, UnionType) assert_equal(len(to.items), 2) assert_true(to.items[0].can_be_true) assert_false(to.items[0].can_be_false) assert_true(to.items[1] is tup_type) def test_false_only_of_true_type_is_uninhabited(self) -> None: fo = false_only(self.tuple(AnyType(TypeOfAny.special_form))) assert_type(UninhabitedType, fo) def test_false_only_of_false_type_is_idempotent(self) -> None: always_false = NoneTyp() fo = false_only(always_false) assert_true(always_false is fo) def test_false_only_of_instance(self) -> None: fo = false_only(self.fx.a) assert_equal(str(fo), "A") assert_false(fo.can_be_true) assert_true(fo.can_be_false) assert_type(Instance, fo) # The original class still can be true assert_true(self.fx.a.can_be_true) def test_false_only_of_union(self) -> None: tup_type = self.tuple() # Union of something that is unknown, something that is always true, something # that is always false union_type = UnionType([self.fx.a, self.tuple(AnyType(TypeOfAny.special_form)), tup_type]) assert_equal(len(union_type.items), 3) fo = false_only(union_type) assert isinstance(fo, UnionType) assert_equal(len(fo.items), 2) assert_false(fo.items[0].can_be_true) assert_true(fo.items[0].can_be_false) assert_true(fo.items[1] is tup_type) # Helpers def tuple(self, *a: Type) -> TupleType: return TupleType(list(a), self.fx.std_tuple) def callable(self, vars: List[str], *a: Type) -> CallableType: """callable(args, a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r and type arguments vars. 
""" tv = [] # type: List[TypeVarDef] n = -1 for v in vars: tv.append(TypeVarDef(v, v, n, [], self.fx.o)) n -= 1 return CallableType(list(a[:-1]), [ARG_POS] * (len(a) - 1), [None] * (len(a) - 1), a[-1], self.fx.function, name=None, variables=tv) class JoinSuite(Suite): def set_up(self) -> None: self.fx = TypeFixture() def test_trivial_cases(self) -> None: for simple in self.fx.a, self.fx.o, self.fx.b: self.assert_join(simple, simple, simple) def test_class_subtyping(self) -> None: self.assert_join(self.fx.a, self.fx.o, self.fx.o) self.assert_join(self.fx.b, self.fx.o, self.fx.o) self.assert_join(self.fx.a, self.fx.d, self.fx.o) self.assert_join(self.fx.b, self.fx.c, self.fx.a) self.assert_join(self.fx.b, self.fx.d, self.fx.o) def test_tuples(self) -> None: self.assert_join(self.tuple(), self.tuple(), self.tuple()) self.assert_join(self.tuple(self.fx.a), self.tuple(self.fx.a), self.tuple(self.fx.a)) self.assert_join(self.tuple(self.fx.b, self.fx.c), self.tuple(self.fx.a, self.fx.d), self.tuple(self.fx.a, self.fx.o)) self.assert_join(self.tuple(self.fx.a, self.fx.a), self.fx.std_tuple, self.fx.o) self.assert_join(self.tuple(self.fx.a), self.tuple(self.fx.a, self.fx.a), self.fx.o) def test_function_types(self) -> None: self.assert_join(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.b)) self.assert_join(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.b, self.fx.b), self.callable(self.fx.b, self.fx.b)) self.assert_join(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.a), self.callable(self.fx.a, self.fx.a)) self.assert_join(self.callable(self.fx.a, self.fx.b), self.fx.function, self.fx.function) self.assert_join(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.d, self.fx.b), self.fx.function) def test_type_vars(self) -> None: self.assert_join(self.fx.t, self.fx.t, self.fx.t) self.assert_join(self.fx.s, self.fx.s, self.fx.s) self.assert_join(self.fx.t, self.fx.s, self.fx.o) def test_none(self) -> None: # Any type t joined with None results in t. for t in [NoneTyp(), self.fx.a, self.fx.o, UnboundType('x'), self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b), self.fx.anyt]: self.assert_join(t, NoneTyp(), t) def test_unbound_type(self) -> None: self.assert_join(UnboundType('x'), UnboundType('x'), self.fx.anyt) self.assert_join(UnboundType('x'), UnboundType('y'), self.fx.anyt) # Any type t joined with an unbound type results in dynamic. Unbound # type means that there is an error somewhere in the program, so this # does not affect type safety (whatever the result). for t in [self.fx.a, self.fx.o, self.fx.ga, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_join(t, UnboundType('X'), self.fx.anyt) def test_any_type(self) -> None: # Join against 'Any' type always results in 'Any'. for t in [self.fx.anyt, self.fx.a, self.fx.o, NoneTyp(), UnboundType('x'), self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_join(t, self.fx.anyt, self.fx.anyt) def test_mixed_truth_restricted_type_simple(self) -> None: # join_simple against differently restricted truthiness types drops restrictions. true_a = true_only(self.fx.a) false_o = false_only(self.fx.o) j = join_simple(self.fx.o, true_a, false_o) assert_true(j.can_be_true) assert_true(j.can_be_false) def test_mixed_truth_restricted_type(self) -> None: # join_types against differently restricted truthiness types drops restrictions. 
true_any = true_only(AnyType(TypeOfAny.special_form)) false_o = false_only(self.fx.o) j = join_types(true_any, false_o) assert_true(j.can_be_true) assert_true(j.can_be_false) def test_other_mixed_types(self) -> None: # In general, joining unrelated types produces object. for t1 in [self.fx.a, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: for t2 in [self.fx.a, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: if str(t1) != str(t2): self.assert_join(t1, t2, self.fx.o) def test_simple_generics(self) -> None: self.assert_join(self.fx.ga, self.fx.ga, self.fx.ga) self.assert_join(self.fx.ga, self.fx.gb, self.fx.ga) self.assert_join(self.fx.ga, self.fx.gd, self.fx.o) self.assert_join(self.fx.ga, self.fx.g2a, self.fx.o) self.assert_join(self.fx.ga, self.fx.nonet, self.fx.ga) self.assert_join(self.fx.ga, self.fx.anyt, self.fx.anyt) for t in [self.fx.a, self.fx.o, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_join(t, self.fx.ga, self.fx.o) def test_generics_with_multiple_args(self) -> None: self.assert_join(self.fx.hab, self.fx.hab, self.fx.hab) self.assert_join(self.fx.hab, self.fx.hbb, self.fx.hab) self.assert_join(self.fx.had, self.fx.haa, self.fx.o) def test_generics_with_inheritance(self) -> None: self.assert_join(self.fx.gsab, self.fx.gb, self.fx.gb) self.assert_join(self.fx.gsba, self.fx.gb, self.fx.ga) self.assert_join(self.fx.gsab, self.fx.gd, self.fx.o) def test_generics_with_inheritance_and_shared_supertype(self) -> None: self.assert_join(self.fx.gsba, self.fx.gs2a, self.fx.ga) self.assert_join(self.fx.gsab, self.fx.gs2a, self.fx.ga) self.assert_join(self.fx.gsab, self.fx.gs2d, self.fx.o) def test_generic_types_and_any(self) -> None: self.assert_join(self.fx.gdyn, self.fx.ga, self.fx.gdyn) def test_callables_with_any(self) -> None: self.assert_join(self.callable(self.fx.a, self.fx.a, self.fx.anyt, self.fx.a), self.callable(self.fx.a, self.fx.anyt, self.fx.a, self.fx.anyt), self.callable(self.fx.a, self.fx.anyt, self.fx.anyt, self.fx.anyt)) def test_overloaded(self) -> None: c = self.callable def ov(*items: CallableType) -> Overloaded: return Overloaded(list(items)) fx = self.fx func = fx.function c1 = c(fx.a, fx.a) c2 = c(fx.b, fx.b) c3 = c(fx.c, fx.c) self.assert_join(ov(c1, c2), c1, c1) self.assert_join(ov(c1, c2), c2, c2) self.assert_join(ov(c1, c2), ov(c1, c2), ov(c1, c2)) self.assert_join(ov(c1, c2), ov(c1, c3), c1) self.assert_join(ov(c2, c1), ov(c3, c1), c1) self.assert_join(ov(c1, c2), c3, func) def test_overloaded_with_any(self) -> None: c = self.callable def ov(*items: CallableType) -> Overloaded: return Overloaded(list(items)) fx = self.fx any = fx.anyt self.assert_join(ov(c(fx.a, fx.a), c(fx.b, fx.b)), c(any, fx.b), c(any, fx.b)) self.assert_join(ov(c(fx.a, fx.a), c(any, fx.b)), c(fx.b, fx.b), c(any, fx.b)) def test_join_interface_types(self) -> None: self.skip() # FIX self.assert_join(self.fx.f, self.fx.f, self.fx.f) self.assert_join(self.fx.f, self.fx.f2, self.fx.o) self.assert_join(self.fx.f, self.fx.f3, self.fx.f) def test_join_interface_and_class_types(self) -> None: self.skip() # FIX self.assert_join(self.fx.o, self.fx.f, self.fx.o) self.assert_join(self.fx.a, self.fx.f, self.fx.o) self.assert_join(self.fx.e, self.fx.f, self.fx.f) def test_join_class_types_with_interface_result(self) -> None: self.skip() # FIX # Unique result self.assert_join(self.fx.e, self.fx.e2, self.fx.f) # Ambiguous result self.assert_join(self.fx.e2, self.fx.e3, self.fx.anyt) def test_generic_interfaces(self) -> None: self.skip() # FIX fx = 
InterfaceTypeFixture() self.assert_join(fx.gfa, fx.gfa, fx.gfa) self.assert_join(fx.gfa, fx.gfb, fx.o) self.assert_join(fx.m1, fx.gfa, fx.gfa) self.assert_join(fx.m1, fx.gfb, fx.o) def test_simple_type_objects(self) -> None: t1 = self.type_callable(self.fx.a, self.fx.a) t2 = self.type_callable(self.fx.b, self.fx.b) tr = self.type_callable(self.fx.b, self.fx.a) self.assert_join(t1, t1, t1) j = join_types(t1, t1) assert isinstance(j, CallableType) assert_true(j.is_type_obj()) self.assert_join(t1, t2, tr) self.assert_join(t1, self.fx.type_type, self.fx.type_type) self.assert_join(self.fx.type_type, self.fx.type_type, self.fx.type_type) def test_type_type(self) -> None: self.assert_join(self.fx.type_a, self.fx.type_b, self.fx.type_a) self.assert_join(self.fx.type_b, self.fx.type_any, self.fx.type_any) self.assert_join(self.fx.type_b, self.fx.type_type, self.fx.type_type) self.assert_join(self.fx.type_b, self.fx.type_c, self.fx.type_a) self.assert_join(self.fx.type_c, self.fx.type_d, TypeType.make_normalized(self.fx.o)) self.assert_join(self.fx.type_type, self.fx.type_any, self.fx.type_type) self.assert_join(self.fx.type_b, self.fx.anyt, self.fx.anyt) # There are additional test cases in check-inference.test. # TODO: Function types + varargs and default args. def assert_join(self, s: Type, t: Type, join: Type) -> None: self.assert_simple_join(s, t, join) self.assert_simple_join(t, s, join) def assert_simple_join(self, s: Type, t: Type, join: Type) -> None: result = join_types(s, t) actual = str(result) expected = str(join) assert_equal(actual, expected, 'join({}, {}) == {{}} ({{}} expected)'.format(s, t)) assert_true(is_subtype(s, result), '{} not subtype of {}'.format(s, result)) assert_true(is_subtype(t, result), '{} not subtype of {}'.format(t, result)) def tuple(self, *a: Type) -> TupleType: return TupleType(list(a), self.fx.std_tuple) def callable(self, *a: Type) -> CallableType: """callable(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r. """ n = len(a) - 1 return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n, a[-1], self.fx.function) def type_callable(self, *a: Type) -> CallableType: """type_callable(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r, and which represents a type. 
""" n = len(a) - 1 return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n, a[-1], self.fx.type_type) class MeetSuite(Suite): def set_up(self) -> None: self.fx = TypeFixture() def test_trivial_cases(self) -> None: for simple in self.fx.a, self.fx.o, self.fx.b: self.assert_meet(simple, simple, simple) def test_class_subtyping(self) -> None: self.assert_meet(self.fx.a, self.fx.o, self.fx.a) self.assert_meet(self.fx.a, self.fx.b, self.fx.b) self.assert_meet(self.fx.b, self.fx.o, self.fx.b) self.assert_meet(self.fx.a, self.fx.d, NoneTyp()) self.assert_meet(self.fx.b, self.fx.c, NoneTyp()) def test_tuples(self) -> None: self.assert_meet(self.tuple(), self.tuple(), self.tuple()) self.assert_meet(self.tuple(self.fx.a), self.tuple(self.fx.a), self.tuple(self.fx.a)) self.assert_meet(self.tuple(self.fx.b, self.fx.c), self.tuple(self.fx.a, self.fx.d), self.tuple(self.fx.b, NoneTyp())) self.assert_meet(self.tuple(self.fx.a, self.fx.a), self.fx.std_tuple, self.tuple(self.fx.a, self.fx.a)) self.assert_meet(self.tuple(self.fx.a), self.tuple(self.fx.a, self.fx.a), NoneTyp()) def test_function_types(self) -> None: self.assert_meet(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.b)) self.assert_meet(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.b, self.fx.b), self.callable(self.fx.a, self.fx.b)) self.assert_meet(self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.a), self.callable(self.fx.a, self.fx.b)) def test_type_vars(self) -> None: self.assert_meet(self.fx.t, self.fx.t, self.fx.t) self.assert_meet(self.fx.s, self.fx.s, self.fx.s) self.assert_meet(self.fx.t, self.fx.s, NoneTyp()) def test_none(self) -> None: self.assert_meet(NoneTyp(), NoneTyp(), NoneTyp()) self.assert_meet(NoneTyp(), self.fx.anyt, NoneTyp()) # Any type t joined with None results in None, unless t is Any. for t in [self.fx.a, self.fx.o, UnboundType('x'), self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_meet(t, NoneTyp(), NoneTyp()) def test_unbound_type(self) -> None: self.assert_meet(UnboundType('x'), UnboundType('x'), self.fx.anyt) self.assert_meet(UnboundType('x'), UnboundType('y'), self.fx.anyt) self.assert_meet(UnboundType('x'), self.fx.anyt, UnboundType('x')) # The meet of any type t with an unbound type results in dynamic. # Unbound type means that there is an error somewhere in the program, # so this does not affect type safety. for t in [self.fx.a, self.fx.o, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_meet(t, UnboundType('X'), self.fx.anyt) def test_dynamic_type(self) -> None: # Meet against dynamic type always results in dynamic. 
for t in [self.fx.anyt, self.fx.a, self.fx.o, NoneTyp(), UnboundType('x'), self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_meet(t, self.fx.anyt, t) def test_simple_generics(self) -> None: self.assert_meet(self.fx.ga, self.fx.ga, self.fx.ga) self.assert_meet(self.fx.ga, self.fx.o, self.fx.ga) self.assert_meet(self.fx.ga, self.fx.gb, self.fx.gb) self.assert_meet(self.fx.ga, self.fx.gd, self.fx.nonet) self.assert_meet(self.fx.ga, self.fx.g2a, self.fx.nonet) self.assert_meet(self.fx.ga, self.fx.nonet, self.fx.nonet) self.assert_meet(self.fx.ga, self.fx.anyt, self.fx.ga) for t in [self.fx.a, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_meet(t, self.fx.ga, self.fx.nonet) def test_generics_with_multiple_args(self) -> None: self.assert_meet(self.fx.hab, self.fx.hab, self.fx.hab) self.assert_meet(self.fx.hab, self.fx.haa, self.fx.hab) self.assert_meet(self.fx.hab, self.fx.had, self.fx.nonet) self.assert_meet(self.fx.hab, self.fx.hbb, self.fx.hbb) def test_generics_with_inheritance(self) -> None: self.assert_meet(self.fx.gsab, self.fx.gb, self.fx.gsab) self.assert_meet(self.fx.gsba, self.fx.gb, self.fx.nonet) def test_generics_with_inheritance_and_shared_supertype(self) -> None: self.assert_meet(self.fx.gsba, self.fx.gs2a, self.fx.nonet) self.assert_meet(self.fx.gsab, self.fx.gs2a, self.fx.nonet) def test_generic_types_and_dynamic(self) -> None: self.assert_meet(self.fx.gdyn, self.fx.ga, self.fx.ga) def test_callables_with_dynamic(self) -> None: self.assert_meet(self.callable(self.fx.a, self.fx.a, self.fx.anyt, self.fx.a), self.callable(self.fx.a, self.fx.anyt, self.fx.a, self.fx.anyt), self.callable(self.fx.a, self.fx.anyt, self.fx.anyt, self.fx.anyt)) def test_meet_interface_types(self) -> None: self.assert_meet(self.fx.f, self.fx.f, self.fx.f) self.assert_meet(self.fx.f, self.fx.f2, self.fx.nonet) self.assert_meet(self.fx.f, self.fx.f3, self.fx.f3) def test_meet_interface_and_class_types(self) -> None: self.assert_meet(self.fx.o, self.fx.f, self.fx.f) self.assert_meet(self.fx.a, self.fx.f, self.fx.nonet) self.assert_meet(self.fx.e, self.fx.f, self.fx.e) def test_meet_class_types_with_shared_interfaces(self) -> None: # These have nothing special with respect to meets, unlike joins. These # are for completeness only. 
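        # Concretely: although E and E2 both derive from the abstract class F in the
        # fixture, their meet is still expected to be NoneTyp (no common instance type).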
self.assert_meet(self.fx.e, self.fx.e2, self.fx.nonet) self.assert_meet(self.fx.e2, self.fx.e3, self.fx.nonet) def test_meet_with_generic_interfaces(self) -> None: # TODO fix self.skip() fx = InterfaceTypeFixture() self.assert_meet(fx.gfa, fx.m1, fx.m1) self.assert_meet(fx.gfa, fx.gfa, fx.gfa) self.assert_meet(fx.gfb, fx.m1, fx.nonet) def test_type_type(self) -> None: self.assert_meet(self.fx.type_a, self.fx.type_b, self.fx.type_b) self.assert_meet(self.fx.type_b, self.fx.type_any, self.fx.type_b) self.assert_meet(self.fx.type_b, self.fx.type_type, self.fx.type_b) self.assert_meet(self.fx.type_b, self.fx.type_c, self.fx.nonet) self.assert_meet(self.fx.type_c, self.fx.type_d, self.fx.nonet) self.assert_meet(self.fx.type_type, self.fx.type_any, self.fx.type_any) self.assert_meet(self.fx.type_b, self.fx.anyt, self.fx.type_b) # FIX generic interfaces + ranges def assert_meet(self, s: Type, t: Type, meet: Type) -> None: self.assert_simple_meet(s, t, meet) self.assert_simple_meet(t, s, meet) def assert_simple_meet(self, s: Type, t: Type, meet: Type) -> None: result = meet_types(s, t) actual = str(result) expected = str(meet) assert_equal(actual, expected, 'meet({}, {}) == {{}} ({{}} expected)'.format(s, t)) assert_true(is_subtype(result, s), '{} not subtype of {}'.format(result, s)) assert_true(is_subtype(result, t), '{} not subtype of {}'.format(result, t)) def tuple(self, *a: Type) -> TupleType: return TupleType(list(a), self.fx.std_tuple) def callable(self, *a: Type) -> CallableType: """callable(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r. """ n = len(a) - 1 return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n, a[-1], self.fx.function) mypy-0.560/mypy/test/typefixture.py0000644€tŠÔÚ€2›s®0000002560113215007205023612 0ustar jukkaDROPBOX\Domain Users00000000000000"""Fixture used in type-related test cases. It contains class TypeInfos and Type objects. """ from typing import List, Optional from mypy.types import ( Type, TypeVarType, AnyType, NoneTyp, Instance, CallableType, TypeVarDef, TypeType, UninhabitedType, TypeOfAny ) from mypy.nodes import ( TypeInfo, ClassDef, Block, ARG_POS, ARG_OPT, ARG_STAR, SymbolTable, COVARIANT) class TypeFixture: """Helper class that is used as a fixture in type-related unit tests. The members are initialized to contain various type-related values. 
""" def __init__(self, variance: int=COVARIANT) -> None: # The 'object' class self.oi = self.make_type_info('builtins.object') # class object self.o = Instance(self.oi, []) # object # Type variables (these are effectively global) def make_type_var(name: str, id: int, values: List[Type], upper_bound: Type, variance: int) -> TypeVarType: return TypeVarType(TypeVarDef(name, name, id, values, upper_bound, variance)) self.t = make_type_var('T', 1, [], self.o, variance) # T`1 (type variable) self.tf = make_type_var('T', -1, [], self.o, variance) # T`-1 (type variable) self.tf2 = make_type_var('T', -2, [], self.o, variance) # T`-2 (type variable) self.s = make_type_var('S', 2, [], self.o, variance) # S`2 (type variable) self.s1 = make_type_var('S', 1, [], self.o, variance) # S`1 (type variable) self.sf = make_type_var('S', -2, [], self.o, variance) # S`-2 (type variable) self.sf1 = make_type_var('S', -1, [], self.o, variance) # S`-1 (type variable) # Simple types self.anyt = AnyType(TypeOfAny.special_form) self.nonet = NoneTyp() self.uninhabited = UninhabitedType() # Abstract class TypeInfos # class F self.fi = self.make_type_info('F', is_abstract=True) # class F2 self.f2i = self.make_type_info('F2', is_abstract=True) # class F3(F) self.f3i = self.make_type_info('F3', is_abstract=True, mro=[self.fi]) # Class TypeInfos self.std_tuplei = self.make_type_info('builtins.tuple') # class tuple self.type_typei = self.make_type_info('builtins.type') # class type self.functioni = self.make_type_info('builtins.function') # function TODO self.ai = self.make_type_info('A', mro=[self.oi]) # class A self.bi = self.make_type_info('B', mro=[self.ai, self.oi]) # class B(A) self.ci = self.make_type_info('C', mro=[self.ai, self.oi]) # class C(A) self.di = self.make_type_info('D', mro=[self.oi]) # class D # class E(F) self.ei = self.make_type_info('E', mro=[self.fi, self.oi]) # class E2(F2, F) self.e2i = self.make_type_info('E2', mro=[self.f2i, self.fi, self.oi]) # class E3(F, F2) self.e3i = self.make_type_info('E3', mro=[self.fi, self.f2i, self.oi]) # Generic class TypeInfos # G[T] self.gi = self.make_type_info('G', mro=[self.oi], typevars=['T'], variances=[variance]) # G2[T] self.g2i = self.make_type_info('G2', mro=[self.oi], typevars=['T'], variances=[variance]) # H[S, T] self.hi = self.make_type_info('H', mro=[self.oi], typevars=['S', 'T'], variances=[variance, variance]) # GS[T, S] <: G[S] self.gsi = self.make_type_info('GS', mro=[self.gi, self.oi], typevars=['T', 'S'], variances=[variance, variance], bases=[Instance(self.gi, [self.s])]) # GS2[S] <: G[S] self.gs2i = self.make_type_info('GS2', mro=[self.gi, self.oi], typevars=['S'], variances=[variance], bases=[Instance(self.gi, [self.s1])]) # list[T] self.std_listi = self.make_type_info('builtins.list', mro=[self.oi], typevars=['T'], variances=[variance]) # Instance types self.std_tuple = Instance(self.std_tuplei, []) # tuple self.type_type = Instance(self.type_typei, []) # type self.function = Instance(self.functioni, []) # function TODO self.a = Instance(self.ai, []) # A self.b = Instance(self.bi, []) # B self.c = Instance(self.ci, []) # C self.d = Instance(self.di, []) # D self.e = Instance(self.ei, []) # E self.e2 = Instance(self.e2i, []) # E2 self.e3 = Instance(self.e3i, []) # E3 self.f = Instance(self.fi, []) # F self.f2 = Instance(self.f2i, []) # F2 self.f3 = Instance(self.f3i, []) # F3 # Generic instance types self.ga = Instance(self.gi, [self.a]) # G[A] self.gb = Instance(self.gi, [self.b]) # G[B] self.gd = Instance(self.gi, [self.d]) # G[D] self.go = 
Instance(self.gi, [self.o]) # G[object] self.gt = Instance(self.gi, [self.t]) # G[T`1] self.gtf = Instance(self.gi, [self.tf]) # G[T`-1] self.gtf2 = Instance(self.gi, [self.tf2]) # G[T`-2] self.gs = Instance(self.gi, [self.s]) # G[S] self.gdyn = Instance(self.gi, [self.anyt]) # G[Any] self.g2a = Instance(self.g2i, [self.a]) # G2[A] self.gsaa = Instance(self.gsi, [self.a, self.a]) # GS[A, A] self.gsab = Instance(self.gsi, [self.a, self.b]) # GS[A, B] self.gsba = Instance(self.gsi, [self.b, self.a]) # GS[B, A] self.gs2a = Instance(self.gs2i, [self.a]) # GS2[A] self.gs2b = Instance(self.gs2i, [self.b]) # GS2[B] self.gs2d = Instance(self.gs2i, [self.d]) # GS2[D] self.hab = Instance(self.hi, [self.a, self.b]) # H[A, B] self.haa = Instance(self.hi, [self.a, self.a]) # H[A, A] self.hbb = Instance(self.hi, [self.b, self.b]) # H[B, B] self.hts = Instance(self.hi, [self.t, self.s]) # H[T, S] self.had = Instance(self.hi, [self.a, self.d]) # H[A, D] self.lsta = Instance(self.std_listi, [self.a]) # List[A] self.lstb = Instance(self.std_listi, [self.b]) # List[B] self.type_a = TypeType.make_normalized(self.a) self.type_b = TypeType.make_normalized(self.b) self.type_c = TypeType.make_normalized(self.c) self.type_d = TypeType.make_normalized(self.d) self.type_t = TypeType.make_normalized(self.t) self.type_any = TypeType.make_normalized(self.anyt) # Helper methods def callable(self, *a: Type) -> CallableType: """callable(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r. """ return CallableType(list(a[:-1]), [ARG_POS] * (len(a) - 1), [None] * (len(a) - 1), a[-1], self.function) def callable_type(self, *a: Type) -> CallableType: """callable_type(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r, and which represents a type. """ return CallableType(list(a[:-1]), [ARG_POS] * (len(a) - 1), [None] * (len(a) - 1), a[-1], self.type_type) def callable_default(self, min_args: int, *a: Type) -> CallableType: """callable_default(min_args, a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r, with min_args mandatory fixed arguments. """ n = len(a) - 1 return CallableType(list(a[:-1]), [ARG_POS] * min_args + [ARG_OPT] * (n - min_args), [None] * n, a[-1], self.function) def callable_var_arg(self, min_args: int, *a: Type) -> CallableType: """callable_var_arg(min_args, a1, ..., an, r) constructs a callable with argument types a1, ... *an and return type r. """ n = len(a) - 1 return CallableType(list(a[:-1]), [ARG_POS] * min_args + [ARG_OPT] * (n - 1 - min_args) + [ARG_STAR], [None] * n, a[-1], self.function) def make_type_info(self, name: str, module_name: Optional[str] = None, is_abstract: bool = False, mro: Optional[List[TypeInfo]] = None, bases: Optional[List[Instance]] = None, typevars: Optional[List[str]] = None, variances: Optional[List[int]] = None) -> TypeInfo: """Make a TypeInfo suitable for use in unit tests.""" class_def = ClassDef(name, Block([]), None, []) class_def.fullname = name if module_name is None: if '.' 
in name: module_name = name.rsplit('.', 1)[0] else: module_name = '__main__' if typevars: v = [] # type: List[TypeVarDef] for id, n in enumerate(typevars, 1): if variances: variance = variances[id - 1] else: variance = COVARIANT v.append(TypeVarDef(n, n, id, [], self.o, variance=variance)) class_def.type_vars = v info = TypeInfo(SymbolTable(), class_def, module_name) if mro is None: mro = [] if name != 'builtins.object': mro.append(self.oi) info.mro = [info] + mro if bases is None: if mro: # By default, assume that there is a single non-generic base. bases = [Instance(mro[0], [])] else: bases = [] info.bases = bases return info class InterfaceTypeFixture(TypeFixture): """Extension of TypeFixture that contains additional generic interface types.""" def __init__(self) -> None: super().__init__() # GF[T] self.gfi = self.make_type_info('GF', typevars=['T'], is_abstract=True) # M1 <: GF[A] self.m1i = self.make_type_info('M1', is_abstract=True, mro=[self.gfi, self.oi], bases=[Instance(self.gfi, [self.a])]) self.gfa = Instance(self.gfi, [self.a]) # GF[A] self.gfb = Instance(self.gfi, [self.b]) # GF[B] self.m1 = Instance(self.m1i, []) # M1 mypy-0.560/mypy/test/update.py0000644€tŠÔÚ€2›s®0000000000013215007205022466 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/mypy/traverser.py0000644€tŠÔÚ€2›s®0000002045613215007205022263 0ustar jukkaDROPBOX\Domain Users00000000000000"""Generic node traverser visitor""" from mypy.visitor import NodeVisitor from mypy.nodes import ( Block, MypyFile, FuncItem, CallExpr, ClassDef, Decorator, FuncDef, ExpressionStmt, AssignmentStmt, OperatorAssignmentStmt, WhileStmt, ForStmt, ReturnStmt, AssertStmt, DelStmt, IfStmt, RaiseStmt, TryStmt, WithStmt, MemberExpr, OpExpr, SliceExpr, CastExpr, RevealTypeExpr, UnaryExpr, ListExpr, TupleExpr, DictExpr, SetExpr, IndexExpr, GeneratorExpr, ListComprehension, SetComprehension, DictionaryComprehension, ConditionalExpr, TypeApplication, ExecStmt, Import, ImportFrom, LambdaExpr, ComparisonExpr, OverloadedFuncDef, YieldFromExpr, YieldExpr, StarExpr, BackquoteExpr, AwaitExpr, PrintStmt, ) class TraverserVisitor(NodeVisitor[None]): """A parse tree visitor that traverses the parse tree during visiting. It does not peform any actions outside the traversal. Subclasses should override visit methods to perform actions during traversal. Calling the superclass method allows reusing the traversal implementation. 
""" # Visit methods def visit_mypy_file(self, o: MypyFile) -> None: for d in o.defs: d.accept(self) def visit_block(self, block: Block) -> None: for s in block.body: s.accept(self) def visit_func(self, o: FuncItem) -> None: for arg in o.arguments: init = arg.initializer if init is not None: init.accept(self) for arg in o.arguments: self.visit_var(arg.variable) o.body.accept(self) def visit_func_def(self, o: FuncDef) -> None: self.visit_func(o) def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: for item in o.items: item.accept(self) if o.impl: o.impl.accept(self) def visit_class_def(self, o: ClassDef) -> None: for d in o.decorators: d.accept(self) for base in o.base_type_exprs: base.accept(self) o.defs.accept(self) def visit_decorator(self, o: Decorator) -> None: o.func.accept(self) o.var.accept(self) for decorator in o.decorators: decorator.accept(self) def visit_expression_stmt(self, o: ExpressionStmt) -> None: o.expr.accept(self) def visit_assignment_stmt(self, o: AssignmentStmt) -> None: o.rvalue.accept(self) for l in o.lvalues: l.accept(self) def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> None: o.rvalue.accept(self) o.lvalue.accept(self) def visit_while_stmt(self, o: WhileStmt) -> None: o.expr.accept(self) o.body.accept(self) if o.else_body: o.else_body.accept(self) def visit_for_stmt(self, o: ForStmt) -> None: o.index.accept(self) o.expr.accept(self) o.body.accept(self) if o.else_body: o.else_body.accept(self) def visit_return_stmt(self, o: ReturnStmt) -> None: if o.expr is not None: o.expr.accept(self) def visit_assert_stmt(self, o: AssertStmt) -> None: if o.expr is not None: o.expr.accept(self) if o.msg is not None: o.msg.accept(self) def visit_del_stmt(self, o: DelStmt) -> None: if o.expr is not None: o.expr.accept(self) def visit_if_stmt(self, o: IfStmt) -> None: for e in o.expr: e.accept(self) for b in o.body: b.accept(self) if o.else_body: o.else_body.accept(self) def visit_raise_stmt(self, o: RaiseStmt) -> None: if o.expr is not None: o.expr.accept(self) if o.from_expr is not None: o.from_expr.accept(self) def visit_try_stmt(self, o: TryStmt) -> None: o.body.accept(self) for i in range(len(o.types)): tp = o.types[i] if tp is not None: tp.accept(self) o.handlers[i].accept(self) if o.else_body is not None: o.else_body.accept(self) if o.finally_body is not None: o.finally_body.accept(self) def visit_with_stmt(self, o: WithStmt) -> None: for i in range(len(o.expr)): o.expr[i].accept(self) targ = o.target[i] if targ is not None: targ.accept(self) o.body.accept(self) def visit_member_expr(self, o: MemberExpr) -> None: o.expr.accept(self) def visit_yield_from_expr(self, o: YieldFromExpr) -> None: o.expr.accept(self) def visit_yield_expr(self, o: YieldExpr) -> None: if o.expr: o.expr.accept(self) def visit_call_expr(self, o: CallExpr) -> None: for a in o.args: a.accept(self) o.callee.accept(self) if o.analyzed: o.analyzed.accept(self) def visit_op_expr(self, o: OpExpr) -> None: o.left.accept(self) o.right.accept(self) def visit_comparison_expr(self, o: ComparisonExpr) -> None: for operand in o.operands: operand.accept(self) def visit_slice_expr(self, o: SliceExpr) -> None: if o.begin_index is not None: o.begin_index.accept(self) if o.end_index is not None: o.end_index.accept(self) if o.stride is not None: o.stride.accept(self) def visit_cast_expr(self, o: CastExpr) -> None: o.expr.accept(self) def visit_reveal_type_expr(self, o: RevealTypeExpr) -> None: o.expr.accept(self) def visit_unary_expr(self, o: UnaryExpr) -> None: o.expr.accept(self) def 
visit_list_expr(self, o: ListExpr) -> None: for item in o.items: item.accept(self) def visit_tuple_expr(self, o: TupleExpr) -> None: for item in o.items: item.accept(self) def visit_dict_expr(self, o: DictExpr) -> None: for k, v in o.items: if k is not None: k.accept(self) v.accept(self) def visit_set_expr(self, o: SetExpr) -> None: for item in o.items: item.accept(self) def visit_index_expr(self, o: IndexExpr) -> None: o.base.accept(self) o.index.accept(self) if o.analyzed: o.analyzed.accept(self) def visit_generator_expr(self, o: GeneratorExpr) -> None: for index, sequence, conditions in zip(o.indices, o.sequences, o.condlists): sequence.accept(self) index.accept(self) for cond in conditions: cond.accept(self) o.left_expr.accept(self) def visit_dictionary_comprehension(self, o: DictionaryComprehension) -> None: for index, sequence, conditions in zip(o.indices, o.sequences, o.condlists): sequence.accept(self) index.accept(self) for cond in conditions: cond.accept(self) o.key.accept(self) o.value.accept(self) def visit_list_comprehension(self, o: ListComprehension) -> None: o.generator.accept(self) def visit_set_comprehension(self, o: SetComprehension) -> None: o.generator.accept(self) def visit_conditional_expr(self, o: ConditionalExpr) -> None: o.cond.accept(self) o.if_expr.accept(self) o.else_expr.accept(self) def visit_type_application(self, o: TypeApplication) -> None: o.expr.accept(self) def visit_lambda_expr(self, o: LambdaExpr) -> None: self.visit_func(o) def visit_star_expr(self, o: StarExpr) -> None: o.expr.accept(self) def visit_backquote_expr(self, o: BackquoteExpr) -> None: o.expr.accept(self) def visit_await_expr(self, o: AwaitExpr) -> None: o.expr.accept(self) def visit_import(self, o: Import) -> None: for a in o.assignments: a.accept(self) def visit_import_from(self, o: ImportFrom) -> None: for a in o.assignments: a.accept(self) def visit_print_stmt(self, o: PrintStmt) -> None: for arg in o.args: arg.accept(self) def visit_exec_stmt(self, o: ExecStmt) -> None: o.expr.accept(self) if o.globals: o.globals.accept(self) if o.locals: o.locals.accept(self) mypy-0.560/mypy/treetransform.py0000644€tŠÔÚ€2›s®0000005724613215007205023150 0ustar jukkaDROPBOX\Domain Users00000000000000"""Base visitor that implements an identity AST transform. Subclass TransformVisitor to perform non-trivial transformations. 
""" from typing import List, Dict, cast, Optional, Iterable from mypy.nodes import ( MypyFile, Import, Node, ImportAll, ImportFrom, FuncItem, FuncDef, OverloadedFuncDef, ClassDef, Decorator, Block, Var, OperatorAssignmentStmt, ExpressionStmt, AssignmentStmt, ReturnStmt, RaiseStmt, AssertStmt, DelStmt, BreakStmt, ContinueStmt, PassStmt, GlobalDecl, WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt, CastExpr, RevealTypeExpr, TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr, DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr, UnaryExpr, LambdaExpr, TypeApplication, PrintStmt, SymbolTable, RefExpr, TypeVarExpr, NewTypeExpr, PromoteExpr, ComparisonExpr, TempNode, StarExpr, Statement, Expression, YieldFromExpr, NamedTupleExpr, TypedDictExpr, NonlocalDecl, SetComprehension, DictionaryComprehension, ComplexExpr, TypeAliasExpr, EllipsisExpr, YieldExpr, ExecStmt, Argument, BackquoteExpr, AwaitExpr, OverloadPart, EnumCallExpr, ) from mypy.types import Type, FunctionLike from mypy.traverser import TraverserVisitor from mypy.visitor import NodeVisitor class TransformVisitor(NodeVisitor[Node]): """Transform a semantically analyzed AST (or subtree) to an identical copy. Use the node() method to transform an AST node. Subclass to perform a non-identity transform. Notes: * Do not duplicate TypeInfo nodes. This would generally not be desirable. * Only update some name binding cross-references, but only those that refer to Var, Decorator or FuncDef nodes, not those targeting ClassDef or TypeInfo nodes. * Types are not transformed, but you can override type() to also perform type transformation. TODO nested classes and functions have not been tested well enough """ def __init__(self) -> None: # There may be multiple references to a Var node. Keep track of # Var translations using a dictionary. self.var_map = {} # type: Dict[Var, Var] # These are uninitialized placeholder nodes used temporarily for nested # functions while we are transforming a top-level function. This maps an # untransformed node to a placeholder (which will later become the # transformed node). self.func_placeholder_map = {} # type: Dict[FuncDef, FuncDef] def visit_mypy_file(self, node: MypyFile) -> MypyFile: # NOTE: The 'names' and 'imports' instance variables will be empty! new = MypyFile(self.statements(node.defs), [], node.is_bom, ignored_lines=set(node.ignored_lines)) new._name = node._name new._fullname = node._fullname new.path = node.path new.names = SymbolTable() return new def visit_import(self, node: Import) -> Import: return Import(node.ids[:]) def visit_import_from(self, node: ImportFrom) -> ImportFrom: return ImportFrom(node.id, node.relative, node.names[:]) def visit_import_all(self, node: ImportAll) -> ImportAll: return ImportAll(node.id, node.relative) def copy_argument(self, argument: Argument) -> Argument: arg = Argument( self.visit_var(argument.variable), argument.type_annotation, argument.initializer, argument.kind, ) # Refresh lines of the inner things arg.set_line(argument.line) return arg def visit_func_def(self, node: FuncDef) -> FuncDef: # Note that a FuncDef must be transformed to a FuncDef. # These contortions are needed to handle the case of recursive # references inside the function being transformed. # Set up placholder nodes for references within this function # to other functions defined inside it. 
# Don't create an entry for this function itself though, # since we want self-references to point to the original # function if this is the top-level node we are transforming. init = FuncMapInitializer(self) for stmt in node.body.body: stmt.accept(init) new = FuncDef(node.name(), [self.copy_argument(arg) for arg in node.arguments], self.block(node.body), cast(FunctionLike, self.optional_type(node.type))) self.copy_function_attributes(new, node) new._fullname = node._fullname new.is_decorated = node.is_decorated new.is_conditional = node.is_conditional new.is_abstract = node.is_abstract new.is_static = node.is_static new.is_class = node.is_class new.is_property = node.is_property new.original_def = node.original_def if node in self.func_placeholder_map: # There is a placeholder definition for this function. Replace # the attributes of the placeholder with those form the transformed # function. We know that the classes will be identical (otherwise # this wouldn't work). result = self.func_placeholder_map[node] result.__dict__ = new.__dict__ return result else: return new def visit_lambda_expr(self, node: LambdaExpr) -> LambdaExpr: new = LambdaExpr([self.copy_argument(arg) for arg in node.arguments], self.block(node.body), cast(FunctionLike, self.optional_type(node.type))) self.copy_function_attributes(new, node) return new def copy_function_attributes(self, new: FuncItem, original: FuncItem) -> None: new.info = original.info new.min_args = original.min_args new.max_pos = original.max_pos new.is_overload = original.is_overload new.is_generator = original.is_generator new.line = original.line def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> OverloadedFuncDef: items = [cast(OverloadPart, item.accept(self)) for item in node.items] for newitem, olditem in zip(items, node.items): newitem.line = olditem.line new = OverloadedFuncDef(items) new._fullname = node._fullname new.type = self.optional_type(node.type) new.info = node.info if node.impl: new.impl = cast(OverloadPart, node.impl.accept(self)) return new def visit_class_def(self, node: ClassDef) -> ClassDef: new = ClassDef(node.name, self.block(node.defs), node.type_vars, self.expressions(node.base_type_exprs), self.optional_expr(node.metaclass)) new.fullname = node.fullname new.info = node.info new.decorators = [self.expr(decorator) for decorator in node.decorators] return new def visit_global_decl(self, node: GlobalDecl) -> GlobalDecl: return GlobalDecl(node.names[:]) def visit_nonlocal_decl(self, node: NonlocalDecl) -> NonlocalDecl: return NonlocalDecl(node.names[:]) def visit_block(self, node: Block) -> Block: return Block(self.statements(node.body)) def visit_decorator(self, node: Decorator) -> Decorator: # Note that a Decorator must be transformed to a Decorator. func = self.visit_func_def(node.func) func.line = node.func.line new = Decorator(func, self.expressions(node.decorators), self.visit_var(node.var)) new.is_overload = node.is_overload return new def visit_var(self, node: Var) -> Var: # Note that a Var must be transformed to a Var. 
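# Reuse an existing translation if this Var has already been copied: multiple
# references to the same Var node must all map to a single transformed Var
# (see the var_map dictionary set up in __init__).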
if node in self.var_map: return self.var_map[node] new = Var(node.name(), self.optional_type(node.type)) new.line = node.line new._fullname = node._fullname new.info = node.info new.is_self = node.is_self new.is_ready = node.is_ready new.is_initialized_in_class = node.is_initialized_in_class new.is_staticmethod = node.is_staticmethod new.is_classmethod = node.is_classmethod new.is_property = node.is_property new.set_line(node.line) self.var_map[node] = new return new def visit_expression_stmt(self, node: ExpressionStmt) -> ExpressionStmt: return ExpressionStmt(self.expr(node.expr)) def visit_assignment_stmt(self, node: AssignmentStmt) -> AssignmentStmt: return self.duplicate_assignment(node) def duplicate_assignment(self, node: AssignmentStmt) -> AssignmentStmt: new = AssignmentStmt(self.expressions(node.lvalues), self.expr(node.rvalue), self.optional_type(node.type)) new.line = node.line return new def visit_operator_assignment_stmt(self, node: OperatorAssignmentStmt) -> OperatorAssignmentStmt: return OperatorAssignmentStmt(node.op, self.expr(node.lvalue), self.expr(node.rvalue)) def visit_while_stmt(self, node: WhileStmt) -> WhileStmt: return WhileStmt(self.expr(node.expr), self.block(node.body), self.optional_block(node.else_body)) def visit_for_stmt(self, node: ForStmt) -> ForStmt: return ForStmt(self.expr(node.index), self.expr(node.expr), self.block(node.body), self.optional_block(node.else_body), self.optional_type(node.index_type)) def visit_return_stmt(self, node: ReturnStmt) -> ReturnStmt: return ReturnStmt(self.optional_expr(node.expr)) def visit_assert_stmt(self, node: AssertStmt) -> AssertStmt: return AssertStmt(self.expr(node.expr), self.optional_expr(node.msg)) def visit_del_stmt(self, node: DelStmt) -> DelStmt: return DelStmt(self.expr(node.expr)) def visit_if_stmt(self, node: IfStmt) -> IfStmt: return IfStmt(self.expressions(node.expr), self.blocks(node.body), self.optional_block(node.else_body)) def visit_break_stmt(self, node: BreakStmt) -> BreakStmt: return BreakStmt() def visit_continue_stmt(self, node: ContinueStmt) -> ContinueStmt: return ContinueStmt() def visit_pass_stmt(self, node: PassStmt) -> PassStmt: return PassStmt() def visit_raise_stmt(self, node: RaiseStmt) -> RaiseStmt: return RaiseStmt(self.optional_expr(node.expr), self.optional_expr(node.from_expr)) def visit_try_stmt(self, node: TryStmt) -> TryStmt: return TryStmt(self.block(node.body), self.optional_names(node.vars), self.optional_expressions(node.types), self.blocks(node.handlers), self.optional_block(node.else_body), self.optional_block(node.finally_body)) def visit_with_stmt(self, node: WithStmt) -> WithStmt: return WithStmt(self.expressions(node.expr), self.optional_expressions(node.target), self.block(node.body), self.optional_type(node.target_type)) def visit_print_stmt(self, node: PrintStmt) -> PrintStmt: return PrintStmt(self.expressions(node.args), node.newline, self.optional_expr(node.target)) def visit_exec_stmt(self, node: ExecStmt) -> ExecStmt: return ExecStmt(self.expr(node.expr), self.optional_expr(node.globals), self.optional_expr(node.locals)) def visit_star_expr(self, node: StarExpr) -> StarExpr: return StarExpr(node.expr) def visit_int_expr(self, node: IntExpr) -> IntExpr: return IntExpr(node.value) def visit_str_expr(self, node: StrExpr) -> StrExpr: return StrExpr(node.value) def visit_bytes_expr(self, node: BytesExpr) -> BytesExpr: return BytesExpr(node.value) def visit_unicode_expr(self, node: UnicodeExpr) -> UnicodeExpr: return UnicodeExpr(node.value) def visit_float_expr(self, 
node: FloatExpr) -> FloatExpr: return FloatExpr(node.value) def visit_complex_expr(self, node: ComplexExpr) -> ComplexExpr: return ComplexExpr(node.value) def visit_ellipsis(self, node: EllipsisExpr) -> EllipsisExpr: return EllipsisExpr() def visit_name_expr(self, node: NameExpr) -> NameExpr: return self.duplicate_name(node) def duplicate_name(self, node: NameExpr) -> NameExpr: # This method is used when the transform result must be a NameExpr. # visit_name_expr() is used when there is no such restriction. new = NameExpr(node.name) self.copy_ref(new, node) return new def visit_member_expr(self, node: MemberExpr) -> MemberExpr: member = MemberExpr(self.expr(node.expr), node.name) if node.def_var: # This refers to an attribute and we don't transform attributes by default, # just normal variables. member.def_var = node.def_var self.copy_ref(member, node) return member def copy_ref(self, new: RefExpr, original: RefExpr) -> None: new.kind = original.kind new.fullname = original.fullname target = original.node if isinstance(target, Var): target = self.visit_var(target) elif isinstance(target, Decorator): target = self.visit_var(target.var) elif isinstance(target, FuncDef): # Use a placeholder node for the function if it exists. target = self.func_placeholder_map.get(target, target) new.node = target new.is_new_def = original.is_new_def new.is_inferred_def = original.is_inferred_def def visit_yield_from_expr(self, node: YieldFromExpr) -> YieldFromExpr: return YieldFromExpr(self.expr(node.expr)) def visit_yield_expr(self, node: YieldExpr) -> YieldExpr: return YieldExpr(self.optional_expr(node.expr)) def visit_await_expr(self, node: AwaitExpr) -> AwaitExpr: return AwaitExpr(self.expr(node.expr)) def visit_call_expr(self, node: CallExpr) -> CallExpr: return CallExpr(self.expr(node.callee), self.expressions(node.args), node.arg_kinds[:], node.arg_names[:], self.optional_expr(node.analyzed)) def visit_op_expr(self, node: OpExpr) -> OpExpr: new = OpExpr(node.op, self.expr(node.left), self.expr(node.right)) new.method_type = self.optional_type(node.method_type) return new def visit_comparison_expr(self, node: ComparisonExpr) -> ComparisonExpr: new = ComparisonExpr(node.operators, self.expressions(node.operands)) new.method_types = [self.optional_type(t) for t in node.method_types] return new def visit_cast_expr(self, node: CastExpr) -> CastExpr: return CastExpr(self.expr(node.expr), self.type(node.type)) def visit_reveal_type_expr(self, node: RevealTypeExpr) -> RevealTypeExpr: return RevealTypeExpr(self.expr(node.expr)) def visit_super_expr(self, node: SuperExpr) -> SuperExpr: call = self.expr(node.call) assert isinstance(call, CallExpr) new = SuperExpr(node.name, call) new.info = node.info return new def visit_unary_expr(self, node: UnaryExpr) -> UnaryExpr: new = UnaryExpr(node.op, self.expr(node.expr)) new.method_type = self.optional_type(node.method_type) return new def visit_list_expr(self, node: ListExpr) -> ListExpr: return ListExpr(self.expressions(node.items)) def visit_dict_expr(self, node: DictExpr) -> DictExpr: return DictExpr([(self.expr(key), self.expr(value)) for key, value in node.items]) def visit_tuple_expr(self, node: TupleExpr) -> TupleExpr: return TupleExpr(self.expressions(node.items)) def visit_set_expr(self, node: SetExpr) -> SetExpr: return SetExpr(self.expressions(node.items)) def visit_index_expr(self, node: IndexExpr) -> IndexExpr: new = IndexExpr(self.expr(node.base), self.expr(node.index)) if node.method_type: new.method_type = self.type(node.method_type) if node.analyzed: 
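# The analyzed form of an index expression is either a TypeApplication or a
# TypeAliasExpr; copy whichever one is present so the transformed node keeps
# its analyzed counterpart.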
if isinstance(node.analyzed, TypeApplication): new.analyzed = self.visit_type_application(node.analyzed) else: new.analyzed = self.visit_type_alias_expr(node.analyzed) new.analyzed.set_line(node.analyzed.line) return new def visit_type_application(self, node: TypeApplication) -> TypeApplication: return TypeApplication(self.expr(node.expr), self.types(node.types)) def visit_list_comprehension(self, node: ListComprehension) -> ListComprehension: generator = self.duplicate_generator(node.generator) generator.set_line(node.generator.line) return ListComprehension(generator) def visit_set_comprehension(self, node: SetComprehension) -> SetComprehension: generator = self.duplicate_generator(node.generator) generator.set_line(node.generator.line) return SetComprehension(generator) def visit_dictionary_comprehension(self, node: DictionaryComprehension ) -> DictionaryComprehension: return DictionaryComprehension(self.expr(node.key), self.expr(node.value), [self.expr(index) for index in node.indices], [self.expr(s) for s in node.sequences], [[self.expr(cond) for cond in conditions] for conditions in node.condlists], node.is_async) def visit_generator_expr(self, node: GeneratorExpr) -> GeneratorExpr: return self.duplicate_generator(node) def duplicate_generator(self, node: GeneratorExpr) -> GeneratorExpr: return GeneratorExpr(self.expr(node.left_expr), [self.expr(index) for index in node.indices], [self.expr(s) for s in node.sequences], [[self.expr(cond) for cond in conditions] for conditions in node.condlists], node.is_async) def visit_slice_expr(self, node: SliceExpr) -> SliceExpr: return SliceExpr(self.optional_expr(node.begin_index), self.optional_expr(node.end_index), self.optional_expr(node.stride)) def visit_conditional_expr(self, node: ConditionalExpr) -> ConditionalExpr: return ConditionalExpr(self.expr(node.cond), self.expr(node.if_expr), self.expr(node.else_expr)) def visit_backquote_expr(self, node: BackquoteExpr) -> BackquoteExpr: return BackquoteExpr(self.expr(node.expr)) def visit_type_var_expr(self, node: TypeVarExpr) -> TypeVarExpr: return TypeVarExpr(node.name(), node.fullname(), self.types(node.values), self.type(node.upper_bound), variance=node.variance) def visit_type_alias_expr(self, node: TypeAliasExpr) -> TypeAliasExpr: return TypeAliasExpr(node.type, node.tvars, fallback=node.fallback, in_runtime=node.in_runtime) def visit_newtype_expr(self, node: NewTypeExpr) -> NewTypeExpr: res = NewTypeExpr(node.name, node.old_type, line=node.line) res.info = node.info return res def visit_namedtuple_expr(self, node: NamedTupleExpr) -> NamedTupleExpr: return NamedTupleExpr(node.info) def visit_enum_call_expr(self, node: EnumCallExpr) -> EnumCallExpr: return EnumCallExpr(node.info, node.items, node.values) def visit_typeddict_expr(self, node: TypedDictExpr) -> Node: return TypedDictExpr(node.info) def visit__promote_expr(self, node: PromoteExpr) -> PromoteExpr: return PromoteExpr(node.type) def visit_temp_node(self, node: TempNode) -> TempNode: return TempNode(self.type(node.type)) def node(self, node: Node) -> Node: new = node.accept(self) new.set_line(node.line) return new def mypyfile(self, node: MypyFile) -> MypyFile: new = node.accept(self) assert isinstance(new, MypyFile) new.set_line(node.line) return new def expr(self, expr: Expression) -> Expression: new = expr.accept(self) assert isinstance(new, Expression) new.set_line(expr.line) return new def stmt(self, stmt: Statement) -> Statement: new = stmt.accept(self) assert isinstance(new, Statement) new.set_line(stmt.line) return new # 
Helpers # # All the node helpers also propagate line numbers. def optional_expr(self, expr: Optional[Expression]) -> Optional[Expression]: if expr: return self.expr(expr) else: return None def block(self, block: Block) -> Block: new = self.visit_block(block) new.line = block.line return new def optional_block(self, block: Optional[Block]) -> Optional[Block]: if block: return self.block(block) else: return None def statements(self, statements: List[Statement]) -> List[Statement]: return [self.stmt(stmt) for stmt in statements] def expressions(self, expressions: List[Expression]) -> List[Expression]: return [self.expr(expr) for expr in expressions] def optional_expressions(self, expressions: Iterable[Optional[Expression]] ) -> List[Optional[Expression]]: return [self.optional_expr(expr) for expr in expressions] def blocks(self, blocks: List[Block]) -> List[Block]: return [self.block(block) for block in blocks] def names(self, names: List[NameExpr]) -> List[NameExpr]: return [self.duplicate_name(name) for name in names] def optional_names(self, names: Iterable[Optional[NameExpr]]) -> List[Optional[NameExpr]]: result = [] # type: List[Optional[NameExpr]] for name in names: if name: result.append(self.duplicate_name(name)) else: result.append(None) return result def type(self, type: Type) -> Type: # Override this method to transform types. return type def optional_type(self, type: Optional[Type]) -> Optional[Type]: if type: return self.type(type) else: return None def types(self, types: List[Type]) -> List[Type]: return [self.type(type) for type in types] class FuncMapInitializer(TraverserVisitor): """This traverser creates mappings from nested FuncDefs to placeholder FuncDefs. The placholders will later be replaced with transformed nodes. """ def __init__(self, transformer: TransformVisitor) -> None: self.transformer = transformer def visit_func_def(self, node: FuncDef) -> None: if node not in self.transformer.func_placeholder_map: # Haven't seen this FuncDef before, so create a placeholder node. self.transformer.func_placeholder_map[node] = FuncDef( node.name(), node.arguments, node.body, None) super().visit_func_def(node) mypy-0.560/mypy/tvar_scope.py0000644€tŠÔÚ€2›s®0000000657513215007205022421 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Dict, Union from mypy.types import TypeVarDef, TypeVarId from mypy.nodes import TypeVarExpr, SymbolTableNode class TypeVarScope: """Scope that holds bindings for type variables. Node fullname -> TypeVarDef.""" def __init__(self, parent: 'Optional[TypeVarScope]' = None, is_class_scope: bool = False, prohibited: 'Optional[TypeVarScope]' = None) -> None: """Initializer for TypeVarScope Parameters: parent: the outer scope for this scope is_class_scope: True if this represents a generic class prohibited: Type variables that aren't strictly in scope exactly, but can't be bound because they're part of an outer class's scope. 
""" self.scope = {} # type: Dict[str, TypeVarDef] self.parent = parent self.func_id = 0 self.class_id = 0 self.is_class_scope = is_class_scope self.prohibited = prohibited if parent is not None: self.func_id = parent.func_id self.class_id = parent.class_id def get_function_scope(self) -> 'Optional[TypeVarScope]': """Get the nearest parent that's a function scope, not a class scope""" it = self # type: Optional[TypeVarScope] while it is not None and it.is_class_scope: it = it.parent return it def allow_binding(self, fullname: str) -> bool: if fullname in self.scope: return False elif self.parent and not self.parent.allow_binding(fullname): return False elif self.prohibited and not self.prohibited.allow_binding(fullname): return False return True def method_frame(self) -> 'TypeVarScope': """A new scope frame for binding a method""" return TypeVarScope(self, False, None) def class_frame(self) -> 'TypeVarScope': """A new scope frame for binding a class. Prohibits *this* class's tvars""" return TypeVarScope(self.get_function_scope(), True, self) def bind(self, name: str, tvar_expr: TypeVarExpr) -> TypeVarDef: if self.is_class_scope: self.class_id += 1 i = self.class_id else: self.func_id -= 1 i = self.func_id tvar_def = TypeVarDef(name, tvar_expr.fullname(), i, values=tvar_expr.values, upper_bound=tvar_expr.upper_bound, variance=tvar_expr.variance, line=tvar_expr.line, column=tvar_expr.column) self.scope[tvar_expr.fullname()] = tvar_def return tvar_def def get_binding(self, item: Union[str, SymbolTableNode]) -> Optional[TypeVarDef]: fullname = item.fullname if isinstance(item, SymbolTableNode) else item assert fullname is not None if fullname in self.scope: return self.scope[fullname] elif self.parent is not None: return self.parent.get_binding(fullname) else: return None def __str__(self) -> str: me = ", ".join('{}: {}`{}'.format(k, v.name, v.id) for k, v in self.scope.items()) if self.parent is None: return me return "{} <- {}".format(str(self.parent), me) mypy-0.560/mypy/typeanal.py0000644€tŠÔÚ€2›s®0000013033213215007206022057 0ustar jukkaDROPBOX\Domain Users00000000000000"""Semantic analysis of types""" from collections import OrderedDict from typing import Callable, List, Optional, Set, Tuple, Iterator, TypeVar, Iterable, Dict from itertools import chain from contextlib import contextmanager import itertools from mypy.messages import MessageBuilder from mypy.options import Options from mypy.types import ( Type, UnboundType, TypeVarType, TupleType, TypedDictType, UnionType, Instance, AnyType, CallableType, NoneTyp, DeletedType, TypeList, TypeVarDef, TypeVisitor, SyntheticTypeVisitor, StarType, PartialType, EllipsisType, UninhabitedType, TypeType, get_typ_args, set_typ_args, CallableArgument, get_type_vars, TypeQuery, union_items, TypeOfAny, ForwardRef, Overloaded ) from mypy.nodes import ( TVAR, TYPE_ALIAS, UNBOUND_IMPORTED, TypeInfo, Context, SymbolTableNode, Var, Expression, IndexExpr, RefExpr, nongen_builtins, check_arg_names, check_arg_kinds, ARG_POS, ARG_NAMED, ARG_OPT, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2, TypeVarExpr, FuncDef, CallExpr, NameExpr, Decorator ) from mypy.tvar_scope import TypeVarScope from mypy.sametypes import is_same_type from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError from mypy.subtypes import is_subtype from mypy.plugin import Plugin, AnalyzerPluginInterface, AnalyzeTypeContext from mypy import nodes, messages T = TypeVar('T') type_constructors = { 'typing.Callable', 'typing.Optional', 'typing.Tuple', 'typing.Type', 'typing.Union', } 
ARG_KINDS_BY_CONSTRUCTOR = { 'mypy_extensions.Arg': ARG_POS, 'mypy_extensions.DefaultArg': ARG_OPT, 'mypy_extensions.NamedArg': ARG_NAMED, 'mypy_extensions.DefaultNamedArg': ARG_NAMED_OPT, 'mypy_extensions.VarArg': ARG_STAR, 'mypy_extensions.KwArg': ARG_STAR2, } def analyze_type_alias(node: Expression, lookup_func: Callable[[str, Context], Optional[SymbolTableNode]], lookup_fqn_func: Callable[[str], SymbolTableNode], tvar_scope: TypeVarScope, fail_func: Callable[[str, Context], None], note_func: Callable[[str, Context], None], plugin: Plugin, options: Options, is_typeshed_stub: bool, allow_unnormalized: bool = False, in_dynamic_func: bool = False, global_scope: bool = True, warn_bound_tvar: bool = False) -> Optional[Type]: """Return type if node is valid as a type alias rvalue. Return None otherwise. 'node' must have been semantically analyzed. """ # Quickly return None if the expression doesn't look like a type. Note # that we don't support straight string literals as type aliases # (only string literals within index expressions). if isinstance(node, RefExpr): # Note that this misses the case where someone tried to use a # class-referenced type variable as a type alias. It's easier to catch # that one in checkmember.py if node.kind == TVAR: fail_func('Type variable "{}" is invalid as target for type alias'.format( node.fullname), node) return None if not (isinstance(node.node, TypeInfo) or node.fullname == 'typing.Any' or node.kind == TYPE_ALIAS): return None elif isinstance(node, IndexExpr): base = node.base if isinstance(base, RefExpr): if not (isinstance(base.node, TypeInfo) or base.fullname in type_constructors or base.kind == TYPE_ALIAS): return None # Enums can't be generic, and without this check we may incorrectly interpret indexing # an Enum class as creating a type alias. if isinstance(base.node, TypeInfo) and base.node.is_enum: return None else: return None elif isinstance(node, CallExpr): if (isinstance(node.callee, NameExpr) and len(node.args) == 1 and isinstance(node.args[0], NameExpr)): call = lookup_func(node.callee.name, node.callee) arg = lookup_func(node.args[0].name, node.args[0]) if (call is not None and call.node and call.node.fullname() == 'builtins.type' and arg is not None and arg.node and arg.node.fullname() == 'builtins.None'): return NoneTyp() return None return None else: return None # It's a type alias (though it may be an invalid one). try: type = expr_to_unanalyzed_type(node) except TypeTranslationError: fail_func('Invalid type alias', node) return None analyzer = TypeAnalyser(lookup_func, lookup_fqn_func, tvar_scope, fail_func, note_func, plugin, options, is_typeshed_stub, aliasing=True, allow_unnormalized=allow_unnormalized, warn_bound_tvar=warn_bound_tvar) analyzer.in_dynamic_func = in_dynamic_func analyzer.global_scope = global_scope return type.accept(analyzer) def no_subscript_builtin_alias(name: str, propose_alt: bool = True) -> str: msg = '"{}" is not subscriptable'.format(name.split('.')[-1]) replacement = nongen_builtins[name] if replacement and propose_alt: msg += ', use "{}" instead'.format(replacement) return msg class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface): """Semantic analyzer for types (semantic analysis pass 2). Converts unbound types into bound types. """ # Is this called from an untyped function definition? in_dynamic_func = False # type: bool # Is this called from global scope? 
global_scope = True # type: bool def __init__(self, lookup_func: Callable[[str, Context], Optional[SymbolTableNode]], lookup_fqn_func: Callable[[str], SymbolTableNode], tvar_scope: Optional[TypeVarScope], fail_func: Callable[[str, Context], None], note_func: Callable[[str, Context], None], plugin: Plugin, options: Options, is_typeshed_stub: bool, *, aliasing: bool = False, allow_tuple_literal: bool = False, allow_unnormalized: bool = False, third_pass: bool = False, warn_bound_tvar: bool = False) -> None: self.lookup = lookup_func self.lookup_fqn_func = lookup_fqn_func self.fail_func = fail_func self.note_func = note_func self.tvar_scope = tvar_scope self.aliasing = aliasing self.allow_tuple_literal = allow_tuple_literal # Positive if we are analyzing arguments of another (outer) type self.nesting_level = 0 self.allow_unnormalized = allow_unnormalized self.plugin = plugin self.options = options self.is_typeshed_stub = is_typeshed_stub self.warn_bound_tvar = warn_bound_tvar self.third_pass = third_pass def visit_unbound_type(self, t: UnboundType) -> Type: if t.optional: t.optional = False # We don't need to worry about double-wrapping Optionals or # wrapping Anys: Union simplification will take care of that. return make_optional_type(self.visit_unbound_type(t)) sym = self.lookup(t.name, t, suppress_errors=self.third_pass) # type: ignore if sym is not None: if sym.node is None: # UNBOUND_IMPORTED can happen if an unknown name was imported. if sym.kind != UNBOUND_IMPORTED: self.fail('Internal error (node is None, kind={})'.format(sym.kind), t) return AnyType(TypeOfAny.special_form) fullname = sym.node.fullname() hook = self.plugin.get_type_analyze_hook(fullname) if hook: return hook(AnalyzeTypeContext(t, t, self)) if (fullname in nongen_builtins and t.args and not sym.normalized and not self.allow_unnormalized): self.fail(no_subscript_builtin_alias(fullname), t) if self.tvar_scope: tvar_def = self.tvar_scope.get_binding(sym) else: tvar_def = None if self.warn_bound_tvar and sym.kind == TVAR and tvar_def is not None: self.fail('Can\'t use bound type variable "{}"' ' to define generic alias'.format(t.name), t) return AnyType(TypeOfAny.from_error) elif sym.kind == TVAR and tvar_def is not None: if len(t.args) > 0: self.fail('Type variable "{}" used with arguments'.format( t.name), t) return TypeVarType(tvar_def, t.line) elif fullname == 'builtins.None': return NoneTyp() elif fullname == 'typing.Any' or fullname == 'builtins.Any': return AnyType(TypeOfAny.explicit) elif fullname == 'typing.Tuple': if len(t.args) == 0 and not t.empty_tuple_index: # Bare 'Tuple' is same as 'tuple' if self.options.disallow_any_generics and not self.is_typeshed_stub: self.fail(messages.BARE_GENERIC, t) typ = self.named_type('builtins.tuple', line=t.line, column=t.column) typ.from_generic_builtin = True return typ if len(t.args) == 2 and isinstance(t.args[1], EllipsisType): # Tuple[T, ...] (uniform, variable-length tuple) instance = self.named_type('builtins.tuple', [self.anal_type(t.args[0])]) instance.line = t.line return instance return self.tuple_type(self.anal_array(t.args)) elif fullname == 'typing.Union': items = self.anal_array(t.args) return UnionType.make_union(items) elif fullname == 'typing.Optional': if len(t.args) != 1: self.fail('Optional[...] 
must have exactly one type argument', t) return AnyType(TypeOfAny.from_error) item = self.anal_type(t.args[0]) return make_optional_type(item) elif fullname == 'typing.Callable': return self.analyze_callable_type(t) elif fullname == 'typing.Type': if len(t.args) == 0: any_type = AnyType(TypeOfAny.from_omitted_generics, line=t.line, column=t.column) return TypeType(any_type, line=t.line, column=t.column) if len(t.args) != 1: self.fail('Type[...] must have exactly one type argument', t) item = self.anal_type(t.args[0]) return TypeType.make_normalized(item, line=t.line) elif fullname == 'typing.ClassVar': if self.nesting_level > 0: self.fail('Invalid type: ClassVar nested inside other type', t) if len(t.args) == 0: return AnyType(TypeOfAny.from_omitted_generics, line=t.line, column=t.column) if len(t.args) != 1: self.fail('ClassVar[...] must have at most one type argument', t) return AnyType(TypeOfAny.from_error) item = self.anal_type(t.args[0]) if isinstance(item, TypeVarType) or get_type_vars(item): self.fail('Invalid type: ClassVar cannot be generic', t) return AnyType(TypeOfAny.from_error) return item elif fullname in ('mypy_extensions.NoReturn', 'typing.NoReturn'): return UninhabitedType(is_noreturn=True) elif sym.kind == TYPE_ALIAS: override = sym.type_override all_vars = sym.alias_tvars assert override is not None an_args = self.anal_array(t.args) if all_vars is not None: exp_len = len(all_vars) else: exp_len = 0 act_len = len(an_args) if exp_len > 0 and act_len == 0: # Interpret bare Alias same as normal generic, i.e., Alias[Any, Any, ...] assert all_vars is not None return set_any_tvars(override, all_vars, t.line, t.column) if exp_len == 0 and act_len == 0: return override if act_len != exp_len: self.fail('Bad number of arguments for type alias, expected: %s, given: %s' % (exp_len, act_len), t) return set_any_tvars(override, all_vars or [], t.line, t.column, implicit=False) assert all_vars is not None return replace_alias_tvars(override, all_vars, an_args, t.line, t.column) elif not isinstance(sym.node, TypeInfo): name = sym.fullname if name is None: name = sym.node.name() if isinstance(sym.node, Var) and isinstance(sym.node.type, AnyType): # Something with an Any type -- make it an alias for Any in a type # context. This is slightly problematic as it allows using the type 'Any' # as a base class -- however, this will fail soon at runtime so the problem # is pretty minor. return AnyType(TypeOfAny.from_unimported_type) # Allow unbound type variables when defining an alias if not (self.aliasing and sym.kind == TVAR and (not self.tvar_scope or self.tvar_scope.get_binding(sym) is None)): if (not self.third_pass and not self.in_dynamic_func and not (isinstance(sym.node, (FuncDef, Decorator)) or isinstance(sym.node, Var) and sym.node.is_ready) and not (sym.kind == TVAR and tvar_def is None)): if t.args and not self.global_scope: self.fail('Unsupported forward reference to "{}"'.format(t.name), t) return AnyType(TypeOfAny.from_error) return ForwardRef(t) self.fail('Invalid type "{}"'.format(name), t) if self.third_pass and sym.kind == TVAR: self.note_func("Forward references to type variables are prohibited", t) return t info = sym.node # type: TypeInfo if len(t.args) > 0 and info.fullname() == 'builtins.tuple': fallback = Instance(info, [AnyType(TypeOfAny.special_form)], t.line) return TupleType(self.anal_array(t.args), fallback, t.line) else: # Analyze arguments and construct Instance type. 
The # number of type arguments and their values are # checked only later, since we do not always know the # valid count at this point. Thus we may construct an # Instance with an invalid number of type arguments. instance = Instance(info, self.anal_array(t.args), t.line, t.column) instance.from_generic_builtin = sym.normalized tup = info.tuple_type if tup is not None: # The class has a Tuple[...] base class so it will be # represented as a tuple type. if t.args: self.fail('Generic tuple types not supported', t) return AnyType(TypeOfAny.from_error) return tup.copy_modified(items=self.anal_array(tup.items), fallback=instance) td = info.typeddict_type if td is not None: # The class has a TypedDict[...] base class so it will be # represented as a typeddict type. if t.args: self.fail('Generic TypedDict types not supported', t) return AnyType(TypeOfAny.from_error) # Create a named TypedDictType return td.copy_modified(item_types=self.anal_array(list(td.items.values())), fallback=instance) return instance else: if self.third_pass: self.fail('Invalid type "{}"'.format(t.name), t) return AnyType(TypeOfAny.from_error) return AnyType(TypeOfAny.special_form) def visit_any(self, t: AnyType) -> Type: return t def visit_none_type(self, t: NoneTyp) -> Type: return t def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return t def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_type_list(self, t: TypeList) -> Type: self.fail('Invalid type', t) return AnyType(TypeOfAny.from_error) def visit_callable_argument(self, t: CallableArgument) -> Type: self.fail('Invalid type', t) return AnyType(TypeOfAny.from_error) def visit_instance(self, t: Instance) -> Type: return t def visit_type_var(self, t: TypeVarType) -> Type: return t def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type: # Every Callable can bind its own type variables, if they're not in the outer scope with self.tvar_scope_frame(): if self.aliasing: variables = t.variables else: variables = self.bind_function_type_variables(t, t) ret = t.copy_modified(arg_types=self.anal_array(t.arg_types, nested=nested), ret_type=self.anal_type(t.ret_type, nested=nested), fallback=t.fallback or self.named_type('builtins.function'), variables=self.anal_var_defs(variables)) return ret def visit_tuple_type(self, t: TupleType) -> Type: # Types such as (t1, t2, ...) only allowed in assignment statements. They'll # generate errors elsewhere, and Tuple[t1, t2, ...] must be used instead. 
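# For example, an annotation written as '(int, str)' reaches this method as an
# implicit TupleType; it is accepted only where tuple literals are allowed
# (allow_tuple_literal), and otherwise reported as an invalid tuple literal
# type below.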
if t.implicit and not self.allow_tuple_literal: self.fail('Invalid tuple literal type', t) if len(t.items) == 1: self.note_func('Suggestion: Is there a spurious trailing comma?', t) return AnyType(TypeOfAny.from_error) star_count = sum(1 for item in t.items if isinstance(item, StarType)) if star_count > 1: self.fail('At most one star type allowed in a tuple', t) if t.implicit: return TupleType([AnyType(TypeOfAny.from_error) for _ in t.items], self.named_type('builtins.tuple'), t.line) else: return AnyType(TypeOfAny.from_error) any_type = AnyType(TypeOfAny.special_form) fallback = t.fallback if t.fallback else self.named_type('builtins.tuple', [any_type]) return TupleType(self.anal_array(t.items), fallback, t.line) def visit_typeddict_type(self, t: TypedDictType) -> Type: items = OrderedDict([ (item_name, self.anal_type(item_type)) for (item_name, item_type) in t.items.items() ]) return TypedDictType(items, set(t.required_keys), t.fallback) def visit_star_type(self, t: StarType) -> Type: return StarType(self.anal_type(t.type), t.line) def visit_union_type(self, t: UnionType) -> Type: return UnionType(self.anal_array(t.items), t.line) def visit_partial_type(self, t: PartialType) -> Type: assert False, "Internal error: Unexpected partial type" def visit_ellipsis_type(self, t: EllipsisType) -> Type: self.fail("Unexpected '...'", t) return AnyType(TypeOfAny.from_error) def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(self.anal_type(t.item), line=t.line) def visit_forwardref_type(self, t: ForwardRef) -> Type: return t def analyze_callable_type(self, t: UnboundType) -> Type: fallback = self.named_type('builtins.function') if len(t.args) == 0: # Callable (bare). Treat as Callable[..., Any]. any_type = AnyType(TypeOfAny.from_omitted_generics, line=t.line, column=t.column) ret = CallableType([any_type, any_type], [nodes.ARG_STAR, nodes.ARG_STAR2], [None, None], ret_type=any_type, fallback=fallback, is_ellipsis_args=True) elif len(t.args) == 2: ret_type = t.args[1] if isinstance(t.args[0], TypeList): # Callable[[ARG, ...], RET] (ordinary callable type) analyzed_args = self.analyze_callable_args(t.args[0]) if analyzed_args is None: return AnyType(TypeOfAny.from_error) args, kinds, names = analyzed_args ret = CallableType(args, kinds, names, ret_type=ret_type, fallback=fallback) elif isinstance(t.args[0], EllipsisType): # Callable[..., RET] (with literal ellipsis; accept arbitrary arguments) ret = CallableType([AnyType(TypeOfAny.explicit), AnyType(TypeOfAny.explicit)], [nodes.ARG_STAR, nodes.ARG_STAR2], [None, None], ret_type=ret_type, fallback=fallback, is_ellipsis_args=True) else: self.fail('The first argument to Callable must be a list of types or "..."', t) return AnyType(TypeOfAny.from_error) else: self.fail('Please use "Callable[[], ]" or "Callable"', t) return AnyType(TypeOfAny.from_error) assert isinstance(ret, CallableType) return ret.accept(self) def analyze_callable_args(self, arglist: TypeList) -> Optional[Tuple[List[Type], List[int], List[Optional[str]]]]: args = [] # type: List[Type] kinds = [] # type: List[int] names = [] # type: List[Optional[str]] for arg in arglist.items: if isinstance(arg, CallableArgument): args.append(arg.typ) names.append(arg.name) if arg.constructor is None: return None found = self.lookup(arg.constructor, arg) if found is None: # Looking it up already put an error message in return None elif found.fullname not in ARG_KINDS_BY_CONSTRUCTOR: self.fail('Invalid argument constructor "{}"'.format( found.fullname), arg) return None else: 
assert found.fullname is not None kind = ARG_KINDS_BY_CONSTRUCTOR[found.fullname] kinds.append(kind) if arg.name is not None and kind in {ARG_STAR, ARG_STAR2}: self.fail("{} arguments should not have names".format( arg.constructor), arg) return None else: args.append(arg) kinds.append(ARG_POS) names.append(None) # Note that arglist below is only used for error context. check_arg_names(names, [arglist] * len(args), self.fail, "Callable") check_arg_kinds(kinds, [arglist] * len(args), self.fail) return args, kinds, names def analyze_type(self, t: Type) -> Type: return t.accept(self) def fail(self, msg: str, ctx: Context) -> None: self.fail_func(msg, ctx) @contextmanager def tvar_scope_frame(self) -> Iterator[None]: old_scope = self.tvar_scope if self.tvar_scope: self.tvar_scope = self.tvar_scope.method_frame() else: assert self.third_pass, "Internal error: type variable scope not given" yield self.tvar_scope = old_scope def infer_type_variables(self, type: CallableType) -> List[Tuple[str, TypeVarExpr]]: """Return list of unique type variables referred to in a callable.""" if not self.tvar_scope: return [] # We are in third pass, nothing new here names = [] # type: List[str] tvars = [] # type: List[TypeVarExpr] for arg in type.arg_types: for name, tvar_expr in arg.accept(TypeVariableQuery(self.lookup, self.tvar_scope)): if name not in names: names.append(name) tvars.append(tvar_expr) # When finding type variables in the return type of a function, don't # look inside Callable types. Type variables only appearing in # functions in the return type belong to those functions, not the # function we're currently analyzing. for name, tvar_expr in type.ret_type.accept( TypeVariableQuery(self.lookup, self.tvar_scope, include_callables=False)): if name not in names: names.append(name) tvars.append(tvar_expr) return list(zip(names, tvars)) def bind_function_type_variables(self, fun_type: CallableType, defn: Context) -> List[TypeVarDef]: """Find the type variables of the function type and bind them in our tvar_scope""" if not self.tvar_scope: return [] # We are in third pass, nothing new here if fun_type.variables: for var in fun_type.variables: var_node = self.lookup(var.name, var) assert var_node, "Binding for function type variable not found within function" var_expr = var_node.node assert isinstance(var_expr, TypeVarExpr) self.tvar_scope.bind(var.name, var_expr) return fun_type.variables typevars = self.infer_type_variables(fun_type) # Do not define a new type variable if already defined in scope. 
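# Illustrative sketch (not part of the original source): for a generic
# signature such as
#
#     T = TypeVar('T')
#     def first(xs: List[T]) -> T: ...
#
# infer_type_variables() above finds T in the argument and return types,
# and the code below binds it in the current tvar_scope frame, skipping
# names that an enclosing scope has already bound.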
typevars = [(name, tvar) for name, tvar in typevars if not self.is_defined_type_var(name, defn)] defs = [] # type: List[TypeVarDef] for name, tvar in typevars: if not self.tvar_scope.allow_binding(tvar.fullname()): self.fail("Type variable '{}' is bound by an outer class".format(name), defn) self.tvar_scope.bind(name, tvar) binding = self.tvar_scope.get_binding(tvar.fullname()) assert binding is not None defs.append(binding) return defs def is_defined_type_var(self, tvar: str, context: Context) -> bool: if self.tvar_scope is None: return False tvar_node = self.lookup(tvar, context) if not tvar_node: return False return self.tvar_scope.get_binding(tvar_node) is not None def anal_array(self, a: List[Type], nested: bool = True) -> List[Type]: res = [] # type: List[Type] for t in a: res.append(self.anal_type(t, nested)) return res def anal_type(self, t: Type, nested: bool = True) -> Type: if nested: self.nesting_level += 1 try: return t.accept(self) finally: if nested: self.nesting_level -= 1 def anal_var_defs(self, var_defs: List[TypeVarDef]) -> List[TypeVarDef]: a = [] # type: List[TypeVarDef] for vd in var_defs: a.append(TypeVarDef(vd.name, vd.fullname, vd.id.raw_id, self.anal_array(vd.values), vd.upper_bound.accept(self), vd.variance, vd.line)) return a def named_type(self, fully_qualified_name: str, args: Optional[List[Type]] = None, line: int = -1, column: int = -1) -> Instance: node = self.lookup_fqn_func(fully_qualified_name) assert isinstance(node.node, TypeInfo) any_type = AnyType(TypeOfAny.special_form) return Instance(node.node, args or [any_type] * len(node.node.defn.type_vars), line=line, column=column) def tuple_type(self, items: List[Type]) -> TupleType: any_type = AnyType(TypeOfAny.special_form) return TupleType(items, fallback=self.named_type('builtins.tuple', [any_type])) class TypeAnalyserPass3(TypeVisitor[None]): """Analyze type argument counts and values of generic types. This is semantic analysis pass 3 for types. Perform these operations: * Report error for invalid type argument counts, such as List[x, y]. * Make implicit Any type arguments explicit my modifying types in-place. For example, modify Foo into Foo[Any] if Foo expects a single type argument. * If a type variable has a value restriction, ensure that the value is valid. For example, reject IO[int] if the type argument must be str or bytes. We can't do this earlier than the third pass, since type argument counts are only determined in pass 2, and we have to support forward references to types. """ def __init__(self, lookup_func: Callable[[str, Context], Optional[SymbolTableNode]], lookup_fqn_func: Callable[[str], SymbolTableNode], fail_func: Callable[[str, Context], None], note_func: Callable[[str, Context], None], plugin: Plugin, options: Options, is_typeshed_stub: bool, indicator: Dict[str, bool]) -> None: self.lookup_func = lookup_func self.lookup_fqn_func = lookup_fqn_func self.fail = fail_func self.note_func = note_func self.options = options self.plugin = plugin self.is_typeshed_stub = is_typeshed_stub self.indicator = indicator def visit_instance(self, t: Instance) -> None: info = t.type if info.replaced or info.tuple_type: self.indicator['synthetic'] = True # Check type argument count. 
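# Illustrative examples of this check (a sketch, not part of the original
# source):
#
#     x: Dict[int]   # error: "dict" expects 2 type arguments, but 1 given
#     y: List        # accepted; rewritten in place to List[Any] below
#                    # (unless --disallow-any-generics flags it)
#
# Wrong argument counts are patched up with Any arguments so later phases
# do not crash on a malformed Instance.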
if len(t.args) != len(info.type_vars): if len(t.args) == 0: from_builtins = t.type.fullname() in nongen_builtins and not t.from_generic_builtin if (self.options.disallow_any_generics and not self.is_typeshed_stub and from_builtins): alternative = nongen_builtins[t.type.fullname()] self.fail(messages.IMPLICIT_GENERIC_ANY_BUILTIN.format(alternative), t) # Insert implicit 'Any' type arguments. if from_builtins: # this 'Any' was already reported elsewhere any_type = AnyType(TypeOfAny.special_form, line=t.line, column=t.column) else: any_type = AnyType(TypeOfAny.from_omitted_generics, line=t.line, column=t.column) t.args = [any_type] * len(info.type_vars) return # Invalid number of type parameters. n = len(info.type_vars) s = '{} type arguments'.format(n) if n == 0: s = 'no type arguments' elif n == 1: s = '1 type argument' act = str(len(t.args)) if act == '0': act = 'none' self.fail('"{}" expects {}, but {} given'.format( info.name(), s, act), t) # Construct the correct number of type arguments, as # otherwise the type checker may crash as it expects # things to be right. t.args = [AnyType(TypeOfAny.from_error) for _ in info.type_vars] t.invalid = True elif info.defn.type_vars: # Check type argument values. # TODO: Calling is_subtype and is_same_types in semantic analysis is a bad idea for (i, arg), tvar in zip(enumerate(t.args), info.defn.type_vars): if tvar.values: if isinstance(arg, TypeVarType): arg_values = arg.values if not arg_values: self.fail('Type variable "{}" not valid as type ' 'argument value for "{}"'.format( arg.name, info.name()), t) continue else: arg_values = [arg] self.check_type_var_values(info, arg_values, tvar.name, tvar.values, i + 1, t) # TODO: These hacks will be not necessary when this will be moved to later stage. arg = self.resolve_type(arg) bound = self.resolve_type(tvar.upper_bound) if not is_subtype(arg, bound): self.fail('Type argument "{}" of "{}" must be ' 'a subtype of "{}"'.format( arg, info.name(), bound), t) for arg in t.args: arg.accept(self) if info.is_newtype: for base in info.bases: base.accept(self) def check_type_var_values(self, type: TypeInfo, actuals: List[Type], arg_name: str, valids: List[Type], arg_number: int, context: Context) -> None: for actual in actuals: actual = self.resolve_type(actual) if (not isinstance(actual, AnyType) and not any(is_same_type(actual, self.resolve_type(value)) for value in valids)): if len(actuals) > 1 or not isinstance(actual, Instance): self.fail('Invalid type argument value for "{}"'.format( type.name()), context) else: class_name = '"{}"'.format(type.name()) actual_type_name = '"{}"'.format(actual.type.name()) self.fail(messages.INCOMPATIBLE_TYPEVAR_VALUE.format( arg_name, class_name, actual_type_name), context) def resolve_type(self, tp: Type) -> Type: # This helper is only needed while is_subtype and is_same_type are # called in third pass. This can be removed when TODO in visit_instance is fixed. 
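# Illustrative example (not part of the original source): in
#
#     x: A
#     A = TypedDict('A', {'n': int})
#
# the annotation on 'x' is initially wrapped in a ForwardRef; this helper
# unwraps it (and a TypeInfo replaced by a synthetic tuple/typeddict type)
# so that the is_subtype and is_same_type calls in this pass operate on
# the real type.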
if isinstance(tp, ForwardRef): if tp.resolved is None: return tp.unbound tp = tp.resolved if isinstance(tp, Instance) and tp.type.replaced: replaced = tp.type.replaced if replaced.tuple_type: tp = replaced.tuple_type if replaced.typeddict_type: tp = replaced.typeddict_type return tp def visit_callable_type(self, t: CallableType) -> None: t.ret_type.accept(self) for arg_type in t.arg_types: arg_type.accept(self) def visit_overloaded(self, t: Overloaded) -> None: for item in t.items(): item.accept(self) def visit_tuple_type(self, t: TupleType) -> None: for item in t.items: item.accept(self) def visit_typeddict_type(self, t: TypedDictType) -> None: for item_type in t.items.values(): item_type.accept(self) def visit_union_type(self, t: UnionType) -> None: for item in t.items: item.accept(self) def visit_star_type(self, t: StarType) -> None: t.type.accept(self) # Other kinds of type are trivial, since they are atomic (or invalid). def visit_unbound_type(self, t: UnboundType) -> None: pass def visit_any(self, t: AnyType) -> None: pass def visit_none_type(self, t: NoneTyp) -> None: pass def visit_uninhabited_type(self, t: UninhabitedType) -> None: pass def visit_deleted_type(self, t: DeletedType) -> None: pass def visit_type_list(self, t: TypeList) -> None: self.fail('Invalid type', t) def visit_type_var(self, t: TypeVarType) -> None: if t.upper_bound: t.upper_bound.accept(self) if t.values: for v in t.values: v.accept(self) def visit_partial_type(self, t: PartialType) -> None: pass def visit_type_type(self, t: TypeType) -> None: t.item.accept(self) def visit_forwardref_type(self, t: ForwardRef) -> None: self.indicator['forward'] = True if t.resolved is None: resolved = self.anal_type(t.unbound) t.resolve(resolved) def anal_type(self, tp: UnboundType) -> Type: tpan = TypeAnalyser(self.lookup_func, self.lookup_fqn_func, None, self.fail, self.note_func, self.plugin, self.options, self.is_typeshed_stub, third_pass=True) return tp.accept(tpan) TypeVarList = List[Tuple[str, TypeVarExpr]] def replace_alias_tvars(tp: Type, vars: List[str], subs: List[Type], newline: int, newcolumn: int) -> Type: """Replace type variables in a generic type alias tp with substitutions subs resetting context. Length of subs should be already checked. """ typ_args = get_typ_args(tp) new_args = typ_args[:] for i, arg in enumerate(typ_args): if isinstance(arg, (UnboundType, TypeVarType)): tvar = arg.name # type: Optional[str] else: tvar = None if tvar and tvar in vars: # Perform actual substitution... new_args[i] = subs[vars.index(tvar)] else: # ...recursively, if needed. 
new_args[i] = replace_alias_tvars(arg, vars, subs, newline, newcolumn) return set_typ_args(tp, new_args, newline, newcolumn) def set_any_tvars(tp: Type, vars: List[str], newline: int, newcolumn: int, implicit: bool = True) -> Type: if implicit: type_of_any = TypeOfAny.from_omitted_generics else: type_of_any = TypeOfAny.special_form any_type = AnyType(type_of_any, line=newline, column=newcolumn) return replace_alias_tvars(tp, vars, [any_type] * len(vars), newline, newcolumn) def remove_dups(tvars: Iterable[T]) -> List[T]: # Get unique elements in order of appearance all_tvars = set() # type: Set[T] new_tvars = [] # type: List[T] for t in tvars: if t not in all_tvars: new_tvars.append(t) all_tvars.add(t) return new_tvars def flatten_tvars(ll: Iterable[List[T]]) -> List[T]: return remove_dups(chain.from_iterable(ll)) class TypeVariableQuery(TypeQuery[TypeVarList]): def __init__(self, lookup: Callable[[str, Context], Optional[SymbolTableNode]], scope: 'TypeVarScope', *, include_callables: bool = True, include_bound_tvars: bool = False) -> None: self.include_callables = include_callables self.lookup = lookup self.scope = scope self.include_bound_tvars = include_bound_tvars super().__init__(flatten_tvars) def _seems_like_callable(self, type: UnboundType) -> bool: if not type.args: return False if isinstance(type.args[0], (EllipsisType, TypeList)): return True return False def visit_unbound_type(self, t: UnboundType) -> TypeVarList: name = t.name node = self.lookup(name, t) if node and node.kind == TVAR and ( self.include_bound_tvars or self.scope.get_binding(node) is None): assert isinstance(node.node, TypeVarExpr) return [(name, node.node)] elif not self.include_callables and self._seems_like_callable(t): return [] else: return super().visit_unbound_type(t) def visit_callable_type(self, t: CallableType) -> TypeVarList: if self.include_callables: return super().visit_callable_type(t) else: return [] def check_for_explicit_any(typ: Optional[Type], options: Options, is_typeshed_stub: bool, msg: MessageBuilder, context: Context) -> None: if (options.disallow_any_explicit and not is_typeshed_stub and typ and has_explicit_any(typ)): msg.explicit_any(context) def has_explicit_any(t: Type) -> bool: """ Whether this type is or type it contains is an Any coming from explicit type annotation """ return t.accept(HasExplicitAny()) class HasExplicitAny(TypeQuery[bool]): def __init__(self) -> None: super().__init__(any) def visit_any(self, t: AnyType) -> bool: return t.type_of_any == TypeOfAny.explicit def visit_typeddict_type(self, t: TypedDictType) -> bool: # typeddict is checked during TypedDict declaration, so don't typecheck it here. return False def has_any_from_unimported_type(t: Type) -> bool: """Return true if this type is Any because an import was not followed. If type t is such Any type or has type arguments that contain such Any type this function will return true. 
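    Illustrative example (not from the original docstring): when an import
    cannot be followed, e.g.

        from missing_module import Thing  # module not found / not followed
        x: Thing

    the name Thing becomes an Any tagged as from_unimported_type, so this
    function would return true for the type of x.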
""" return t.accept(HasAnyFromUnimportedType()) class HasAnyFromUnimportedType(TypeQuery[bool]): def __init__(self) -> None: super().__init__(any) def visit_any(self, t: AnyType) -> bool: return t.type_of_any == TypeOfAny.from_unimported_type def visit_typeddict_type(self, t: TypedDictType) -> bool: # typeddict is checked during TypedDict declaration, so don't typecheck it here return False def collect_any_types(t: Type) -> List[AnyType]: """Return all inner `AnyType`s of type t""" return t.accept(CollectAnyTypesQuery()) class CollectAnyTypesQuery(TypeQuery[List[AnyType]]): def __init__(self) -> None: super().__init__(self.combine_lists_strategy) def visit_any(self, t: AnyType) -> List[AnyType]: return [t] @classmethod def combine_lists_strategy(cls, it: Iterable[List[AnyType]]) -> List[AnyType]: result = [] # type: List[AnyType] for l in it: result.extend(l) return result def collect_all_inner_types(t: Type) -> List[Type]: """ Return all types that `t` contains """ return t.accept(CollectAllInnerTypesQuery()) class CollectAllInnerTypesQuery(TypeQuery[List[Type]]): def __init__(self) -> None: super().__init__(self.combine_lists_strategy) def query_types(self, types: Iterable[Type]) -> List[Type]: return self.strategy(t.accept(self) for t in types) + list(types) @classmethod def combine_lists_strategy(cls, it: Iterable[List[Type]]) -> List[Type]: return list(itertools.chain.from_iterable(it)) def make_optional_type(t: Type) -> Type: """Return the type corresponding to Optional[t]. Note that we can't use normal union simplification, since this function is called during semantic analysis and simplification only works during type checking. """ if isinstance(t, NoneTyp): return t elif isinstance(t, UnionType): items = [item for item in union_items(t) if not isinstance(item, NoneTyp)] return UnionType(items + [NoneTyp()], t.line, t.column) else: return UnionType([t, NoneTyp()], t.line, t.column) mypy-0.560/mypy/types.py0000644€tŠÔÚ€2›s®0000022155413215007205021414 0ustar jukkaDROPBOX\Domain Users00000000000000"""Classes for representing mypy types.""" import copy from abc import abstractmethod from collections import OrderedDict from enum import Enum from typing import ( Any, TypeVar, Dict, List, Tuple, cast, Generic, Set, Optional, Union, Iterable, NamedTuple, Callable, Sequence ) import mypy.nodes from mypy import experiments from mypy.nodes import ( INVARIANT, SymbolNode, ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT, ) from mypy.sharedparse import argument_elide_name from mypy.util import IdMapper T = TypeVar('T') JsonDict = Dict[str, Any] def deserialize_type(data: Union[JsonDict, str]) -> 'Type': if isinstance(data, str): return Instance.deserialize(data) classname = data['.class'] method = deserialize_map.get(classname) if method is not None: return method(data) raise NotImplementedError('unexpected .class {}'.format(classname)) class Type(mypy.nodes.Context): """Abstract base class for all types.""" can_be_true = True can_be_false = True def accept(self, visitor: 'TypeVisitor[T]') -> T: raise RuntimeError('Not implemented') def __repr__(self) -> str: return self.accept(TypeStrVisitor()) def serialize(self) -> Union[JsonDict, str]: raise NotImplementedError('Cannot serialize {} instance'.format(self.__class__.__name__)) @classmethod def deserialize(cls, data: JsonDict) -> 'Type': raise NotImplementedError('Cannot deserialize {} instance'.format(cls.__name__)) class TypeVarId: # A type variable is uniquely identified by its raw id and meta level. 
# For plain variables (type parameters of generic classes and # functions) raw ids are allocated by semantic analysis, using # positive ids 1, 2, ... for generic class parameters and negative # ids -1, ... for generic function type arguments. This convention # is only used to keep type variable ids distinct when allocating # them; the type checker makes no distinction between class and # function type variables. # Metavariables are allocated unique ids starting from 1. raw_id = 0 # type: int # Level of the variable in type inference. Currently either 0 for # declared types, or 1 for type inference metavariables. meta_level = 0 # type: int # Class variable used for allocating fresh ids for metavariables. next_raw_id = 1 # type: int def __init__(self, raw_id: int, meta_level: int = 0) -> None: self.raw_id = raw_id self.meta_level = meta_level @staticmethod def new(meta_level: int) -> 'TypeVarId': raw_id = TypeVarId.next_raw_id TypeVarId.next_raw_id += 1 return TypeVarId(raw_id, meta_level) def __repr__(self) -> str: return self.raw_id.__repr__() def __eq__(self, other: object) -> bool: if isinstance(other, TypeVarId): return (self.raw_id == other.raw_id and self.meta_level == other.meta_level) else: return False def __ne__(self, other: object) -> bool: return not (self == other) def __hash__(self) -> int: return hash((self.raw_id, self.meta_level)) def is_meta_var(self) -> bool: return self.meta_level > 0 class TypeVarDef(mypy.nodes.Context): """Definition of a single type variable.""" name = '' # Name (may be qualified) fullname = '' # Fully qualified name id = None # type: TypeVarId values = None # type: List[Type] # Value restriction, empty list if no restriction upper_bound = None # type: Type variance = INVARIANT # type: int def __init__(self, name: str, fullname: str, id: Union[TypeVarId, int], values: List[Type], upper_bound: Type, variance: int = INVARIANT, line: int = -1, column: int = -1) -> None: super().__init__(line, column) assert values is not None, "No restrictions must be represented by empty list" self.name = name self.fullname = fullname if isinstance(id, int): id = TypeVarId(id) self.id = id self.values = values self.upper_bound = upper_bound self.variance = variance @staticmethod def new_unification_variable(old: 'TypeVarDef') -> 'TypeVarDef': new_id = TypeVarId.new(meta_level=1) return TypeVarDef(old.name, old.fullname, new_id, old.values, old.upper_bound, old.variance, old.line, old.column) def __repr__(self) -> str: if self.values: return '{} in {}'.format(self.name, tuple(self.values)) elif not is_named_instance(self.upper_bound, 'builtins.object'): return '{} <: {}'.format(self.name, self.upper_bound) else: return self.name def serialize(self) -> JsonDict: assert not self.id.is_meta_var() return {'.class': 'TypeVarDef', 'name': self.name, 'fullname': self.fullname, 'id': self.id.raw_id, 'values': [v.serialize() for v in self.values], 'upper_bound': self.upper_bound.serialize(), 'variance': self.variance, } @classmethod def deserialize(cls, data: JsonDict) -> 'TypeVarDef': assert data['.class'] == 'TypeVarDef' return TypeVarDef(data['name'], data['fullname'], data['id'], [deserialize_type(v) for v in data['values']], deserialize_type(data['upper_bound']), data['variance'], ) class UnboundType(Type): """Instance type that has not been bound during semantic analysis.""" name = '' args = None # type: List[Type] # should this type be wrapped in an Optional? 
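# (Illustrative note, not from the original source: the 'optional' flag
# supports the implicit-Optional convention for defaults, e.g. in
#
#     def f(x: int = None) -> None: ...
#
# the 'int' annotation can be marked optional so that it is later treated
# as Optional[int].)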
optional = False # special case for X[()] empty_tuple_index = False def __init__(self, name: str, args: Optional[List[Type]] = None, line: int = -1, column: int = -1, optional: bool = False, empty_tuple_index: bool = False) -> None: if not args: args = [] self.name = name self.args = args self.optional = optional self.empty_tuple_index = empty_tuple_index super().__init__(line, column) def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_unbound_type(self) def __hash__(self) -> int: return hash((self.name, self.optional, tuple(self.args))) def __eq__(self, other: object) -> bool: if not isinstance(other, UnboundType): return NotImplemented return (self.name == other.name and self.optional == other.optional and self.args == other.args) def serialize(self) -> JsonDict: return {'.class': 'UnboundType', 'name': self.name, 'args': [a.serialize() for a in self.args], } @classmethod def deserialize(cls, data: JsonDict) -> 'UnboundType': assert data['.class'] == 'UnboundType' return UnboundType(data['name'], [deserialize_type(a) for a in data['args']]) class CallableArgument(Type): """Represents a Arg(type, 'name') inside a Callable's type list. Note that this is a synthetic type for helping parse ASTs, not a real type. """ typ = None # type: Type name = None # type: Optional[str] constructor = None # type: Optional[str] def __init__(self, typ: Type, name: Optional[str], constructor: Optional[str], line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.typ = typ self.name = name self.constructor = constructor def accept(self, visitor: 'TypeVisitor[T]') -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_callable_argument(self) def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" class TypeList(Type): """Information about argument types and names [...]. This is only used for the arguments of a Callable type, i.e. for [arg, ...] in Callable[[arg, ...], ret]. This is not a real type but a syntactic AST construct. """ items = None # type: List[Type] def __init__(self, items: List[Type], line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.items = items def accept(self, visitor: 'TypeVisitor[T]') -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_type_list(self) def serialize(self) -> JsonDict: assert False, "Sythetic types don't serialize" _dummy = object() # type: Any class TypeOfAny(Enum): """ This class describes different types of Any. Each 'Any' can be of only one type at a time. """ # Was this Any type was inferred without a type annotation? unannotated = 'unannotated' # Does this Any come from an explicit type annotation? explicit = 'explicit' # Does this come from an unfollowed import? See --disallow-any-unimported option from_unimported_type = 'from_unimported_type' # Does this Any type come from omitted generics? from_omitted_generics = 'from_omitted_generics' # Does this Any come from an error? from_error = 'from_error' # Is this a type that can't be represented in mypy's type system? For instance, type of # call to NewType...). Even though these types aren't real Anys, we treat them as such. special_form = 'special_form' # Does this Any come from interaction with another Any? 
from_another_any = 'from_another_any' class AnyType(Type): """The type 'Any'.""" def __init__(self, type_of_any: TypeOfAny, source_any: Optional['AnyType'] = None, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.type_of_any = type_of_any # If this Any was created as a result of interacting with another 'Any', record the source # and use it in reports. self.source_any = source_any if source_any and source_any.source_any: self.source_any = source_any.source_any # Only Anys that come from another Any can have source_any. assert type_of_any != TypeOfAny.from_another_any or source_any is not None # We should not have chains of Anys. assert not self.source_any or self.source_any.type_of_any != TypeOfAny.from_another_any def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_any(self) def copy_modified(self, type_of_any: TypeOfAny = _dummy, original_any: Optional['AnyType'] = _dummy, ) -> 'AnyType': if type_of_any is _dummy: type_of_any = self.type_of_any if original_any is _dummy: original_any = self.source_any return AnyType(type_of_any=type_of_any, source_any=original_any, line=self.line, column=self.column) def __hash__(self) -> int: return hash(AnyType) def __eq__(self, other: object) -> bool: return isinstance(other, AnyType) def serialize(self) -> JsonDict: return {'.class': 'AnyType'} @classmethod def deserialize(cls, data: JsonDict) -> 'AnyType': assert data['.class'] == 'AnyType' return AnyType(TypeOfAny.special_form) class UninhabitedType(Type): """This type has no members. This type is the bottom type. With strict Optional checking, it is the only common subtype between all other types, which allows `meet` to be well defined. Without strict Optional checking, NoneTyp fills this role. In general, for any type T: join(UninhabitedType, T) = T meet(UninhabitedType, T) = UninhabitedType is_subtype(UninhabitedType, T) = True """ can_be_true = False can_be_false = False is_noreturn = False # Does this come from a NoReturn? Purely for error messages. # It is important to track whether this is an actual NoReturn type, or just a result # of ambiguous type inference, in the latter case we don't want to mark a branch as # unreachable in binder. ambiguous = False # Is this a result of inference for a variable without constraints? def __init__(self, is_noreturn: bool = False, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.is_noreturn = is_noreturn def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_uninhabited_type(self) def __hash__(self) -> int: return hash(UninhabitedType) def __eq__(self, other: object) -> bool: return isinstance(other, UninhabitedType) def serialize(self) -> JsonDict: return {'.class': 'UninhabitedType', 'is_noreturn': self.is_noreturn} @classmethod def deserialize(cls, data: JsonDict) -> 'UninhabitedType': assert data['.class'] == 'UninhabitedType' return UninhabitedType(is_noreturn=data['is_noreturn']) class NoneTyp(Type): """The type of 'None'. This type can be written by users as 'None'. 
""" can_be_true = False def __init__(self, line: int = -1, column: int = -1) -> None: super().__init__(line, column) def __hash__(self) -> int: return hash(NoneTyp) def __eq__(self, other: object) -> bool: return isinstance(other, NoneTyp) def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_none_type(self) def serialize(self) -> JsonDict: return {'.class': 'NoneTyp'} @classmethod def deserialize(cls, data: JsonDict) -> 'NoneTyp': assert data['.class'] == 'NoneTyp' return NoneTyp() class ErasedType(Type): """Placeholder for an erased type. This is used during type inference. This has the special property that it is ignored during type inference. """ def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_erased_type(self) class DeletedType(Type): """Type of deleted variables. These can be used as lvalues but not rvalues. """ source = '' # type: Optional[str] # May be None; name that generated this value def __init__(self, source: Optional[str] = None, line: int = -1, column: int = -1) -> None: self.source = source super().__init__(line, column) def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_deleted_type(self) def serialize(self) -> JsonDict: return {'.class': 'DeletedType', 'source': self.source} @classmethod def deserialize(cls, data: JsonDict) -> 'DeletedType': assert data['.class'] == 'DeletedType' return DeletedType(data['source']) # Fake TypeInfo to be used as a placeholder during Instance de-serialization. NOT_READY = mypy.nodes.FakeInfo(mypy.nodes.SymbolTable(), mypy.nodes.ClassDef('', mypy.nodes.Block([])), '') class Instance(Type): """An instance type of form C[T1, ..., Tn]. The list of type variables may be empty. """ type = None # type: mypy.nodes.TypeInfo args = None # type: List[Type] erased = False # True if result of type variable substitution invalid = False # True if recovered after incorrect number of type arguments error from_generic_builtin = False # True if created from a generic builtin (e.g. list() or set()) def __init__(self, typ: mypy.nodes.TypeInfo, args: List[Type], line: int = -1, column: int = -1, erased: bool = False) -> None: assert(typ is NOT_READY or typ.fullname() not in ["builtins.Any", "typing.Any"]) self.type = typ self.args = args self.erased = erased super().__init__(line, column) def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_instance(self) type_ref = None # type: str def __hash__(self) -> int: return hash((self.type, tuple(self.args))) def __eq__(self, other: object) -> bool: if not isinstance(other, Instance): return NotImplemented return self.type == other.type and self.args == other.args def serialize(self) -> Union[JsonDict, str]: assert self.type is not None type_ref = self.type.fullname() if not self.args: return type_ref data = {'.class': 'Instance', } # type: JsonDict data['type_ref'] = type_ref data['args'] = [arg.serialize() for arg in self.args] return data @classmethod def deserialize(cls, data: Union[JsonDict, str]) -> 'Instance': if isinstance(data, str): inst = Instance(NOT_READY, []) inst.type_ref = data return inst assert data['.class'] == 'Instance' args = [] # type: List[Type] if 'args' in data: args_list = data['args'] assert isinstance(args_list, list) args = [deserialize_type(arg) for arg in args_list] inst = Instance(NOT_READY, args) inst.type_ref = data['type_ref'] # Will be fixed up by fixup.py later. 
return inst def copy_modified(self, *, args: List[Type]) -> 'Instance': return Instance(self.type, args, self.line, self.column, self.erased) class TypeVarType(Type): """A type variable type. This refers to either a class type variable (id > 0) or a function type variable (id < 0). """ name = '' # Name of the type variable (for messages and debugging) fullname = None # type: str id = None # type: TypeVarId values = None # type: List[Type] # Value restriction, empty list if no restriction upper_bound = None # type: Type # Upper bound for values # See comments in TypeVarDef for more about variance. variance = INVARIANT # type: int def __init__(self, binder: TypeVarDef, line: int = -1, column: int = -1) -> None: self.name = binder.name self.fullname = binder.fullname self.id = binder.id self.values = binder.values self.upper_bound = binder.upper_bound self.variance = binder.variance super().__init__(line, column) def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_type_var(self) def erase_to_union_or_bound(self) -> Type: if self.values: return UnionType.make_simplified_union(self.values) else: return self.upper_bound def __hash__(self) -> int: return hash(self.id) def __eq__(self, other: object) -> bool: if not isinstance(other, TypeVarType): return NotImplemented return self.id == other.id def serialize(self) -> JsonDict: assert not self.id.is_meta_var() return {'.class': 'TypeVarType', 'name': self.name, 'fullname': self.fullname, 'id': self.id.raw_id, 'values': [v.serialize() for v in self.values], 'upper_bound': self.upper_bound.serialize(), 'variance': self.variance, } @classmethod def deserialize(cls, data: JsonDict) -> 'TypeVarType': assert data['.class'] == 'TypeVarType' tvdef = TypeVarDef(data['name'], data['fullname'], data['id'], [deserialize_type(v) for v in data['values']], deserialize_type(data['upper_bound']), data['variance']) return TypeVarType(tvdef) class FunctionLike(Type): """Abstract base class for function types.""" can_be_false = False @abstractmethod def is_type_obj(self) -> bool: pass def is_concrete_type_obj(self) -> bool: return self.is_type_obj() @abstractmethod def type_object(self) -> mypy.nodes.TypeInfo: pass @abstractmethod def items(self) -> List['CallableType']: pass @abstractmethod def with_name(self, name: str) -> 'FunctionLike': pass @abstractmethod def get_name(self) -> Optional[str]: pass # Corresponding instance type (e.g. builtins.type) fallback = None # type: Instance FormalArgument = NamedTuple('FormalArgument', [ ('name', Optional[str]), ('pos', Optional[int]), ('typ', Type), ('required', bool)]) class CallableType(FunctionLike): """Type of a non-overloaded callable object (function).""" arg_types = None # type: List[Type] # Types of function arguments arg_kinds = None # type: List[int] # ARG_ constants arg_names = None # type: List[Optional[str]] # None if not a keyword argument min_args = 0 # Minimum number of arguments; derived from arg_kinds is_var_arg = False # Is it a varargs function? derived from arg_kinds is_kw_arg = False ret_type = None # type: Type # Return value type name = '' # type: Optional[str] # Name (may be None; for error messages and plugins) definition = None # type: Optional[SymbolNode] # For error messages. May be None. # Type variables for a generic function variables = None # type: List[TypeVarDef] # Is this Callable[..., t] (with literal '...')? is_ellipsis_args = False # Is this callable constructed for the benefit of a classmethod's 'cls' argument? 
is_classmethod_class = False # Was this type implicitly generated instead of explicitly specified by the user? implicit = False # Defined for signatures that require special handling (currently only value is 'dict' # for a signature similar to 'dict') special_sig = None # type: Optional[str] # Was this callable generated by analyzing Type[...] instantiation? from_type_type = False # type: bool bound_args = None # type: List[Optional[Type]] def __init__(self, arg_types: List[Type], arg_kinds: List[int], arg_names: Sequence[Optional[str]], ret_type: Type, fallback: Instance, name: Optional[str] = None, definition: Optional[SymbolNode] = None, variables: Optional[List[TypeVarDef]] = None, line: int = -1, column: int = -1, is_ellipsis_args: bool = False, implicit: bool = False, is_classmethod_class: bool = False, special_sig: Optional[str] = None, from_type_type: bool = False, bound_args: Optional[List[Optional[Type]]] = None, ) -> None: if variables is None: variables = [] assert len(arg_types) == len(arg_kinds) assert not any(tp is None for tp in arg_types), "No annotation must be Any, not None" self.arg_types = arg_types self.arg_kinds = arg_kinds self.arg_names = list(arg_names) self.min_args = arg_kinds.count(ARG_POS) self.is_var_arg = ARG_STAR in arg_kinds self.is_kw_arg = ARG_STAR2 in arg_kinds self.ret_type = ret_type self.fallback = fallback assert not name or ' 'CallableType': return CallableType( arg_types=arg_types if arg_types is not _dummy else self.arg_types, arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds, arg_names=arg_names if arg_names is not _dummy else self.arg_names, ret_type=ret_type if ret_type is not _dummy else self.ret_type, fallback=fallback if fallback is not _dummy else self.fallback, name=name if name is not _dummy else self.name, definition=definition if definition is not _dummy else self.definition, variables=variables if variables is not _dummy else self.variables, line=line if line is not _dummy else self.line, column=column if column is not _dummy else self.column, is_ellipsis_args=( is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args), implicit=self.implicit, is_classmethod_class=self.is_classmethod_class, special_sig=special_sig if special_sig is not _dummy else self.special_sig, from_type_type=from_type_type if from_type_type is not _dummy else self.from_type_type, bound_args=bound_args if bound_args is not _dummy else self.bound_args, ) def is_type_obj(self) -> bool: return self.fallback.type.is_metaclass() def is_concrete_type_obj(self) -> bool: return self.is_type_obj() and self.is_classmethod_class def type_object(self) -> mypy.nodes.TypeInfo: assert self.is_type_obj() ret = self.ret_type if isinstance(ret, TupleType): ret = ret.fallback if isinstance(ret, TypeVarType): ret = ret.upper_bound assert isinstance(ret, Instance) return ret.type def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_callable_type(self) def with_name(self, name: str) -> 'CallableType': """Return a copy of this type with the specified name.""" return self.copy_modified(ret_type=self.ret_type, name=name) def get_name(self) -> Optional[str]: return self.name def max_fixed_args(self) -> int: n = len(self.arg_types) if self.is_var_arg: n -= 1 return n def corresponding_argument(self, model: FormalArgument) -> Optional[FormalArgument]: """Return the argument in this function that corresponds to `model`""" by_name = self.argument_by_name(model.name) by_pos = self.argument_by_position(model.pos) if by_name is None and 
by_pos is None: return None if by_name is not None and by_pos is not None: if by_name == by_pos: return by_name # If we're dealing with an optional pos-only and an optional # name-only arg, merge them. This is the case for all functions # taking both *args and **args, or a pair of functions like so: # def right(a: int = ...) -> None: ... # def left(__a: int = ..., *, a: int = ...) -> None: ... from mypy.subtypes import is_equivalent if (not (by_name.required or by_pos.required) and by_pos.name is None and by_name.pos is None and is_equivalent(by_name.typ, by_pos.typ)): return FormalArgument(by_name.name, by_pos.pos, by_name.typ, False) return by_name if by_name is not None else by_pos def argument_by_name(self, name: Optional[str]) -> Optional[FormalArgument]: if name is None: return None seen_star = False star2_type = None # type: Optional[Type] for i, (arg_name, kind, typ) in enumerate( zip(self.arg_names, self.arg_kinds, self.arg_types)): # No more positional arguments after these. if kind in (ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT): seen_star = True if kind == ARG_STAR: continue if kind == ARG_STAR2: star2_type = typ continue if arg_name == name: position = None if seen_star else i return FormalArgument(name, position, typ, kind in (ARG_POS, ARG_NAMED)) if star2_type is not None: return FormalArgument(name, None, star2_type, False) return None def argument_by_position(self, position: Optional[int]) -> Optional[FormalArgument]: if position is None: return None if self.is_var_arg: for kind, typ in zip(self.arg_kinds, self.arg_types): if kind == ARG_STAR: star_type = typ break if position >= len(self.arg_names): if self.is_var_arg: return FormalArgument(None, position, star_type, False) else: return None name, kind, typ = ( self.arg_names[position], self.arg_kinds[position], self.arg_types[position], ) if kind in (ARG_POS, ARG_OPT): return FormalArgument(name, position, typ, kind == ARG_POS) else: if self.is_var_arg: return FormalArgument(None, position, star_type, False) else: return None def items(self) -> List['CallableType']: return [self] def is_generic(self) -> bool: return bool(self.variables) def type_var_ids(self) -> List[TypeVarId]: a = [] # type: List[TypeVarId] for tv in self.variables: a.append(tv.id) return a def __hash__(self) -> int: return hash((self.ret_type, self.is_type_obj(), self.is_ellipsis_args, self.name, tuple(self.arg_types), tuple(self.arg_names), tuple(self.arg_kinds))) def __eq__(self, other: object) -> bool: if isinstance(other, CallableType): return (self.ret_type == other.ret_type and self.arg_types == other.arg_types and self.arg_names == other.arg_names and self.arg_kinds == other.arg_kinds and self.name == other.name and self.is_type_obj() == other.is_type_obj() and self.is_ellipsis_args == other.is_ellipsis_args) else: return NotImplemented def serialize(self) -> JsonDict: # TODO: As an optimization, leave out everything related to # generic functions for non-generic functions. return {'.class': 'CallableType', 'arg_types': [t.serialize() for t in self.arg_types], 'arg_kinds': self.arg_kinds, 'arg_names': self.arg_names, 'ret_type': self.ret_type.serialize(), 'fallback': self.fallback.serialize(), 'name': self.name, # We don't serialize the definition (only used for error messages). 
'variables': [v.serialize() for v in self.variables], 'is_ellipsis_args': self.is_ellipsis_args, 'implicit': self.implicit, 'is_classmethod_class': self.is_classmethod_class, 'bound_args': [(None if t is None else t.serialize()) for t in self.bound_args], } @classmethod def deserialize(cls, data: JsonDict) -> 'CallableType': assert data['.class'] == 'CallableType' # TODO: Set definition to the containing SymbolNode? return CallableType([deserialize_type(t) for t in data['arg_types']], data['arg_kinds'], data['arg_names'], deserialize_type(data['ret_type']), Instance.deserialize(data['fallback']), name=data['name'], variables=[TypeVarDef.deserialize(v) for v in data['variables']], is_ellipsis_args=data['is_ellipsis_args'], implicit=data['implicit'], is_classmethod_class=data['is_classmethod_class'], bound_args=[(None if t is None else deserialize_type(t)) for t in data['bound_args']], ) class Overloaded(FunctionLike): """Overloaded function type T1, ... Tn, where each Ti is CallableType. The variant to call is chosen based on static argument types. Overloaded function types can only be defined in stub files, and thus there is no explicit runtime dispatch implementation. """ _items = None # type: List[CallableType] # Must not be empty def __init__(self, items: List[CallableType]) -> None: self._items = items self.fallback = items[0].fallback super().__init__(items[0].line, items[0].column) def items(self) -> List[CallableType]: return self._items def name(self) -> Optional[str]: return self.get_name() def is_type_obj(self) -> bool: # All the items must have the same type object status, so it's # sufficient to query only (any) one of them. return self._items[0].is_type_obj() def type_object(self) -> mypy.nodes.TypeInfo: # All the items must have the same type object, so it's sufficient to # query only (any) one of them. return self._items[0].type_object() def with_name(self, name: str) -> 'Overloaded': ni = [] # type: List[CallableType] for it in self._items: ni.append(it.with_name(name)) return Overloaded(ni) def get_name(self) -> Optional[str]: return self._items[0].name def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_overloaded(self) def __hash__(self) -> int: return hash(tuple(self.items())) def __eq__(self, other: object) -> bool: if not isinstance(other, Overloaded): return NotImplemented return self.items() == other.items() def serialize(self) -> JsonDict: return {'.class': 'Overloaded', 'items': [t.serialize() for t in self.items()], } @classmethod def deserialize(cls, data: JsonDict) -> 'Overloaded': assert data['.class'] == 'Overloaded' return Overloaded([CallableType.deserialize(t) for t in data['items']]) class TupleType(Type): """The tuple type Tuple[T1, ..., Tn] (at least one type argument). Instance variables: items: tuple item types fallback: the underlying instance type that is used for non-tuple methods (this is currently always builtins.tuple, but it could be different for named tuples, for example) implicit: if True, derived from a tuple expression (t,....) instead of Tuple[t, ...] 
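    For example (an illustrative sketch, not from the original docstring),
    the annotation Tuple[int, str] is represented roughly as

        TupleType([int, str], fallback=Instance(tuple_info, [Any]))

    where tuple_info stands for the TypeInfo of builtins.tuple.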
""" items = None # type: List[Type] fallback = None # type: Instance implicit = False def __init__(self, items: List[Type], fallback: Instance, line: int = -1, column: int = -1, implicit: bool = False) -> None: self.items = items self.fallback = fallback self.implicit = implicit self.can_be_true = len(self.items) > 0 self.can_be_false = len(self.items) == 0 super().__init__(line, column) def length(self) -> int: return len(self.items) def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_tuple_type(self) def __hash__(self) -> int: return hash((tuple(self.items), self.fallback)) def __eq__(self, other: object) -> bool: if not isinstance(other, TupleType): return NotImplemented return self.items == other.items and self.fallback == other.fallback def serialize(self) -> JsonDict: return {'.class': 'TupleType', 'items': [t.serialize() for t in self.items], 'fallback': self.fallback.serialize(), 'implicit': self.implicit, } @classmethod def deserialize(cls, data: JsonDict) -> 'TupleType': assert data['.class'] == 'TupleType' return TupleType([deserialize_type(t) for t in data['items']], Instance.deserialize(data['fallback']), implicit=data['implicit']) def copy_modified(self, *, fallback: Optional[Instance] = None, items: Optional[List[Type]] = None) -> 'TupleType': if fallback is None: fallback = self.fallback if items is None: items = self.items return TupleType(items, fallback, self.line, self.column) def slice(self, begin: Optional[int], stride: Optional[int], end: Optional[int]) -> 'TupleType': return TupleType(self.items[begin:end:stride], self.fallback, self.line, self.column, self.implicit) class TypedDictType(Type): """The type of a TypedDict instance. TypedDict(K1=VT1, ..., Kn=VTn) A TypedDictType can be either named or anonymous. If it is anonymous then its fallback will be an Instance of Mapping[str, V]. If it is named then its fallback will be an Instance of the named type (ex: "Point") whose TypeInfo has a typeddict_type that is anonymous. 
""" items = None # type: OrderedDict[str, Type] # item_name -> item_type required_keys = None # type: Set[str] fallback = None # type: Instance def __init__(self, items: 'OrderedDict[str, Type]', required_keys: Set[str], fallback: Instance, line: int = -1, column: int = -1) -> None: self.items = items self.required_keys = required_keys self.fallback = fallback self.can_be_true = len(self.items) > 0 self.can_be_false = len(self.items) == 0 super().__init__(line, column) def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_typeddict_type(self) def __hash__(self) -> int: return hash((frozenset(self.items.items()), self.fallback, frozenset(self.required_keys))) def __eq__(self, other: object) -> bool: if isinstance(other, TypedDictType): if frozenset(self.items.keys()) != frozenset(other.items.keys()): return False for (_, left_item_type, right_item_type) in self.zip(other): if not left_item_type == right_item_type: return False return self.fallback == other.fallback and self.required_keys == other.required_keys else: return NotImplemented def serialize(self) -> JsonDict: return {'.class': 'TypedDictType', 'items': [[n, t.serialize()] for (n, t) in self.items.items()], 'required_keys': sorted(self.required_keys), 'fallback': self.fallback.serialize(), } @classmethod def deserialize(cls, data: JsonDict) -> 'TypedDictType': assert data['.class'] == 'TypedDictType' return TypedDictType(OrderedDict([(n, deserialize_type(t)) for (n, t) in data['items']]), set(data['required_keys']), Instance.deserialize(data['fallback'])) def is_anonymous(self) -> bool: return self.fallback.type.fullname() == 'typing.Mapping' def as_anonymous(self) -> 'TypedDictType': if self.is_anonymous(): return self assert self.fallback.type.typeddict_type is not None return self.fallback.type.typeddict_type.as_anonymous() def copy_modified(self, *, fallback: Optional[Instance] = None, item_types: Optional[List[Type]] = None, required_keys: Optional[Set[str]] = None) -> 'TypedDictType': if fallback is None: fallback = self.fallback if item_types is None: items = self.items else: items = OrderedDict(zip(self.items, item_types)) if required_keys is None: required_keys = self.required_keys return TypedDictType(items, required_keys, fallback, self.line, self.column) def create_anonymous_fallback(self, *, value_type: Type) -> Instance: anonymous = self.as_anonymous() return anonymous.fallback.copy_modified(args=[ # i.e. Mapping anonymous.fallback.args[0], # i.e. str value_type ]) def names_are_wider_than(self, other: 'TypedDictType') -> bool: return len(other.items.keys() - self.items.keys()) == 0 def zip(self, right: 'TypedDictType') -> Iterable[Tuple[str, Type, Type]]: left = self for (item_name, left_item_type) in left.items.items(): right_item_type = right.items.get(item_name) if right_item_type is not None: yield (item_name, left_item_type, right_item_type) def zipall(self, right: 'TypedDictType') \ -> Iterable[Tuple[str, Optional[Type], Optional[Type]]]: left = self for (item_name, left_item_type) in left.items.items(): right_item_type = right.items.get(item_name) yield (item_name, left_item_type, right_item_type) for (item_name, right_item_type) in right.items.items(): if item_name in left.items: continue yield (item_name, None, right_item_type) class StarType(Type): """The star type *type_parameter. This is not a real type but a syntactic AST construct. 
""" type = None # type: Type def __init__(self, type: Type, line: int = -1, column: int = -1) -> None: self.type = type super().__init__(line, column) def accept(self, visitor: 'TypeVisitor[T]') -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_star_type(self) def serialize(self) -> JsonDict: assert False, "Sythetic types don't serialize" class UnionType(Type): """The union type Union[T1, ..., Tn] (at least one type argument).""" items = None # type: List[Type] def __init__(self, items: List[Type], line: int = -1, column: int = -1) -> None: self.items = flatten_nested_unions(items) self.can_be_true = any(item.can_be_true for item in items) self.can_be_false = any(item.can_be_false for item in items) super().__init__(line, column) def __hash__(self) -> int: return hash(frozenset(self.items)) def __eq__(self, other: object) -> bool: if not isinstance(other, UnionType): return NotImplemented return frozenset(self.items) == frozenset(other.items) @staticmethod def make_union(items: List[Type], line: int = -1, column: int = -1) -> Type: if len(items) > 1: return UnionType(items, line, column) elif len(items) == 1: return items[0] else: return UninhabitedType() @staticmethod def make_simplified_union(items: List[Type], line: int = -1, column: int = -1) -> Type: """Build union type with redundant union items removed. If only a single item remains, this may return a non-union type. Examples: * [int, str] -> Union[int, str] * [int, object] -> object * [int, int] -> int * [int, Any] -> Union[int, Any] (Any types are not simplified away!) * [Any, Any] -> Any Note: This must NOT be used during semantic analysis, since TypeInfos may not be fully initialized. """ # TODO: Make this a function living somewhere outside mypy.types. Most other non-trivial # type operations are not static methods, so this is inconsistent. while any(isinstance(typ, UnionType) for typ in items): all_items = [] # type: List[Type] for typ in items: if isinstance(typ, UnionType): all_items.extend(typ.items) else: all_items.append(typ) items = all_items from mypy.subtypes import is_proper_subtype removed = set() # type: Set[int] for i, ti in enumerate(items): if i in removed: continue # Keep track of the truishness info for deleted subtypes which can be relevant cbt = cbf = False for j, tj in enumerate(items): if (i != j and is_proper_subtype(tj, ti)): # We found a redundant item in the union. removed.add(j) cbt = cbt or tj.can_be_true cbf = cbf or tj.can_be_false # if deleted subtypes had more general truthiness, use that if not ti.can_be_true and cbt: items[i] = true_or_false(ti) elif not ti.can_be_false and cbf: items[i] = true_or_false(ti) simplified_set = [items[i] for i in range(len(items)) if i not in removed] return UnionType.make_union(simplified_set, line, column) def length(self) -> int: return len(self.items) def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_union_type(self) def has_readable_member(self, name: str) -> bool: """For a tree of unions of instances, check whether all instances have a given member. TODO: Deal with attributes of TupleType etc. TODO: This should probably be refactored to go elsewhere. 
""" return all((isinstance(x, UnionType) and x.has_readable_member(name)) or (isinstance(x, Instance) and x.type.has_readable_member(name)) for x in self.relevant_items()) def relevant_items(self) -> List[Type]: """Removes NoneTypes from Unions when strict Optional checking is off.""" if experiments.STRICT_OPTIONAL: return self.items else: return [i for i in self.items if not isinstance(i, NoneTyp)] def serialize(self) -> JsonDict: return {'.class': 'UnionType', 'items': [t.serialize() for t in self.items], } @classmethod def deserialize(cls, data: JsonDict) -> 'UnionType': assert data['.class'] == 'UnionType' return UnionType([deserialize_type(t) for t in data['items']]) class PartialType(Type): """Type such as List[?] where type arguments are unknown, or partial None type. These are used for inferring types in multiphase initialization such as this: x = [] # x gets a partial type List[?], as item type is unknown x.append(1) # partial type gets replaced with normal type List[int] Or with None: x = None # x gets a partial type None if c: x = 1 # Infer actual type int for x """ # None for the 'None' partial type; otherwise a generic class type = None # type: Optional[mypy.nodes.TypeInfo] var = None # type: mypy.nodes.Var inner_types = None # type: List[Type] def __init__(self, type: 'Optional[mypy.nodes.TypeInfo]', var: 'mypy.nodes.Var', inner_types: List[Type]) -> None: self.type = type self.var = var self.inner_types = inner_types def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_partial_type(self) class EllipsisType(Type): """The type ... (ellipsis). This is not a real type but a syntactic AST construct, used in Callable[..., T], for example. A semantically analyzed type will never have ellipsis types. """ def accept(self, visitor: 'TypeVisitor[T]') -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_ellipsis_type(self) def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" class TypeType(Type): """For types like Type[User]. This annotates variables that are class objects, constrained by the type argument. See PEP 484 for more details. We may encounter expressions whose values are specific classes; those are represented as callables (possibly overloaded) corresponding to the class's constructor's signature and returning an instance of that class. The difference with Type[C] is that those callables always represent the exact class given as the return type; Type[C] represents any class that's a subclass of C, and C may also be a type variable or a union (or Any). Many questions around subtype relationships between Type[C1] and def(...) -> C2 are answered by looking at the subtype relationships between C1 and C2, since Type[] is considered covariant. There's an unsolved problem with constructor signatures (also unsolved in PEP 484): calling a variable whose type is Type[C] assumes the constructor signature for C, even though a subclass of C might completely change the constructor signature. For now we just assume that users of Type[C] are careful not to do that (in the future we might detect when they are violating that assumption). """ # This can't be everything, but it can be a class reference, # a generic class instance, a union, Any, a type variable... 
item = None # type: Type def __init__(self, item: Union[Instance, AnyType, TypeVarType, TupleType, NoneTyp, CallableType], *, line: int = -1, column: int = -1) -> None: """To ensure Type[Union[A, B]] is always represented as Union[Type[A], Type[B]], item of type UnionType must be handled through make_normalized static method. """ super().__init__(line, column) self.item = item @staticmethod def make_normalized(item: Type, *, line: int = -1, column: int = -1) -> Type: if isinstance(item, UnionType): return UnionType.make_union( [TypeType.make_normalized(union_item) for union_item in item.items], line=line, column=column ) return TypeType(item, line=line, column=column) # type: ignore def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_type_type(self) def __hash__(self) -> int: return hash(self.item) def __eq__(self, other: object) -> bool: if not isinstance(other, TypeType): return NotImplemented return self.item == other.item def serialize(self) -> JsonDict: return {'.class': 'TypeType', 'item': self.item.serialize()} @classmethod def deserialize(cls, data: JsonDict) -> Type: assert data['.class'] == 'TypeType' return TypeType.make_normalized(deserialize_type(data['item'])) class ForwardRef(Type): """Class to wrap forward references to other types. This is used when a forward reference to an (unanalyzed) synthetic type is found, for example: x: A A = TypedDict('A', {'x': int}) To avoid false positives and crashes in such situations, we first wrap the first occurrence of 'A' in ForwardRef. Then, the wrapped UnboundType is updated in the third pass of semantic analysis and ultimately fixed in the patches after the third pass. So that ForwardRefs are temporary and will be completely replaced with the linked types or Any (to avoid cyclic references) before the type checking stage. """ _unbound = None # type: UnboundType # The original wrapped type _resolved = None # type: Optional[Type] # The resolved forward reference (initially None) def __init__(self, unbound: UnboundType) -> None: self._unbound = unbound self._resolved = None @property def unbound(self) -> UnboundType: # This is read-only to make it clear that resolution happens through resolve(). return self._unbound @property def resolved(self) -> Optional[Type]: # Similar to above. return self._resolved def resolve(self, resolved: Type) -> None: """Resolve an unbound forward reference to point to a type.""" assert self._resolved is None self._resolved = resolved def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_forwardref_type(self) def serialize(self) -> str: name = self.unbound.name # We should never get here since all forward references should be resolved # and removed during semantic analysis. assert False, "Internal error: Unresolved forward reference to {}".format(name) # # Visitor-related classes # class TypeVisitor(Generic[T]): """Visitor class for types (Type subclasses). The parameter T is the return type of the visit methods. 
""" def _notimplemented_helper(self, name: str) -> NotImplementedError: return NotImplementedError("Method {}.visit_{}() not implemented\n" .format(type(self).__name__, name) + "This is a known bug, track development in " + "'https://github.com/JukkaL/mypy/issues/730'") @abstractmethod def visit_unbound_type(self, t: UnboundType) -> T: pass @abstractmethod def visit_any(self, t: AnyType) -> T: pass @abstractmethod def visit_none_type(self, t: NoneTyp) -> T: pass @abstractmethod def visit_uninhabited_type(self, t: UninhabitedType) -> T: pass def visit_erased_type(self, t: ErasedType) -> T: raise self._notimplemented_helper('erased_type') @abstractmethod def visit_deleted_type(self, t: DeletedType) -> T: pass @abstractmethod def visit_type_var(self, t: TypeVarType) -> T: pass @abstractmethod def visit_instance(self, t: Instance) -> T: pass @abstractmethod def visit_callable_type(self, t: CallableType) -> T: pass def visit_overloaded(self, t: Overloaded) -> T: raise self._notimplemented_helper('overloaded') @abstractmethod def visit_tuple_type(self, t: TupleType) -> T: pass @abstractmethod def visit_typeddict_type(self, t: TypedDictType) -> T: pass @abstractmethod def visit_union_type(self, t: UnionType) -> T: pass @abstractmethod def visit_partial_type(self, t: PartialType) -> T: pass @abstractmethod def visit_type_type(self, t: TypeType) -> T: pass def visit_forwardref_type(self, t: ForwardRef) -> T: raise RuntimeError('Internal error: unresolved forward reference') class SyntheticTypeVisitor(TypeVisitor[T]): """A TypeVisitor that also knows how to visit synthetic AST constructs. Not just real types.""" @abstractmethod def visit_star_type(self, t: StarType) -> T: pass @abstractmethod def visit_type_list(self, t: TypeList) -> T: pass @abstractmethod def visit_callable_argument(self, t: CallableArgument) -> T: pass @abstractmethod def visit_ellipsis_type(self, t: EllipsisType) -> T: pass class TypeTranslator(TypeVisitor[Type]): """Identity type transformation. Subclass this and override some methods to implement a non-trivial transformation. """ def visit_unbound_type(self, t: UnboundType) -> Type: return t def visit_any(self, t: AnyType) -> Type: return t def visit_none_type(self, t: NoneTyp) -> Type: return t def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return t def visit_erased_type(self, t: ErasedType) -> Type: return t def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_instance(self, t: Instance) -> Type: return Instance(t.type, self.translate_types(t.args), t.line, t.column) def visit_type_var(self, t: TypeVarType) -> Type: return t def visit_partial_type(self, t: PartialType) -> Type: return t def visit_callable_type(self, t: CallableType) -> Type: return t.copy_modified(arg_types=self.translate_types(t.arg_types), ret_type=t.ret_type.accept(self), variables=self.translate_variables(t.variables)) def visit_tuple_type(self, t: TupleType) -> Type: return TupleType(self.translate_types(t.items), # TODO: This appears to be unsafe. cast(Any, t.fallback.accept(self)), t.line, t.column) def visit_typeddict_type(self, t: TypedDictType) -> Type: items = OrderedDict([ (item_name, item_type.accept(self)) for (item_name, item_type) in t.items.items() ]) return TypedDictType(items, t.required_keys, # TODO: This appears to be unsafe. 
cast(Any, t.fallback.accept(self)), t.line, t.column) def visit_union_type(self, t: UnionType) -> Type: return UnionType(self.translate_types(t.items), t.line, t.column) def translate_types(self, types: List[Type]) -> List[Type]: return [t.accept(self) for t in types] def translate_variables(self, variables: List[TypeVarDef]) -> List[TypeVarDef]: return variables def visit_overloaded(self, t: Overloaded) -> Type: items = [] # type: List[CallableType] for item in t.items(): new = item.accept(self) if isinstance(new, CallableType): items.append(new) else: raise RuntimeError('CallableType expected, but got {}'.format(type(new))) return Overloaded(items=items) def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(t.item.accept(self), line=t.line, column=t.column) def visit_forwardref_type(self, t: ForwardRef) -> Type: return t class TypeStrVisitor(SyntheticTypeVisitor[str]): """Visitor for pretty-printing types into strings. This is mostly for debugging/testing. Does not preserve original formatting. Notes: - Represent unbound types as Foo? or Foo?[...]. - Represent the NoneTyp type as None. """ def __init__(self, id_mapper: Optional[IdMapper] = None) -> None: self.id_mapper = id_mapper def visit_unbound_type(self, t: UnboundType) -> str: s = t.name + '?' if t.args != []: s += '[{}]'.format(self.list_str(t.args)) return s def visit_type_list(self, t: TypeList) -> str: return '<TypeList {}>'.format(self.list_str(t.items)) def visit_callable_argument(self, t: CallableArgument) -> str: typ = t.typ.accept(self) if t.name is None: return "{}({})".format(t.constructor, typ) else: return "{}({}, {})".format(t.constructor, typ, t.name) def visit_any(self, t: AnyType) -> str: return 'Any' def visit_none_type(self, t: NoneTyp) -> str: # Fully qualify to make this distinct from the None value. return "builtins.None" def visit_uninhabited_type(self, t: UninhabitedType) -> str: return "<nothing>" def visit_erased_type(self, t: ErasedType) -> str: return "<Erased>" def visit_deleted_type(self, t: DeletedType) -> str: if t.source is None: return "<Deleted>" else: return "<Deleted '{}'>".format(t.source) def visit_instance(self, t: Instance) -> str: if t.type is not None: s = t.type.fullname() or t.type.name() or '<???>' else: s = '<?>' if t.erased: s += '*' if t.args != []: s += '[{}]'.format(self.list_str(t.args)) if self.id_mapper: s += '<{}>'.format(self.id_mapper.id(t.type)) return s def visit_type_var(self, t: TypeVarType) -> str: if t.name is None: # Anonymous type variable type (only numeric id). s = '`{}'.format(t.id) else: # Named type variable type. s = '{}`{}'.format(t.name, t.id) if self.id_mapper and t.upper_bound: s += '(upper_bound={})'.format(t.upper_bound.accept(self)) return s def visit_callable_type(self, t: CallableType) -> str: s = '' bare_asterisk = False for i in range(len(t.arg_types)): if s != '': s += ', ' if t.arg_kinds[i] in (ARG_NAMED, ARG_NAMED_OPT) and not bare_asterisk: s += '*, ' bare_asterisk = True if t.arg_kinds[i] == ARG_STAR: s += '*' if t.arg_kinds[i] == ARG_STAR2: s += '**' name = t.arg_names[i] if name: s += name + ': ' s += t.arg_types[i].accept(self) if t.arg_kinds[i] in (ARG_OPT, ARG_NAMED_OPT): s += ' =' s = '({})'.format(s) if not isinstance(t.ret_type, NoneTyp): s += ' -> {}'.format(t.ret_type.accept(self)) if t.variables: vs = [] # We reimplement TypeVarDef.__repr__ here in order to support id_mapper.
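# Illustrative example (not part of the original source): with the rules above, a
# signature like (x: int, *args: str) -> bool is rendered roughly as
#     'def (x: builtins.int, *args: builtins.str) -> builtins.bool'
# and a generic signature additionally gains a '[T]' prefix from the loop below.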
for var in t.variables: if var.values: vals = '({})'.format(', '.join(val.accept(self) for val in var.values)) vs.append('{} in {}'.format(var.name, vals)) elif not is_named_instance(var.upper_bound, 'builtins.object'): vs.append('{} <: {}'.format(var.name, var.upper_bound.accept(self))) else: vs.append(var.name) s = '{} {}'.format('[{}]'.format(', '.join(vs)), s) return 'def {}'.format(s) def visit_overloaded(self, t: Overloaded) -> str: a = [] for i in t.items(): a.append(i.accept(self)) return 'Overload({})'.format(', '.join(a)) def visit_tuple_type(self, t: TupleType) -> str: s = self.list_str(t.items) if t.fallback and t.fallback.type: fallback_name = t.fallback.type.fullname() if fallback_name != 'builtins.tuple': return 'Tuple[{}, fallback={}]'.format(s, t.fallback.accept(self)) return 'Tuple[{}]'.format(s) def visit_typeddict_type(self, t: TypedDictType) -> str: def item_str(name: str, typ: str) -> str: if name in t.required_keys: return '{!r}: {}'.format(name, typ) else: return '{!r}?: {}'.format(name, typ) s = '{' + ', '.join(item_str(name, typ.accept(self)) for name, typ in t.items.items()) + '}' prefix = '' suffix = '' if t.fallback and t.fallback.type: if t.fallback.type.fullname() != 'typing.Mapping': prefix = repr(t.fallback.type.fullname()) + ', ' else: suffix = ', fallback={}'.format(t.fallback.accept(self)) return 'TypedDict({}{}{})'.format(prefix, s, suffix) def visit_star_type(self, t: StarType) -> str: s = t.type.accept(self) return '*{}'.format(s) def visit_union_type(self, t: UnionType) -> str: s = self.list_str(t.items) return 'Union[{}]'.format(s) def visit_partial_type(self, t: PartialType) -> str: if t.type is None: return '<partial None>' else: return '<partial {}[{}]>'.format(t.type.name(), ', '.join(['?'] * len(t.type.type_vars))) def visit_ellipsis_type(self, t: EllipsisType) -> str: return '...' def visit_type_type(self, t: TypeType) -> str: return 'Type[{}]'.format(t.item.accept(self)) def visit_forwardref_type(self, t: ForwardRef) -> str: if t.resolved: return '~{}'.format(t.resolved.accept(self)) else: return '~{}'.format(t.unbound.accept(self)) def list_str(self, a: List[Type]) -> str: """Convert items of an array to strings (pretty-print types) and join the results with commas. """ res = [] for t in a: if isinstance(t, Type): res.append(t.accept(self)) else: res.append(str(t)) return ', '.join(res) class TypeQuery(SyntheticTypeVisitor[T]): """Visitor for performing queries of types.
strategy is used to combine results for a series of types Common use cases involve a boolean query using `any` or `all` """ def __init__(self, strategy: Callable[[Iterable[T]], T]) -> None: self.strategy = strategy def visit_unbound_type(self, t: UnboundType) -> T: return self.query_types(t.args) def visit_type_list(self, t: TypeList) -> T: return self.query_types(t.items) def visit_callable_argument(self, t: CallableArgument) -> T: return t.typ.accept(self) def visit_any(self, t: AnyType) -> T: return self.strategy([]) def visit_uninhabited_type(self, t: UninhabitedType) -> T: return self.strategy([]) def visit_none_type(self, t: NoneTyp) -> T: return self.strategy([]) def visit_erased_type(self, t: ErasedType) -> T: return self.strategy([]) def visit_deleted_type(self, t: DeletedType) -> T: return self.strategy([]) def visit_type_var(self, t: TypeVarType) -> T: return self.strategy([]) def visit_partial_type(self, t: PartialType) -> T: return self.query_types(t.inner_types) def visit_instance(self, t: Instance) -> T: return self.query_types(t.args) def visit_callable_type(self, t: CallableType) -> T: # FIX generics return self.query_types(t.arg_types + [t.ret_type]) def visit_tuple_type(self, t: TupleType) -> T: return self.query_types(t.items) def visit_typeddict_type(self, t: TypedDictType) -> T: return self.query_types(t.items.values()) def visit_star_type(self, t: StarType) -> T: return t.type.accept(self) def visit_union_type(self, t: UnionType) -> T: return self.query_types(t.items) def visit_overloaded(self, t: Overloaded) -> T: return self.query_types(t.items()) def visit_type_type(self, t: TypeType) -> T: return t.item.accept(self) def visit_forwardref_type(self, t: ForwardRef) -> T: if t.resolved: return t.resolved.accept(self) else: return t.unbound.accept(self) def visit_ellipsis_type(self, t: EllipsisType) -> T: return self.strategy([]) def query_types(self, types: Iterable[Type]) -> T: """Perform a query for a list of types. Use the strategy to combine the results. 
""" return self.strategy(t.accept(self) for t in types) def strip_type(typ: Type) -> Type: """Make a copy of type without 'debugging info' (function name).""" if isinstance(typ, CallableType): return typ.copy_modified(name=None) elif isinstance(typ, Overloaded): return Overloaded([cast(CallableType, strip_type(item)) for item in typ.items()]) else: return typ def is_named_instance(t: Type, fullname: str) -> bool: return (isinstance(t, Instance) and t.type is not None and t.type.fullname() == fullname) def copy_type(t: Type) -> Type: """ Build a copy of the type; used to mutate the copy with truthiness information """ return copy.copy(t) def true_only(t: Type) -> Type: """ Restricted version of t with only True-ish values """ if not t.can_be_true: # All values of t are False-ish, so there are no true values in it return UninhabitedType(line=t.line, column=t.column) elif not t.can_be_false: # All values of t are already True-ish, so true_only is idempotent in this case return t elif isinstance(t, UnionType): # The true version of a union type is the union of the true versions of its components new_items = [true_only(item) for item in t.items] return UnionType.make_simplified_union(new_items, line=t.line, column=t.column) else: new_t = copy_type(t) new_t.can_be_false = False return new_t def false_only(t: Type) -> Type: """ Restricted version of t with only False-ish values """ if not t.can_be_false: # All values of t are True-ish, so there are no false values in it return UninhabitedType(line=t.line) elif not t.can_be_true: # All values of t are already False-ish, so false_only is idempotent in this case return t elif isinstance(t, UnionType): # The false version of a union type is the union of the false versions of its components new_items = [false_only(item) for item in t.items] return UnionType.make_simplified_union(new_items, line=t.line, column=t.column) else: new_t = copy_type(t) new_t.can_be_true = False return new_t def true_or_false(t: Type) -> Type: """ Unrestricted version of t with both True-ish and False-ish values """ if isinstance(t, UnionType): new_items = [true_or_false(item) for item in t.items] return UnionType.make_simplified_union(new_items, line=t.line, column=t.column) new_t = copy_type(t) new_t.can_be_true = type(new_t).can_be_true new_t.can_be_false = type(new_t).can_be_false return new_t def function_type(func: mypy.nodes.FuncBase, fallback: Instance) -> FunctionLike: if func.type: assert isinstance(func.type, FunctionLike) return func.type else: # Implicit type signature with dynamic types. # Overloaded functions always have a signature, so func must be an ordinary function. 
assert isinstance(func, mypy.nodes.FuncItem), str(func) return callable_type(func, fallback) def callable_type(fdef: mypy.nodes.FuncItem, fallback: Instance, ret_type: Optional[Type] = None) -> CallableType: return CallableType( [AnyType(TypeOfAny.unannotated)] * len(fdef.arg_names), fdef.arg_kinds, [None if argument_elide_name(n) else n for n in fdef.arg_names], ret_type or AnyType(TypeOfAny.unannotated), fallback, name=fdef.name(), line=fdef.line, column=fdef.column, implicit=True, ) def get_typ_args(tp: Type) -> List[Type]: """Get all type arguments from a parameterizable Type.""" if not isinstance(tp, (Instance, UnionType, TupleType, CallableType)): return [] typ_args = (tp.args if isinstance(tp, Instance) else tp.items if not isinstance(tp, CallableType) else tp.arg_types + [tp.ret_type]) return typ_args def set_typ_args(tp: Type, new_args: List[Type], line: int = -1, column: int = -1) -> Type: """Return a copy of a parameterizable Type with arguments set to new_args.""" if isinstance(tp, Instance): return Instance(tp.type, new_args, line, column) if isinstance(tp, TupleType): return tp.copy_modified(items=new_args) if isinstance(tp, UnionType): return UnionType(new_args, line, column) if isinstance(tp, CallableType): return tp.copy_modified(arg_types=new_args[:-1], ret_type=new_args[-1], line=line, column=column) return tp def get_type_vars(typ: Type) -> List[TypeVarType]: """Get all type variables that are present in an already analyzed type, without duplicates, in order of textual appearance. Similar to TypeAnalyser.get_type_var_names. """ all_vars = [] # type: List[TypeVarType] for t in get_typ_args(typ): if isinstance(t, TypeVarType): all_vars.append(t) else: all_vars.extend(get_type_vars(t)) # Remove duplicates while preserving order included = set() # type: Set[TypeVarId] tvars = [] for var in all_vars: if var.id not in included: tvars.append(var) included.add(var.id) return tvars def flatten_nested_unions(types: Iterable[Type]) -> List[Type]: """Flatten nested unions in a type list.""" flat_items = [] # type: List[Type] for tp in types: if isinstance(tp, UnionType): flat_items.extend(flatten_nested_unions(tp.items)) else: flat_items.append(tp) return flat_items def union_items(typ: Type) -> List[Type]: """Return the flattened items of a union type. For non-union types, return a list containing just the argument. """ if isinstance(typ, UnionType): items = [] for item in typ.items: items.extend(union_items(item)) return items else: return [typ] names = globals().copy() names.pop('NOT_READY', None) deserialize_map = { key: obj.deserialize # type: ignore for key, obj in names.items() if isinstance(obj, type) and issubclass(obj, Type) and obj is not Type } mypy-0.560/mypy/typevars.py0000644€tŠÔÚ€2›s®0000000145313215007205022117 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Union, List from mypy.nodes import TypeInfo from mypy.erasetype import erase_typevars from mypy.sametypes import is_same_type from mypy.types import Instance, TypeVarType, TupleType, Type def fill_typevars(typ: TypeInfo) -> Union[Instance, TupleType]: """For a non-generic type, return instance type representing the type. For a generic G type with parameters T1, .., Tn, return G[T1, ..., Tn]. 
""" tv = [] # type: List[Type] for i in range(len(typ.type_vars)): tv.append(TypeVarType(typ.defn.type_vars[i])) inst = Instance(typ, tv) if typ.tuple_type is None: return inst return typ.tuple_type.copy_modified(fallback=inst) def has_no_typevars(typ: Type) -> bool: return is_same_type(typ, erase_typevars(typ)) mypy-0.560/mypy/util.py0000644€tŠÔÚ€2›s®0000001064413215007205021221 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utility functions with no non-trivial dependencies.""" import re import subprocess from xml.sax.saxutils import escape from typing import TypeVar, List, Tuple, Optional, Sequence, Dict T = TypeVar('T') ENCODING_RE = re.compile(br'([ \t\v]*#.*(\r\n?|\n))??[ \t\v]*#.*coding[:=][ \t]*([-\w.]+)') default_python2_interpreter = ['python2', 'python', '/usr/bin/python', 'C:\\Python27\\python.exe'] def split_module_names(mod_name: str) -> List[str]: """Return the module and all parent module names. So, if `mod_name` is 'a.b.c', this function will return ['a.b.c', 'a.b', and 'a']. """ out = [mod_name] while '.' in mod_name: mod_name = mod_name.rsplit('.', 1)[0] out.append(mod_name) return out def short_type(obj: object) -> str: """Return the last component of the type name of an object. If obj is None, return 'nil'. For example, if obj is 1, return 'int'. """ if obj is None: return 'nil' t = str(type(obj)) return t.split('.')[-1].rstrip("'>") def array_repr(a: List[T]) -> List[str]: """Return the items of an array converted to strings using Repr.""" aa = [] # type: List[str] for x in a: aa.append(repr(x)) return aa def find_python_encoding(text: bytes, pyversion: Tuple[int, int]) -> Tuple[str, int]: """PEP-263 for detecting Python file encoding""" result = ENCODING_RE.match(text) if result: line = 2 if result.group(1) else 1 encoding = result.group(3).decode('ascii') # Handle some aliases that Python is happy to accept and that are used in the wild. if encoding.startswith(('iso-latin-1-', 'latin-1-')) or encoding == 'iso-latin-1': encoding = 'latin-1' return encoding, line else: default_encoding = 'utf8' if pyversion[0] >= 3 else 'ascii' return default_encoding, -1 _python2_interpreter = None # type: Optional[str] def try_find_python2_interpreter() -> Optional[str]: global _python2_interpreter if _python2_interpreter: return _python2_interpreter for interpreter in default_python2_interpreter: try: retcode = subprocess.Popen([ interpreter, '-c', 'import sys, typing; assert sys.version_info[:2] == (2, 7)' ]).wait() if not retcode: _python2_interpreter = interpreter return interpreter except OSError: pass return None PASS_TEMPLATE = """ """ FAIL_TEMPLATE = """ {text} """ ERROR_TEMPLATE = """ {text} """ def write_junit_xml(dt: float, serious: bool, messages: List[str], path: str) -> None: """XXX""" if not messages and not serious: xml = PASS_TEMPLATE.format(time=dt) elif not serious: xml = FAIL_TEMPLATE.format(text=escape('\n'.join(messages)), time=dt) else: xml = ERROR_TEMPLATE.format(text=escape('\n'.join(messages)), time=dt) with open(path, 'wb') as f: f.write(xml.encode('utf-8')) class IdMapper: """Generate integer ids for objects. Unlike id(), these start from 0 and increment by 1, and ids won't get reused across the life-time of IdMapper. Assume objects don't redefine __eq__ or __hash__. 
""" def __init__(self) -> None: self.id_map = {} # type: Dict[object, int] self.next_id = 0 def id(self, o: object) -> int: if o not in self.id_map: self.id_map[o] = self.next_id self.next_id += 1 return self.id_map[o] def get_prefix(fullname: str) -> str: """Drop the final component of a qualified name (e.g. ('x.y' -> 'x').""" return fullname.rsplit('.', 1)[0] mypy-0.560/mypy/version.py0000644€tŠÔÚ€2›s®0000000053213215007206021725 0ustar jukkaDROPBOX\Domain Users00000000000000import os from mypy import git __version__ = '0.560' base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) if git.is_git_repo(mypy_dir) and git.have_git(): __version__ += '-' + git.git_revision(mypy_dir).decode('utf-8') if git.is_dirty(mypy_dir): __version__ += '-dirty' del mypy_dir mypy-0.560/mypy/visitor.py0000644€tŠÔÚ€2›s®0000003260313215007205021742 0ustar jukkaDROPBOX\Domain Users00000000000000"""Generic abstract syntax tree node visitor""" from abc import abstractmethod from typing import TypeVar, Generic if False: # break import cycle only needed for mypy import mypy.nodes T = TypeVar('T') class ExpressionVisitor(Generic[T]): @abstractmethod def visit_int_expr(self, o: 'mypy.nodes.IntExpr') -> T: pass @abstractmethod def visit_str_expr(self, o: 'mypy.nodes.StrExpr') -> T: pass @abstractmethod def visit_bytes_expr(self, o: 'mypy.nodes.BytesExpr') -> T: pass @abstractmethod def visit_unicode_expr(self, o: 'mypy.nodes.UnicodeExpr') -> T: pass @abstractmethod def visit_float_expr(self, o: 'mypy.nodes.FloatExpr') -> T: pass @abstractmethod def visit_complex_expr(self, o: 'mypy.nodes.ComplexExpr') -> T: pass @abstractmethod def visit_ellipsis(self, o: 'mypy.nodes.EllipsisExpr') -> T: pass @abstractmethod def visit_star_expr(self, o: 'mypy.nodes.StarExpr') -> T: pass @abstractmethod def visit_name_expr(self, o: 'mypy.nodes.NameExpr') -> T: pass @abstractmethod def visit_member_expr(self, o: 'mypy.nodes.MemberExpr') -> T: pass @abstractmethod def visit_yield_from_expr(self, o: 'mypy.nodes.YieldFromExpr') -> T: pass @abstractmethod def visit_yield_expr(self, o: 'mypy.nodes.YieldExpr') -> T: pass @abstractmethod def visit_call_expr(self, o: 'mypy.nodes.CallExpr') -> T: pass @abstractmethod def visit_op_expr(self, o: 'mypy.nodes.OpExpr') -> T: pass @abstractmethod def visit_comparison_expr(self, o: 'mypy.nodes.ComparisonExpr') -> T: pass @abstractmethod def visit_cast_expr(self, o: 'mypy.nodes.CastExpr') -> T: pass @abstractmethod def visit_reveal_type_expr(self, o: 'mypy.nodes.RevealTypeExpr') -> T: pass @abstractmethod def visit_super_expr(self, o: 'mypy.nodes.SuperExpr') -> T: pass @abstractmethod def visit_unary_expr(self, o: 'mypy.nodes.UnaryExpr') -> T: pass @abstractmethod def visit_list_expr(self, o: 'mypy.nodes.ListExpr') -> T: pass @abstractmethod def visit_dict_expr(self, o: 'mypy.nodes.DictExpr') -> T: pass @abstractmethod def visit_tuple_expr(self, o: 'mypy.nodes.TupleExpr') -> T: pass @abstractmethod def visit_set_expr(self, o: 'mypy.nodes.SetExpr') -> T: pass @abstractmethod def visit_index_expr(self, o: 'mypy.nodes.IndexExpr') -> T: pass @abstractmethod def visit_type_application(self, o: 'mypy.nodes.TypeApplication') -> T: pass @abstractmethod def visit_lambda_expr(self, o: 'mypy.nodes.LambdaExpr') -> T: pass @abstractmethod def visit_list_comprehension(self, o: 'mypy.nodes.ListComprehension') -> T: pass @abstractmethod def visit_set_comprehension(self, o: 'mypy.nodes.SetComprehension') -> T: pass @abstractmethod def visit_dictionary_comprehension(self, o: 
'mypy.nodes.DictionaryComprehension') -> T: pass @abstractmethod def visit_generator_expr(self, o: 'mypy.nodes.GeneratorExpr') -> T: pass @abstractmethod def visit_slice_expr(self, o: 'mypy.nodes.SliceExpr') -> T: pass @abstractmethod def visit_conditional_expr(self, o: 'mypy.nodes.ConditionalExpr') -> T: pass @abstractmethod def visit_backquote_expr(self, o: 'mypy.nodes.BackquoteExpr') -> T: pass @abstractmethod def visit_type_var_expr(self, o: 'mypy.nodes.TypeVarExpr') -> T: pass @abstractmethod def visit_type_alias_expr(self, o: 'mypy.nodes.TypeAliasExpr') -> T: pass @abstractmethod def visit_namedtuple_expr(self, o: 'mypy.nodes.NamedTupleExpr') -> T: pass @abstractmethod def visit_enum_call_expr(self, o: 'mypy.nodes.EnumCallExpr') -> T: pass @abstractmethod def visit_typeddict_expr(self, o: 'mypy.nodes.TypedDictExpr') -> T: pass @abstractmethod def visit_newtype_expr(self, o: 'mypy.nodes.NewTypeExpr') -> T: pass @abstractmethod def visit__promote_expr(self, o: 'mypy.nodes.PromoteExpr') -> T: pass @abstractmethod def visit_await_expr(self, o: 'mypy.nodes.AwaitExpr') -> T: pass @abstractmethod def visit_temp_node(self, o: 'mypy.nodes.TempNode') -> T: pass class StatementVisitor(Generic[T]): # Definitions @abstractmethod def visit_assignment_stmt(self, o: 'mypy.nodes.AssignmentStmt') -> T: pass @abstractmethod def visit_for_stmt(self, o: 'mypy.nodes.ForStmt') -> T: pass @abstractmethod def visit_with_stmt(self, o: 'mypy.nodes.WithStmt') -> T: pass @abstractmethod def visit_del_stmt(self, o: 'mypy.nodes.DelStmt') -> T: pass @abstractmethod def visit_func_def(self, o: 'mypy.nodes.FuncDef') -> T: pass @abstractmethod def visit_overloaded_func_def(self, o: 'mypy.nodes.OverloadedFuncDef') -> T: pass @abstractmethod def visit_class_def(self, o: 'mypy.nodes.ClassDef') -> T: pass @abstractmethod def visit_global_decl(self, o: 'mypy.nodes.GlobalDecl') -> T: pass @abstractmethod def visit_nonlocal_decl(self, o: 'mypy.nodes.NonlocalDecl') -> T: pass @abstractmethod def visit_decorator(self, o: 'mypy.nodes.Decorator') -> T: pass @abstractmethod def visit_var(self, o: 'mypy.nodes.Var') -> T: pass # Module structure @abstractmethod def visit_import(self, o: 'mypy.nodes.Import') -> T: pass @abstractmethod def visit_import_from(self, o: 'mypy.nodes.ImportFrom') -> T: pass @abstractmethod def visit_import_all(self, o: 'mypy.nodes.ImportAll') -> T: pass # Statements @abstractmethod def visit_block(self, o: 'mypy.nodes.Block') -> T: pass @abstractmethod def visit_expression_stmt(self, o: 'mypy.nodes.ExpressionStmt') -> T: pass @abstractmethod def visit_operator_assignment_stmt(self, o: 'mypy.nodes.OperatorAssignmentStmt') -> T: pass @abstractmethod def visit_while_stmt(self, o: 'mypy.nodes.WhileStmt') -> T: pass @abstractmethod def visit_return_stmt(self, o: 'mypy.nodes.ReturnStmt') -> T: pass @abstractmethod def visit_assert_stmt(self, o: 'mypy.nodes.AssertStmt') -> T: pass @abstractmethod def visit_if_stmt(self, o: 'mypy.nodes.IfStmt') -> T: pass @abstractmethod def visit_break_stmt(self, o: 'mypy.nodes.BreakStmt') -> T: pass @abstractmethod def visit_continue_stmt(self, o: 'mypy.nodes.ContinueStmt') -> T: pass @abstractmethod def visit_pass_stmt(self, o: 'mypy.nodes.PassStmt') -> T: pass @abstractmethod def visit_raise_stmt(self, o: 'mypy.nodes.RaiseStmt') -> T: pass @abstractmethod def visit_try_stmt(self, o: 'mypy.nodes.TryStmt') -> T: pass @abstractmethod def visit_print_stmt(self, o: 'mypy.nodes.PrintStmt') -> T: pass @abstractmethod def visit_exec_stmt(self, o: 'mypy.nodes.ExecStmt') -> T: pass 
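# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original source): a minimal concrete
# visitor.  NodeVisitor, defined just below, supplies a no-op default for
# every node kind, so a subclass only overrides the hooks it cares about.
# Note that NodeVisitor does not recurse into child nodes by itself; use
# mypy.traverser.TraverserVisitor when a full-tree walk is needed.  The class
# name below is hypothetical.
#
#     class FuncDefCounter(NodeVisitor[None]):
#         def __init__(self) -> None:
#             self.count = 0
#
#         def visit_func_def(self, o: 'mypy.nodes.FuncDef') -> None:
#             self.count += 1
# ---------------------------------------------------------------------------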
class NodeVisitor(Generic[T], ExpressionVisitor[T], StatementVisitor[T]): """Empty base class for parse tree node visitors. The T type argument specifies the return type of the visit methods. As all methods defined here return None by default, subclasses do not always need to override all the methods. TODO make the default return value explicit """ # Not in superclasses: def visit_mypy_file(self, o: 'mypy.nodes.MypyFile') -> T: pass # Module structure def visit_import(self, o: 'mypy.nodes.Import') -> T: pass def visit_import_from(self, o: 'mypy.nodes.ImportFrom') -> T: pass def visit_import_all(self, o: 'mypy.nodes.ImportAll') -> T: pass # Definitions def visit_func_def(self, o: 'mypy.nodes.FuncDef') -> T: pass def visit_overloaded_func_def(self, o: 'mypy.nodes.OverloadedFuncDef') -> T: pass def visit_class_def(self, o: 'mypy.nodes.ClassDef') -> T: pass def visit_global_decl(self, o: 'mypy.nodes.GlobalDecl') -> T: pass def visit_nonlocal_decl(self, o: 'mypy.nodes.NonlocalDecl') -> T: pass def visit_decorator(self, o: 'mypy.nodes.Decorator') -> T: pass def visit_var(self, o: 'mypy.nodes.Var') -> T: pass # Statements def visit_block(self, o: 'mypy.nodes.Block') -> T: pass def visit_expression_stmt(self, o: 'mypy.nodes.ExpressionStmt') -> T: pass def visit_assignment_stmt(self, o: 'mypy.nodes.AssignmentStmt') -> T: pass def visit_operator_assignment_stmt(self, o: 'mypy.nodes.OperatorAssignmentStmt') -> T: pass def visit_while_stmt(self, o: 'mypy.nodes.WhileStmt') -> T: pass def visit_for_stmt(self, o: 'mypy.nodes.ForStmt') -> T: pass def visit_return_stmt(self, o: 'mypy.nodes.ReturnStmt') -> T: pass def visit_assert_stmt(self, o: 'mypy.nodes.AssertStmt') -> T: pass def visit_del_stmt(self, o: 'mypy.nodes.DelStmt') -> T: pass def visit_if_stmt(self, o: 'mypy.nodes.IfStmt') -> T: pass def visit_break_stmt(self, o: 'mypy.nodes.BreakStmt') -> T: pass def visit_continue_stmt(self, o: 'mypy.nodes.ContinueStmt') -> T: pass def visit_pass_stmt(self, o: 'mypy.nodes.PassStmt') -> T: pass def visit_raise_stmt(self, o: 'mypy.nodes.RaiseStmt') -> T: pass def visit_try_stmt(self, o: 'mypy.nodes.TryStmt') -> T: pass def visit_with_stmt(self, o: 'mypy.nodes.WithStmt') -> T: pass def visit_print_stmt(self, o: 'mypy.nodes.PrintStmt') -> T: pass def visit_exec_stmt(self, o: 'mypy.nodes.ExecStmt') -> T: pass # Expressions (default no-op implementation) def visit_int_expr(self, o: 'mypy.nodes.IntExpr') -> T: pass def visit_str_expr(self, o: 'mypy.nodes.StrExpr') -> T: pass def visit_bytes_expr(self, o: 'mypy.nodes.BytesExpr') -> T: pass def visit_unicode_expr(self, o: 'mypy.nodes.UnicodeExpr') -> T: pass def visit_float_expr(self, o: 'mypy.nodes.FloatExpr') -> T: pass def visit_complex_expr(self, o: 'mypy.nodes.ComplexExpr') -> T: pass def visit_ellipsis(self, o: 'mypy.nodes.EllipsisExpr') -> T: pass def visit_star_expr(self, o: 'mypy.nodes.StarExpr') -> T: pass def visit_name_expr(self, o: 'mypy.nodes.NameExpr') -> T: pass def visit_member_expr(self, o: 'mypy.nodes.MemberExpr') -> T: pass def visit_yield_from_expr(self, o: 'mypy.nodes.YieldFromExpr') -> T: pass def visit_yield_expr(self, o: 'mypy.nodes.YieldExpr') -> T: pass def visit_call_expr(self, o: 'mypy.nodes.CallExpr') -> T: pass def visit_op_expr(self, o: 'mypy.nodes.OpExpr') -> T: pass def visit_comparison_expr(self, o: 'mypy.nodes.ComparisonExpr') -> T: pass def visit_cast_expr(self, o: 'mypy.nodes.CastExpr') -> T: pass def visit_reveal_type_expr(self, o: 'mypy.nodes.RevealTypeExpr') -> T: pass def visit_super_expr(self, o: 'mypy.nodes.SuperExpr') -> 
T: pass def visit_unary_expr(self, o: 'mypy.nodes.UnaryExpr') -> T: pass def visit_list_expr(self, o: 'mypy.nodes.ListExpr') -> T: pass def visit_dict_expr(self, o: 'mypy.nodes.DictExpr') -> T: pass def visit_tuple_expr(self, o: 'mypy.nodes.TupleExpr') -> T: pass def visit_set_expr(self, o: 'mypy.nodes.SetExpr') -> T: pass def visit_index_expr(self, o: 'mypy.nodes.IndexExpr') -> T: pass def visit_type_application(self, o: 'mypy.nodes.TypeApplication') -> T: pass def visit_lambda_expr(self, o: 'mypy.nodes.LambdaExpr') -> T: pass def visit_list_comprehension(self, o: 'mypy.nodes.ListComprehension') -> T: pass def visit_set_comprehension(self, o: 'mypy.nodes.SetComprehension') -> T: pass def visit_dictionary_comprehension(self, o: 'mypy.nodes.DictionaryComprehension') -> T: pass def visit_generator_expr(self, o: 'mypy.nodes.GeneratorExpr') -> T: pass def visit_slice_expr(self, o: 'mypy.nodes.SliceExpr') -> T: pass def visit_conditional_expr(self, o: 'mypy.nodes.ConditionalExpr') -> T: pass def visit_backquote_expr(self, o: 'mypy.nodes.BackquoteExpr') -> T: pass def visit_type_var_expr(self, o: 'mypy.nodes.TypeVarExpr') -> T: pass def visit_type_alias_expr(self, o: 'mypy.nodes.TypeAliasExpr') -> T: pass def visit_namedtuple_expr(self, o: 'mypy.nodes.NamedTupleExpr') -> T: pass def visit_enum_call_expr(self, o: 'mypy.nodes.EnumCallExpr') -> T: pass def visit_typeddict_expr(self, o: 'mypy.nodes.TypedDictExpr') -> T: pass def visit_newtype_expr(self, o: 'mypy.nodes.NewTypeExpr') -> T: pass def visit__promote_expr(self, o: 'mypy.nodes.PromoteExpr') -> T: pass def visit_await_expr(self, o: 'mypy.nodes.AwaitExpr') -> T: pass def visit_temp_node(self, o: 'mypy.nodes.TempNode') -> T: pass mypy-0.560/mypy/waiter.py0000644€tŠÔÚ€2›s®0000003613313215007205021540 0ustar jukkaDROPBOX\Domain Users00000000000000"""Parallel subprocess task runner. This is used for running mypy tests. """ from typing import Dict, List, Optional, Set, Tuple, Any, Iterable, IO import os from multiprocessing import cpu_count import pipes import re from subprocess import Popen, STDOUT, DEVNULL import sys import tempfile import time import json from collections import defaultdict class WaiterError(Exception): pass class LazySubprocess: """Wrapper around a subprocess that runs a test task.""" def __init__(self, name: str, args: List[str], *, cwd: Optional[str] = None, env: Optional[Dict[str, str]] = None, passthrough: Optional[int] = None) -> None: self.name = name self.args = args self.cwd = cwd self.env = env self.start_time = None # type: Optional[float] self.end_time = None # type: Optional[float] # None means no passthrough # otherwise, it represents verbosity level self.passthrough = passthrough def start(self) -> None: if self.passthrough is None or self.passthrough < 0: self.outfile = tempfile.TemporaryFile() # type: Optional[IO[Any]] else: self.outfile = None self.start_time = time.perf_counter() self.process = Popen(self.args, cwd=self.cwd, env=self.env, stdout=self.outfile, stderr=STDOUT) self.pid = self.process.pid def wait(self) -> int: return self.process.wait() def status(self) -> Optional[int]: return self.process.returncode def read_output(self) -> str: if not self.outfile: return '' file = self.outfile file.seek(0) # Assume it's ascii to avoid unicode headaches (and portability issues). return file.read().decode('ascii') @property def elapsed_time(self) -> float: if self.end_time is None or self.start_time is None: return 0 else: return self.end_time - self.start_time class Noter: """Update stats about running jobs. 
Only used when verbosity == 0. """ def __init__(self, total: int) -> None: # Total number of tasks. self.total = total self.running = set() # type: Set[int] # Passed tasks. self.passes = 0 # Failed tasks. self.fails = 0 def start(self, job: int) -> None: self.running.add(job) self.update() def stop(self, job: int, failed: bool) -> None: self.running.remove(job) if failed: self.fails += 1 else: self.passes += 1 self.update() def message(self, msg: str) -> None: # Using a CR instead of NL will overwrite the line. sys.stdout.write('%-80s\r' % msg) sys.stdout.flush() def update(self) -> None: pending = self.total - self.passes - self.fails - len(self.running) args = (self.passes, self.fails, pending, len(self.running)) msg = 'passed %d, failed %d, pending %d; running %d' % args self.message(msg) def clear(self) -> None: self.message('') class Waiter: """Run subprocesses in parallel and wait for them. Usage: waiter = Waiter() waiter.add('sleep 9') waiter.add('sleep 10') if not waiter.run(): print('error') """ LOGSIZE = 50 FULL_LOG_FILENAME = '.runtest_log.json' def __init__(self, limit: int = 0, *, verbosity: int = 0, xfail: List[str] = [], lf: bool = False, ff: bool = False) -> None: self.verbosity = verbosity self.queue = [] # type: List[LazySubprocess] # Index of next task to run in the queue. self.next = 0 self.current = {} # type: Dict[int, Tuple[int, LazySubprocess]] if limit == 0: try: sched_getaffinity = os.sched_getaffinity except AttributeError: # no support for affinity on OSX/Windows limit = cpu_count() else: # Note: only count CPUs we are allowed to use. It is a # major mistake to count *all* CPUs on the machine. limit = len(sched_getaffinity(0)) self.limit = limit self.lf = lf self.ff = ff assert limit > 0 self.xfail = set(xfail) self._note = None # type: Optional[Noter] self.times1 = {} # type: Dict[str, float] self.times2 = {} # type: Dict[str, float] self.new_log = defaultdict(dict) # type: Dict[str, Dict[str, float]] self.sequential_tasks = set() # type: Set[str] def load_log_file(self) -> Optional[List[Dict[str, Dict[str, Any]]]]: try: # get the last log with open(self.FULL_LOG_FILENAME) as fp: test_log = json.load(fp) except FileNotFoundError: test_log = [] except json.JSONDecodeError: print('corrupt test log file {}'.format(self.FULL_LOG_FILENAME), file=sys.stderr) test_log = [] return test_log def add(self, cmd: LazySubprocess, sequential: bool = False) -> int: rv = len(self.queue) if cmd.name in (task.name for task in self.queue): sys.exit('Duplicate test name: {}'.format(cmd.name)) self.queue.append(cmd) if sequential: self.sequential_tasks.add(cmd.name) return rv def _start_next(self) -> None: num = self.next cmd = self.queue[num] name = cmd.name cmd.start() self.current[cmd.pid] = (num, cmd) if self.verbosity >= 1: print('%-8s #%d %s' % ('START', num, name)) if self.verbosity >= 2: print('%-8s #%d %s' % ('CWD', num, cmd.cwd or '.')) cmd_str = ' '.join(pipes.quote(a) for a in cmd.args) print('%-8s #%d %s' % ('COMMAND', num, cmd_str)) sys.stdout.flush() elif self.verbosity >= 0: assert self._note is not None self._note.start(num) self.next += 1 def _record_time(self, name: str, elapsed_time: float) -> None: # The names we use are space-separated series of rather arbitrary words. # They tend to start general and get more specific, so use that. name1 = re.sub(' .*', '', name) # First word. self.times1[name1] = elapsed_time + self.times1.get(name1, 0) name2 = re.sub('( .*?) .*', r'\1', name) # First two words. 
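# Illustrative example (not part of the original source, hypothetical task name): for a
# task named 'check unit-tests testMisc', name1 is 'check' and name2 is
# 'check unit-tests', so elapsed time is accumulated at both granularities.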
self.times2[name2] = elapsed_time + self.times2.get(name2, 0) def _poll_current(self) -> Tuple[int, int]: while True: time.sleep(.01) for pid in self.current: cmd = self.current[pid][1] code = cmd.process.poll() if code is not None: cmd.end_time = time.perf_counter() assert cmd.start_time is not None self.new_log['exit_code'][cmd.name] = code self.new_log['runtime'][cmd.name] = cmd.end_time - cmd.start_time return pid, code def _wait_next(self) -> Tuple[List[str], int, int]: """Wait for a single task to finish. Return tuple (list of failed tasks, number test cases, number of failed tests). """ pid, status = self._poll_current() num, cmd = self.current.pop(pid) name = cmd.name self._record_time(cmd.name, cmd.elapsed_time) rc = cmd.wait() if rc >= 0: msg = 'EXIT %d' % rc else: msg = 'SIG %d' % -rc if self.verbosity >= 1: print('%-8s #%d %s' % (msg, num, name)) sys.stdout.flush() elif self.verbosity >= 0: assert self._note is not None self._note.stop(num, bool(rc)) elif self.verbosity >= -1: sys.stdout.write('.' if rc == 0 else msg[0]) num_complete = self.next - len(self.current) if num_complete % 50 == 0 or num_complete == len(self.queue): sys.stdout.write(' %d/%d\n' % (num_complete, len(self.queue))) elif num_complete % 10 == 0: sys.stdout.write(' ') sys.stdout.flush() if rc != 0: if name not in self.xfail: fail_type = 'FAILURE' # type: Optional[str] else: fail_type = 'XFAIL' else: if name not in self.xfail: fail_type = None else: fail_type = 'UPASS' # Get task output. output = cmd.read_output() num_tests, num_tests_failed = parse_test_stats_from_output(output, fail_type) if fail_type is not None or self.verbosity >= 1: self._report_task_failure(fail_type, num, name, output) if fail_type is not None: failed_tasks = ['%8s %s' % (fail_type, name)] else: failed_tasks = [] return failed_tasks, num_tests, num_tests_failed def _report_task_failure(self, fail_type: Optional[str], num: int, name: str, output: str) -> None: if self.verbosity <= 0: sys.stdout.write('\n') sys.stdout.write('\n%-8s #%d %s\n\n' % (fail_type or 'PASS', num, name)) sys.stdout.write(output + '\n') sys.stdout.flush() def run(self) -> int: if self.verbosity >= -1: print('%-8s %d' % ('PARALLEL', self.limit)) sys.stdout.flush() if self.verbosity == 0: self._note = Noter(len(self.queue)) print('SUMMARY %d tasks selected' % len(self.queue)) def avg(lst: Iterable[float]) -> float: valid_items = [item for item in lst if item is not None] if not valid_items: # we don't know how long a new task takes # better err by putting it in front in case it is slow: # a fast task in front hurts performance less than a slow task in the back return float('inf') else: return sum(valid_items) / len(valid_items) logs = self.load_log_file() if logs: times = {cmd.name: avg(log['runtime'].get(cmd.name, None) for log in logs) for cmd in self.queue} def sort_function(cmd: LazySubprocess) -> Tuple[Any, int, float]: # longest tasks first runtime = -times[cmd.name] # sequential tasks go first by default sequential = -(cmd.name in self.sequential_tasks) if self.ff: # failed tasks first with -ff assert logs is not None exit_code = -logs[-1]['exit_code'].get(cmd.name, 0) if not exit_code: # avoid interrupting parallel tasks with sequential in between # so either: seq failed, parallel failed, parallel passed, seq passed # or: parallel failed, seq failed, seq passed, parallel passed # I picked the first one arbitrarily, since no obvious pros/cons # in other words, among failed tasks, sequential should go before parallel, # and among successful tasks, sequential 
should go after parallel sequential = -sequential else: # ignore exit code without -ff exit_code = 0 return exit_code, sequential, runtime self.queue = sorted(self.queue, key=sort_function) if self.lf: self.queue = [cmd for cmd in self.queue if logs[-1]['exit_code'].get(cmd.name, 0)] sys.stdout.flush() # Failed tasks. all_failures = [] # type: List[str] # Number of test cases. Some tasks can involve multiple test cases. total_tests = 0 # Number of failed test cases. total_failed_tests = 0 running_sequential_task = False while self.current or self.next < len(self.queue): while len(self.current) < self.limit and self.next < len(self.queue): # only start next task if idle, or current and next tasks are both parallel if running_sequential_task: break if self.queue[self.next].name in self.sequential_tasks: if self.current: break else: running_sequential_task = True self._start_next() fails, tests, test_fails = self._wait_next() running_sequential_task = False all_failures += fails total_tests += tests total_failed_tests += test_fails if self.verbosity == 0: assert self._note is not None self._note.clear() if self.new_log: # don't append empty log, it will corrupt the cache file # log only LOGSIZE most recent tests logs = self.load_log_file() assert logs is not None test_log = (logs + [self.new_log])[-self.LOGSIZE:] try: with open(self.FULL_LOG_FILENAME, 'w') as fp: json.dump(test_log, fp, sort_keys=True, indent=4) except Exception as e: print('cannot save test log file:', e) if all_failures: summary = 'SUMMARY %d/%d tasks and %d/%d tests failed' % ( len(all_failures), len(self.queue), total_failed_tests, total_tests) print(summary) for f in all_failures: print(f) print(summary) print('*** FAILURE ***') sys.stdout.flush() if any('XFAIL' not in f for f in all_failures): return 1 else: print('SUMMARY all %d tasks and %d tests passed' % ( len(self.queue), total_tests)) print('*** OK ***') sys.stdout.flush() return 0 def parse_test_stats_from_output(output: str, fail_type: Optional[str]) -> Tuple[int, int]: """Parse tasks output and determine test counts. Return tuple (number of tests, number of test failures). Default to the entire task representing a single test as a fallback. """ # pytest m = re.search('^=+ (.*) in [0-9.]+ seconds =+\n\Z', output, re.MULTILINE) if m: counts = {} for part in m.group(1).split(', '): # e.g., '3 failed, 32 passed, 345 deselected' count, key = part.split() counts[key] = int(count) return (sum(c for k, c in counts.items() if k != 'deselected'), counts.get('failed', 0)) # myunit m = re.search('^([0-9]+)/([0-9]+) test cases failed(, ([0-9]+) skipped)?.$', output, re.MULTILINE) if m: return int(m.group(2)), int(m.group(1)) m = re.search('^([0-9]+) test cases run(, ([0-9]+) skipped)?, all passed.$', output, re.MULTILINE) if m: return int(m.group(1)), 0 # Couldn't find test counts, so fall back to single test per tasks. 
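# Illustrative example (not part of the original source): a pytest summary line such as
#
#     ==== 3 failed, 32 passed, 345 deselected in 1.23 seconds ====
#
# terminating the output is parsed above into roughly (num_tests=35, num_failures=3):
# deselected tests are excluded from the total and the 'failed' count supplies the
# number of failures.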
if fail_type is not None: return 1, 1 else: return 1, 0 mypy-0.560/mypy.egg-info/0000755€tŠÔÚ€2›s®0000000000013215007242021360 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/mypy.egg-info/dependency_links.txt0000644€tŠÔÚ€2›s®0000000000113215007242025426 0ustar jukkaDROPBOX\Domain Users00000000000000 mypy-0.560/mypy.egg-info/entry_points.txt0000644€tŠÔÚ€2›s®0000000015213215007242024654 0ustar jukkaDROPBOX\Domain Users00000000000000[console_scripts] dmypy = mypy.dmypy:main mypy = mypy.__main__:console_entry stubgen = mypy.stubgen:main mypy-0.560/mypy.egg-info/PKG-INFO0000644€tŠÔÚ€2›s®0000000231413215007242022455 0ustar jukkaDROPBOX\Domain Users00000000000000Metadata-Version: 1.1 Name: mypy Version: 0.560 Summary: Optional static typing for Python Home-page: http://www.mypy-lang.org/ Author: Jukka Lehtosalo Author-email: jukka.lehtosalo@iki.fi License: MIT License Description-Content-Type: UNKNOWN Description: Mypy -- Optional Static Typing for Python ========================================= Add type annotations to your Python programs, and use mypy to type check them. Mypy is essentially a Python linter on steroids, and it can catch many programming errors by analyzing your program, without actually having to run it. Mypy has a powerful type system with features such as type inference, gradual typing, generics and union types. Platform: POSIX Classifier: Development Status :: 3 - Alpha Classifier: Environment :: Console Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: POSIX Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Topic :: Software Development mypy-0.560/mypy.egg-info/requires.txt0000644€tŠÔÚ€2›s®0000000012613215007242023757 0ustar jukkaDROPBOX\Domain Users00000000000000typed-ast<1.2.0,>=1.1.0 psutil<5.5.0,>=5.4.0 [:python_version < "3.5"] typing>=3.5.3 mypy-0.560/mypy.egg-info/SOURCES.txt0000644€tŠÔÚ€2›s®0000014036413215007242023254 0ustar jukkaDROPBOX\Domain Users00000000000000LICENSE MANIFEST.in README.md mypy_self_check.ini runtests.py setup.cfg setup.py docs/Makefile docs/README.md docs/make.bat docs/requirements-docs.txt docs/source/additional_features.rst docs/source/basics.rst docs/source/builtin_types.rst docs/source/casts.rst docs/source/cheat_sheet.rst docs/source/cheat_sheet_py3.rst docs/source/class_basics.rst docs/source/command_line.rst docs/source/common_issues.rst docs/source/conf.py docs/source/config_file.rst docs/source/duck_type_compatibility.rst docs/source/dynamic_typing.rst docs/source/faq.rst docs/source/function_overloading.rst docs/source/generics.rst docs/source/getting_started.rst docs/source/index.rst docs/source/introduction.rst docs/source/kinds_of_types.rst docs/source/python2.rst docs/source/python36.rst docs/source/revision_history.rst docs/source/supported_python_features.rst docs/source/type_inference_and_annotations.rst extensions/LICENSE extensions/MANIFEST.in extensions/README.md extensions/mypy_extensions.py extensions/setup.cfg extensions/setup.py mypy/__init__.py mypy/__main__.py mypy/api.py mypy/applytype.py mypy/binder.py mypy/build.py mypy/checker.py mypy/checkexpr.py mypy/checkmember.py mypy/checkstrformat.py mypy/constraints.py mypy/defaults.py mypy/dmypy.py mypy/dmypy_server.py mypy/dmypy_util.py mypy/erasetype.py mypy/errors.py mypy/expandtype.py mypy/experiments.py mypy/exprtotype.py 
mypy/fastparse.py mypy/fastparse2.py mypy/fixup.py mypy/git.py mypy/indirection.py mypy/infer.py mypy/join.py mypy/literals.py mypy/main.py mypy/maptype.py mypy/meet.py mypy/messages.py mypy/moduleinfo.py mypy/nodes.py mypy/options.py mypy/parse.py mypy/plugin.py mypy/report.py mypy/sametypes.py mypy/semanal.py mypy/semanal_pass1.py mypy/semanal_pass3.py mypy/sharedparse.py mypy/solve.py mypy/stats.py mypy/strconv.py mypy/stubgen.py mypy/stubgenc.py mypy/stubutil.py mypy/subtypes.py mypy/traverser.py mypy/treetransform.py mypy/tvar_scope.py mypy/typeanal.py mypy/types.py mypy/typevars.py mypy/util.py mypy/version.py mypy/visitor.py mypy/waiter.py mypy.egg-info/PKG-INFO mypy.egg-info/SOURCES.txt mypy.egg-info/dependency_links.txt mypy.egg-info/entry_points.txt mypy.egg-info/requires.txt mypy.egg-info/top_level.txt mypy/myunit/__init__.py mypy/myunit/__main__.py mypy/server/__init__.py mypy/server/astdiff.py mypy/server/astmerge.py mypy/server/aststrip.py mypy/server/deps.py mypy/server/subexpr.py mypy/server/target.py mypy/server/trigger.py mypy/server/update.py mypy/test/__init__.py mypy/test/collect.py mypy/test/config.py mypy/test/data.py mypy/test/helpers.py mypy/test/testargs.py mypy/test/testcheck.py mypy/test/testcmdline.py mypy/test/testdeps.py mypy/test/testdiff.py mypy/test/testdmypy.py mypy/test/testextensions.py mypy/test/testfinegrained.py mypy/test/testgraph.py mypy/test/testinfer.py mypy/test/testmerge.py mypy/test/testmoduleinfo.py mypy/test/testparse.py mypy/test/testpythoneval.py mypy/test/testreports.py mypy/test/testsemanal.py mypy/test/testsolve.py mypy/test/teststubgen.py mypy/test/testsubtypes.py mypy/test/testtransform.py mypy/test/testtypegen.py mypy/test/testtypes.py mypy/test/typefixture.py mypy/test/update.py scripts/dmypy scripts/dumpmodule.py scripts/find_type.py scripts/finegrained.py scripts/mypy scripts/mypy.bat scripts/myunit scripts/stubgen scripts/stubtest.py test-data/.flake8 test-data/samples/bottles.py test-data/samples/class.py test-data/samples/cmdline.py test-data/samples/crawl.py test-data/samples/crawl2.py test-data/samples/dict.py test-data/samples/fib.py test-data/samples/files.py test-data/samples/for.py test-data/samples/generators.py test-data/samples/greet.py test-data/samples/guess.py test-data/samples/hello.py test-data/samples/input.py test-data/samples/itertool.py test-data/samples/readme.txt test-data/samples/regexp.py test-data/stdlib-samples/3.2/base64.py test-data/stdlib-samples/3.2/fnmatch.py test-data/stdlib-samples/3.2/genericpath.py test-data/stdlib-samples/3.2/getopt.py test-data/stdlib-samples/3.2/glob.py test-data/stdlib-samples/3.2/posixpath.py test-data/stdlib-samples/3.2/pprint.py test-data/stdlib-samples/3.2/random.py test-data/stdlib-samples/3.2/shutil.py test-data/stdlib-samples/3.2/subprocess.py test-data/stdlib-samples/3.2/tempfile.py test-data/stdlib-samples/3.2/textwrap.py test-data/stdlib-samples/3.2/incomplete/logging/__init__.py test-data/stdlib-samples/3.2/incomplete/urllib/__init__.py test-data/stdlib-samples/3.2/incomplete/urllib/parse.py test-data/stdlib-samples/3.2/test/__init__.py test-data/stdlib-samples/3.2/test/randv2_32.pck test-data/stdlib-samples/3.2/test/randv2_64.pck test-data/stdlib-samples/3.2/test/randv3.pck test-data/stdlib-samples/3.2/test/support.py test-data/stdlib-samples/3.2/test/test_base64.py test-data/stdlib-samples/3.2/test/test_fnmatch.py test-data/stdlib-samples/3.2/test/test_genericpath.py test-data/stdlib-samples/3.2/test/test_getopt.py test-data/stdlib-samples/3.2/test/test_glob.py 
test-data/stdlib-samples/3.2/test/test_posixpath.py test-data/stdlib-samples/3.2/test/test_pprint.py test-data/stdlib-samples/3.2/test/test_random.py test-data/stdlib-samples/3.2/test/test_set.py test-data/stdlib-samples/3.2/test/test_shutil.py test-data/stdlib-samples/3.2/test/test_subprocess.py test-data/stdlib-samples/3.2/test/test_tempfile.py test-data/stdlib-samples/3.2/test/test_textwrap.py test-data/stdlib-samples/3.2/test/tf_inherit_check.py test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py test-data/unit/README.md test-data/unit/check-abstract.test test-data/unit/check-async-await.test test-data/unit/check-basic.test test-data/unit/check-bound.test test-data/unit/check-callable.test test-data/unit/check-class-namedtuple.test test-data/unit/check-classes.test test-data/unit/check-classvar.test test-data/unit/check-columns.test test-data/unit/check-custom-plugin.test test-data/unit/check-default-plugin.test test-data/unit/check-dynamic-typing.test test-data/unit/check-enum.test test-data/unit/check-expressions.test test-data/unit/check-fastparse.test test-data/unit/check-flags.test test-data/unit/check-functions.test test-data/unit/check-generic-subtyping.test test-data/unit/check-generics.test test-data/unit/check-ignore.test test-data/unit/check-incomplete-fixture.test test-data/unit/check-incremental.test test-data/unit/check-inference-context.test test-data/unit/check-inference.test test-data/unit/check-isinstance.test test-data/unit/check-kwargs.test test-data/unit/check-lists.test test-data/unit/check-modules.test test-data/unit/check-multiple-inheritance.test test-data/unit/check-namedtuple.test test-data/unit/check-newsyntax.test test-data/unit/check-newtype.test test-data/unit/check-optional.test test-data/unit/check-overloading.test test-data/unit/check-protocols.test test-data/unit/check-python2.test test-data/unit/check-selftype.test test-data/unit/check-semanal-error.test test-data/unit/check-serialize.test test-data/unit/check-statements.test test-data/unit/check-super.test test-data/unit/check-tuples.test test-data/unit/check-type-aliases.test test-data/unit/check-type-checks.test test-data/unit/check-type-promotion.test test-data/unit/check-typeddict.test test-data/unit/check-typevar-values.test test-data/unit/check-underscores.test test-data/unit/check-unions.test test-data/unit/check-unreachable-code.test test-data/unit/check-unsupported.test test-data/unit/check-varargs.test test-data/unit/check-warnings.test test-data/unit/cmdline.test test-data/unit/deps-classes.test test-data/unit/deps-expressions.test test-data/unit/deps-generics.test test-data/unit/deps-statements.test test-data/unit/deps-types.test test-data/unit/deps.test test-data/unit/diff.test test-data/unit/fine-grained-blockers.test test-data/unit/fine-grained-cycles.test test-data/unit/fine-grained-modules.test test-data/unit/fine-grained.test test-data/unit/merge.test test-data/unit/parse-errors.test test-data/unit/parse-python2.test test-data/unit/parse.test test-data/unit/python2eval.test test-data/unit/pythoneval-asyncio.test test-data/unit/pythoneval.test test-data/unit/reports.test test-data/unit/semanal-abstractclasses.test test-data/unit/semanal-basic.test test-data/unit/semanal-classes.test test-data/unit/semanal-classvar.test 
test-data/unit/semanal-errors.test test-data/unit/semanal-expressions.test test-data/unit/semanal-modules.test test-data/unit/semanal-namedtuple.test test-data/unit/semanal-python2.test test-data/unit/semanal-statements.test test-data/unit/semanal-symtable.test test-data/unit/semanal-typealiases.test test-data/unit/semanal-typeddict.test test-data/unit/semanal-typeinfo.test test-data/unit/semanal-types.test test-data/unit/stubgen.test test-data/unit/typexport-basic.test test-data/unit/fixtures/__new__.pyi test-data/unit/fixtures/alias.pyi test-data/unit/fixtures/args.pyi test-data/unit/fixtures/async_await.pyi test-data/unit/fixtures/bool.pyi test-data/unit/fixtures/callable.pyi test-data/unit/fixtures/classmethod.pyi test-data/unit/fixtures/complex.pyi test-data/unit/fixtures/dict.pyi test-data/unit/fixtures/exception.pyi test-data/unit/fixtures/f_string.pyi test-data/unit/fixtures/fine_grained.pyi test-data/unit/fixtures/float.pyi test-data/unit/fixtures/floatdict.pyi test-data/unit/fixtures/for.pyi test-data/unit/fixtures/function.pyi test-data/unit/fixtures/isinstance.pyi test-data/unit/fixtures/isinstancelist.pyi test-data/unit/fixtures/list.pyi test-data/unit/fixtures/module.pyi test-data/unit/fixtures/module_all.pyi test-data/unit/fixtures/module_all_python2.pyi test-data/unit/fixtures/ops.pyi test-data/unit/fixtures/primitives.pyi test-data/unit/fixtures/property.pyi test-data/unit/fixtures/python2.pyi test-data/unit/fixtures/set.pyi test-data/unit/fixtures/slice.pyi test-data/unit/fixtures/staticmethod.pyi test-data/unit/fixtures/transform.pyi test-data/unit/fixtures/tuple-simple.pyi test-data/unit/fixtures/tuple.pyi test-data/unit/fixtures/type.pyi test-data/unit/fixtures/typing-full.pyi test-data/unit/fixtures/union.pyi test-data/unit/lib-stub/__builtin__.pyi test-data/unit/lib-stub/abc.pyi test-data/unit/lib-stub/blocker.pyi test-data/unit/lib-stub/blocker2.pyi test-data/unit/lib-stub/broken.pyi test-data/unit/lib-stub/builtins.pyi test-data/unit/lib-stub/collections.pyi test-data/unit/lib-stub/contextlib.pyi test-data/unit/lib-stub/enum.pyi test-data/unit/lib-stub/mypy_extensions.pyi test-data/unit/lib-stub/six.pyi test-data/unit/lib-stub/sys.pyi test-data/unit/lib-stub/types.pyi test-data/unit/lib-stub/typing.pyi test-data/unit/lib-stub/typing_extensions.pyi test-data/unit/plugins/attrhook.py test-data/unit/plugins/badreturn.py test-data/unit/plugins/badreturn2.py test-data/unit/plugins/fnplugin.py test-data/unit/plugins/named_callable.py test-data/unit/plugins/noentry.py test-data/unit/plugins/plugin2.py test-data/unit/plugins/type_anal_hook.py typeshed/stdlib/2/BaseHTTPServer.pyi typeshed/stdlib/2/ConfigParser.pyi typeshed/stdlib/2/Cookie.pyi typeshed/stdlib/2/HTMLParser.pyi typeshed/stdlib/2/Queue.pyi typeshed/stdlib/2/SimpleHTTPServer.pyi typeshed/stdlib/2/SocketServer.pyi typeshed/stdlib/2/StringIO.pyi typeshed/stdlib/2/UserDict.pyi typeshed/stdlib/2/UserList.pyi typeshed/stdlib/2/UserString.pyi typeshed/stdlib/2/__builtin__.pyi typeshed/stdlib/2/_ast.pyi typeshed/stdlib/2/_collections.pyi typeshed/stdlib/2/_functools.pyi typeshed/stdlib/2/_hotshot.pyi typeshed/stdlib/2/_io.pyi typeshed/stdlib/2/_json.pyi typeshed/stdlib/2/_md5.pyi typeshed/stdlib/2/_sha.pyi typeshed/stdlib/2/_sha256.pyi typeshed/stdlib/2/_sha512.pyi typeshed/stdlib/2/_socket.pyi typeshed/stdlib/2/_sre.pyi typeshed/stdlib/2/_struct.pyi typeshed/stdlib/2/_symtable.pyi typeshed/stdlib/2/_threading_local.pyi typeshed/stdlib/2/_warnings.pyi typeshed/stdlib/2/abc.pyi typeshed/stdlib/2/ast.pyi 
typeshed/stdlib/2/atexit.pyi typeshed/stdlib/2/builtins.pyi typeshed/stdlib/2/cPickle.pyi typeshed/stdlib/2/cStringIO.pyi typeshed/stdlib/2/collections.pyi typeshed/stdlib/2/commands.pyi typeshed/stdlib/2/compileall.pyi typeshed/stdlib/2/cookielib.pyi typeshed/stdlib/2/datetime.pyi typeshed/stdlib/2/decimal.pyi typeshed/stdlib/2/dummy_thread.pyi typeshed/stdlib/2/exceptions.pyi typeshed/stdlib/2/fcntl.pyi typeshed/stdlib/2/fnmatch.pyi typeshed/stdlib/2/functools.pyi typeshed/stdlib/2/future_builtins.pyi typeshed/stdlib/2/gc.pyi typeshed/stdlib/2/genericpath.pyi typeshed/stdlib/2/getopt.pyi typeshed/stdlib/2/getpass.pyi typeshed/stdlib/2/gettext.pyi typeshed/stdlib/2/glob.pyi typeshed/stdlib/2/gzip.pyi typeshed/stdlib/2/hashlib.pyi typeshed/stdlib/2/heapq.pyi typeshed/stdlib/2/htmlentitydefs.pyi typeshed/stdlib/2/httplib.pyi typeshed/stdlib/2/imp.pyi typeshed/stdlib/2/importlib.pyi typeshed/stdlib/2/inspect.pyi typeshed/stdlib/2/io.pyi typeshed/stdlib/2/itertools.pyi typeshed/stdlib/2/json.pyi typeshed/stdlib/2/macpath.pyi typeshed/stdlib/2/markupbase.pyi typeshed/stdlib/2/md5.pyi typeshed/stdlib/2/mimetools.pyi typeshed/stdlib/2/mutex.pyi typeshed/stdlib/2/ntpath.pyi typeshed/stdlib/2/nturl2path.pyi typeshed/stdlib/2/os2emxpath.pyi typeshed/stdlib/2/pipes.pyi typeshed/stdlib/2/platform.pyi typeshed/stdlib/2/popen2.pyi typeshed/stdlib/2/posix.pyi typeshed/stdlib/2/posixpath.pyi typeshed/stdlib/2/pydoc.pyi typeshed/stdlib/2/random.pyi typeshed/stdlib/2/re.pyi typeshed/stdlib/2/repr.pyi typeshed/stdlib/2/resource.pyi typeshed/stdlib/2/rfc822.pyi typeshed/stdlib/2/robotparser.pyi typeshed/stdlib/2/runpy.pyi typeshed/stdlib/2/sets.pyi typeshed/stdlib/2/sha.pyi typeshed/stdlib/2/shelve.pyi typeshed/stdlib/2/shlex.pyi typeshed/stdlib/2/shutil.pyi typeshed/stdlib/2/signal.pyi typeshed/stdlib/2/smtplib.pyi typeshed/stdlib/2/spwd.pyi typeshed/stdlib/2/sre_constants.pyi typeshed/stdlib/2/sre_parse.pyi typeshed/stdlib/2/ssl.pyi typeshed/stdlib/2/stat.pyi typeshed/stdlib/2/string.pyi typeshed/stdlib/2/stringold.pyi typeshed/stdlib/2/strop.pyi typeshed/stdlib/2/subprocess.pyi typeshed/stdlib/2/symbol.pyi typeshed/stdlib/2/sys.pyi typeshed/stdlib/2/tempfile.pyi typeshed/stdlib/2/textwrap.pyi typeshed/stdlib/2/thread.pyi typeshed/stdlib/2/time.pyi typeshed/stdlib/2/toaiff.pyi typeshed/stdlib/2/tokenize.pyi typeshed/stdlib/2/types.pyi typeshed/stdlib/2/typing.pyi typeshed/stdlib/2/unittest.pyi typeshed/stdlib/2/urllib.pyi typeshed/stdlib/2/urllib2.pyi typeshed/stdlib/2/urlparse.pyi typeshed/stdlib/2/user.pyi typeshed/stdlib/2/whichdb.pyi typeshed/stdlib/2/xmlrpclib.pyi typeshed/stdlib/2/distutils/__init__.pyi typeshed/stdlib/2/distutils/emxccompiler.pyi typeshed/stdlib/2/email/MIMEText.pyi typeshed/stdlib/2/email/__init__.pyi typeshed/stdlib/2/email/_parseaddr.pyi typeshed/stdlib/2/email/utils.pyi typeshed/stdlib/2/email/mime/__init__.pyi typeshed/stdlib/2/email/mime/application.pyi typeshed/stdlib/2/email/mime/base.pyi typeshed/stdlib/2/email/mime/multipart.pyi typeshed/stdlib/2/email/mime/nonmultipart.pyi typeshed/stdlib/2/email/mime/text.pyi typeshed/stdlib/2/encodings/__init__.pyi typeshed/stdlib/2/encodings/utf_8.pyi typeshed/stdlib/2/multiprocessing/__init__.pyi typeshed/stdlib/2/multiprocessing/process.pyi typeshed/stdlib/2/multiprocessing/util.pyi typeshed/stdlib/2/os/__init__.pyi typeshed/stdlib/2/os/path.pyi typeshed/stdlib/2/sqlite3/__init__.pyi typeshed/stdlib/2/sqlite3/dbapi2.pyi typeshed/stdlib/2/wsgiref/__init__.pyi typeshed/stdlib/2/wsgiref/types.pyi typeshed/stdlib/2/wsgiref/validate.pyi 
typeshed/stdlib/2and3/__future__.pyi typeshed/stdlib/2and3/_bisect.pyi typeshed/stdlib/2and3/_codecs.pyi typeshed/stdlib/2and3/_csv.pyi typeshed/stdlib/2and3/_heapq.pyi typeshed/stdlib/2and3/_random.pyi typeshed/stdlib/2and3/_weakref.pyi typeshed/stdlib/2and3/_weakrefset.pyi typeshed/stdlib/2and3/argparse.pyi typeshed/stdlib/2and3/array.pyi typeshed/stdlib/2and3/asynchat.pyi typeshed/stdlib/2and3/asyncore.pyi typeshed/stdlib/2and3/base64.pyi typeshed/stdlib/2and3/binascii.pyi typeshed/stdlib/2and3/binhex.pyi typeshed/stdlib/2and3/bisect.pyi typeshed/stdlib/2and3/bz2.pyi typeshed/stdlib/2and3/cProfile.pyi typeshed/stdlib/2and3/calendar.pyi typeshed/stdlib/2and3/cgi.pyi typeshed/stdlib/2and3/chunk.pyi typeshed/stdlib/2and3/cmath.pyi typeshed/stdlib/2and3/cmd.pyi typeshed/stdlib/2and3/code.pyi typeshed/stdlib/2and3/codecs.pyi typeshed/stdlib/2and3/codeop.pyi typeshed/stdlib/2and3/colorsys.pyi typeshed/stdlib/2and3/contextlib.pyi typeshed/stdlib/2and3/copy.pyi typeshed/stdlib/2and3/crypt.pyi typeshed/stdlib/2and3/csv.pyi typeshed/stdlib/2and3/difflib.pyi typeshed/stdlib/2and3/dis.pyi typeshed/stdlib/2and3/doctest.pyi typeshed/stdlib/2and3/errno.pyi typeshed/stdlib/2and3/filecmp.pyi typeshed/stdlib/2and3/fileinput.pyi typeshed/stdlib/2and3/formatter.pyi typeshed/stdlib/2and3/fractions.pyi typeshed/stdlib/2and3/ftplib.pyi typeshed/stdlib/2and3/grp.pyi typeshed/stdlib/2and3/hmac.pyi typeshed/stdlib/2and3/imaplib.pyi typeshed/stdlib/2and3/keyword.pyi typeshed/stdlib/2and3/linecache.pyi typeshed/stdlib/2and3/locale.pyi typeshed/stdlib/2and3/marshal.pyi typeshed/stdlib/2and3/math.pyi typeshed/stdlib/2and3/mimetypes.pyi typeshed/stdlib/2and3/mmap.pyi typeshed/stdlib/2and3/numbers.pyi typeshed/stdlib/2and3/opcode.pyi typeshed/stdlib/2and3/operator.pyi typeshed/stdlib/2and3/optparse.pyi typeshed/stdlib/2and3/pdb.pyi typeshed/stdlib/2and3/pickle.pyi typeshed/stdlib/2and3/pickletools.pyi typeshed/stdlib/2and3/pkgutil.pyi typeshed/stdlib/2and3/plistlib.pyi typeshed/stdlib/2and3/poplib.pyi typeshed/stdlib/2and3/pprint.pyi typeshed/stdlib/2and3/profile.pyi typeshed/stdlib/2and3/pstats.pyi typeshed/stdlib/2and3/pty.pyi typeshed/stdlib/2and3/pwd.pyi typeshed/stdlib/2and3/py_compile.pyi typeshed/stdlib/2and3/pyclbr.pyi typeshed/stdlib/2and3/quopri.pyi typeshed/stdlib/2and3/readline.pyi typeshed/stdlib/2and3/rlcompleter.pyi typeshed/stdlib/2and3/sched.pyi typeshed/stdlib/2and3/select.pyi typeshed/stdlib/2and3/site.pyi typeshed/stdlib/2and3/smtpd.pyi typeshed/stdlib/2and3/sndhdr.pyi typeshed/stdlib/2and3/socket.pyi typeshed/stdlib/2and3/stringprep.pyi typeshed/stdlib/2and3/struct.pyi typeshed/stdlib/2and3/sunau.pyi typeshed/stdlib/2and3/symtable.pyi typeshed/stdlib/2and3/sysconfig.pyi typeshed/stdlib/2and3/syslog.pyi typeshed/stdlib/2and3/tabnanny.pyi typeshed/stdlib/2and3/tarfile.pyi typeshed/stdlib/2and3/telnetlib.pyi typeshed/stdlib/2and3/termios.pyi typeshed/stdlib/2and3/threading.pyi typeshed/stdlib/2and3/timeit.pyi typeshed/stdlib/2and3/token.pyi typeshed/stdlib/2and3/trace.pyi typeshed/stdlib/2and3/traceback.pyi typeshed/stdlib/2and3/tty.pyi typeshed/stdlib/2and3/unicodedata.pyi typeshed/stdlib/2and3/uu.pyi typeshed/stdlib/2and3/uuid.pyi typeshed/stdlib/2and3/warnings.pyi typeshed/stdlib/2and3/wave.pyi typeshed/stdlib/2and3/weakref.pyi typeshed/stdlib/2and3/webbrowser.pyi typeshed/stdlib/2and3/xdrlib.pyi typeshed/stdlib/2and3/zipfile.pyi typeshed/stdlib/2and3/zipimport.pyi typeshed/stdlib/2and3/zlib.pyi typeshed/stdlib/2and3/distutils/__init__.pyi typeshed/stdlib/2and3/distutils/archive_util.pyi 
typeshed/stdlib/2and3/distutils/bcppcompiler.pyi typeshed/stdlib/2and3/distutils/ccompiler.pyi typeshed/stdlib/2and3/distutils/cmd.pyi typeshed/stdlib/2and3/distutils/core.pyi typeshed/stdlib/2and3/distutils/cygwinccompiler.pyi typeshed/stdlib/2and3/distutils/debug.pyi typeshed/stdlib/2and3/distutils/dep_util.pyi typeshed/stdlib/2and3/distutils/dir_util.pyi typeshed/stdlib/2and3/distutils/dist.pyi typeshed/stdlib/2and3/distutils/errors.pyi typeshed/stdlib/2and3/distutils/extension.pyi typeshed/stdlib/2and3/distutils/fancy_getopt.pyi typeshed/stdlib/2and3/distutils/file_util.pyi typeshed/stdlib/2and3/distutils/filelist.pyi typeshed/stdlib/2and3/distutils/log.pyi typeshed/stdlib/2and3/distutils/msvccompiler.pyi typeshed/stdlib/2and3/distutils/spawn.pyi typeshed/stdlib/2and3/distutils/sysconfig.pyi typeshed/stdlib/2and3/distutils/text_file.pyi typeshed/stdlib/2and3/distutils/unixccompiler.pyi typeshed/stdlib/2and3/distutils/util.pyi typeshed/stdlib/2and3/distutils/version.pyi typeshed/stdlib/2and3/distutils/command/__init__.pyi typeshed/stdlib/2and3/distutils/command/bdist.pyi typeshed/stdlib/2and3/distutils/command/bdist_dumb.pyi typeshed/stdlib/2and3/distutils/command/bdist_msi.pyi typeshed/stdlib/2and3/distutils/command/bdist_packager.pyi typeshed/stdlib/2and3/distutils/command/bdist_rpm.pyi typeshed/stdlib/2and3/distutils/command/bdist_wininst.pyi typeshed/stdlib/2and3/distutils/command/build.pyi typeshed/stdlib/2and3/distutils/command/build_clib.pyi typeshed/stdlib/2and3/distutils/command/build_ext.pyi typeshed/stdlib/2and3/distutils/command/build_py.pyi typeshed/stdlib/2and3/distutils/command/build_scripts.pyi typeshed/stdlib/2and3/distutils/command/check.pyi typeshed/stdlib/2and3/distutils/command/clean.pyi typeshed/stdlib/2and3/distutils/command/config.pyi typeshed/stdlib/2and3/distutils/command/install.pyi typeshed/stdlib/2and3/distutils/command/install_data.pyi typeshed/stdlib/2and3/distutils/command/install_headers.pyi typeshed/stdlib/2and3/distutils/command/install_lib.pyi typeshed/stdlib/2and3/distutils/command/install_scripts.pyi typeshed/stdlib/2and3/distutils/command/register.pyi typeshed/stdlib/2and3/distutils/command/sdist.pyi typeshed/stdlib/2and3/lib2to3/__init__.pyi typeshed/stdlib/2and3/lib2to3/pygram.pyi typeshed/stdlib/2and3/lib2to3/pytree.pyi typeshed/stdlib/2and3/lib2to3/pgen2/__init__.pyi typeshed/stdlib/2and3/lib2to3/pgen2/driver.pyi typeshed/stdlib/2and3/lib2to3/pgen2/grammar.pyi typeshed/stdlib/2and3/lib2to3/pgen2/literals.pyi typeshed/stdlib/2and3/lib2to3/pgen2/parse.pyi typeshed/stdlib/2and3/lib2to3/pgen2/pgen.pyi typeshed/stdlib/2and3/lib2to3/pgen2/token.pyi typeshed/stdlib/2and3/lib2to3/pgen2/tokenize.pyi typeshed/stdlib/2and3/logging/__init__.pyi typeshed/stdlib/2and3/logging/config.pyi typeshed/stdlib/2and3/logging/handlers.pyi typeshed/stdlib/2and3/xml/__init__.pyi typeshed/stdlib/2and3/xml/etree/ElementInclude.pyi typeshed/stdlib/2and3/xml/etree/ElementPath.pyi typeshed/stdlib/2and3/xml/etree/ElementTree.pyi typeshed/stdlib/2and3/xml/etree/__init__.pyi typeshed/stdlib/2and3/xml/etree/cElementTree.pyi typeshed/stdlib/2and3/xml/sax/__init__.pyi typeshed/stdlib/2and3/xml/sax/handler.pyi typeshed/stdlib/2and3/xml/sax/saxutils.pyi typeshed/stdlib/2and3/xml/sax/xmlreader.pyi typeshed/stdlib/3/_ast.pyi typeshed/stdlib/3/_compression.pyi typeshed/stdlib/3/_curses.pyi typeshed/stdlib/3/_dummy_thread.pyi typeshed/stdlib/3/_imp.pyi typeshed/stdlib/3/_importlib_modulespec.pyi typeshed/stdlib/3/_json.pyi typeshed/stdlib/3/_markupbase.pyi 
typeshed/stdlib/3/_operator.pyi typeshed/stdlib/3/_posixsubprocess.pyi typeshed/stdlib/3/_subprocess.pyi typeshed/stdlib/3/_thread.pyi typeshed/stdlib/3/_threading_local.pyi typeshed/stdlib/3/_warnings.pyi typeshed/stdlib/3/abc.pyi typeshed/stdlib/3/ast.pyi typeshed/stdlib/3/atexit.pyi typeshed/stdlib/3/builtins.pyi typeshed/stdlib/3/compileall.pyi typeshed/stdlib/3/configparser.pyi typeshed/stdlib/3/datetime.pyi typeshed/stdlib/3/decimal.pyi typeshed/stdlib/3/fcntl.pyi typeshed/stdlib/3/fnmatch.pyi typeshed/stdlib/3/functools.pyi typeshed/stdlib/3/gc.pyi typeshed/stdlib/3/getopt.pyi typeshed/stdlib/3/getpass.pyi typeshed/stdlib/3/gettext.pyi typeshed/stdlib/3/glob.pyi typeshed/stdlib/3/gzip.pyi typeshed/stdlib/3/hashlib.pyi typeshed/stdlib/3/heapq.pyi typeshed/stdlib/3/imp.pyi typeshed/stdlib/3/inspect.pyi typeshed/stdlib/3/io.pyi typeshed/stdlib/3/itertools.pyi typeshed/stdlib/3/macpath.pyi typeshed/stdlib/3/msvcrt.pyi typeshed/stdlib/3/nntplib.pyi typeshed/stdlib/3/ntpath.pyi typeshed/stdlib/3/nturl2path.pyi typeshed/stdlib/3/pipes.pyi typeshed/stdlib/3/platform.pyi typeshed/stdlib/3/posix.pyi typeshed/stdlib/3/posixpath.pyi typeshed/stdlib/3/queue.pyi typeshed/stdlib/3/random.pyi typeshed/stdlib/3/re.pyi typeshed/stdlib/3/reprlib.pyi typeshed/stdlib/3/resource.pyi typeshed/stdlib/3/runpy.pyi typeshed/stdlib/3/shelve.pyi typeshed/stdlib/3/shlex.pyi typeshed/stdlib/3/shutil.pyi typeshed/stdlib/3/signal.pyi typeshed/stdlib/3/smtplib.pyi typeshed/stdlib/3/socketserver.pyi typeshed/stdlib/3/spwd.pyi typeshed/stdlib/3/sre_constants.pyi typeshed/stdlib/3/sre_parse.pyi typeshed/stdlib/3/ssl.pyi typeshed/stdlib/3/stat.pyi typeshed/stdlib/3/string.pyi typeshed/stdlib/3/subprocess.pyi typeshed/stdlib/3/symbol.pyi typeshed/stdlib/3/sys.pyi typeshed/stdlib/3/tempfile.pyi typeshed/stdlib/3/textwrap.pyi typeshed/stdlib/3/time.pyi typeshed/stdlib/3/tokenize.pyi typeshed/stdlib/3/types.pyi typeshed/stdlib/3/typing.pyi typeshed/stdlib/3.3/ipaddress.pyi typeshed/stdlib/3.4/_stat.pyi typeshed/stdlib/3.4/_tracemalloc.pyi typeshed/stdlib/3.4/enum.pyi typeshed/stdlib/3.4/pathlib.pyi typeshed/stdlib/3.4/selectors.pyi typeshed/stdlib/3.4/statistics.pyi typeshed/stdlib/3.4/tracemalloc.pyi typeshed/stdlib/3.4/asyncio/__init__.pyi typeshed/stdlib/3.4/asyncio/coroutines.pyi typeshed/stdlib/3.4/asyncio/events.pyi typeshed/stdlib/3.4/asyncio/futures.pyi typeshed/stdlib/3.4/asyncio/locks.pyi typeshed/stdlib/3.4/asyncio/protocols.pyi typeshed/stdlib/3.4/asyncio/queues.pyi typeshed/stdlib/3.4/asyncio/streams.pyi typeshed/stdlib/3.4/asyncio/subprocess.pyi typeshed/stdlib/3.4/asyncio/tasks.pyi typeshed/stdlib/3.4/asyncio/transports.pyi typeshed/stdlib/3.5/zipapp.pyi typeshed/stdlib/3.6/secrets.pyi typeshed/stdlib/3/collections/__init__.pyi typeshed/stdlib/3/collections/abc.pyi typeshed/stdlib/3/concurrent/__init__.pyi typeshed/stdlib/3/concurrent/futures/__init__.pyi typeshed/stdlib/3/concurrent/futures/_base.pyi typeshed/stdlib/3/concurrent/futures/process.pyi typeshed/stdlib/3/concurrent/futures/thread.pyi typeshed/stdlib/3/curses/__init__.pyi typeshed/stdlib/3/email/__init__.pyi typeshed/stdlib/3/email/charset.pyi typeshed/stdlib/3/email/contentmanager.pyi typeshed/stdlib/3/email/encoders.pyi typeshed/stdlib/3/email/errors.pyi typeshed/stdlib/3/email/feedparser.pyi typeshed/stdlib/3/email/generator.pyi typeshed/stdlib/3/email/header.pyi typeshed/stdlib/3/email/headerregistry.pyi typeshed/stdlib/3/email/iterators.pyi typeshed/stdlib/3/email/message.pyi typeshed/stdlib/3/email/parser.pyi 
typeshed/stdlib/3/email/policy.pyi typeshed/stdlib/3/email/utils.pyi typeshed/stdlib/3/email/mime/__init__.pyi typeshed/stdlib/3/email/mime/application.pyi typeshed/stdlib/3/email/mime/audio.pyi typeshed/stdlib/3/email/mime/base.pyi typeshed/stdlib/3/email/mime/image.pyi typeshed/stdlib/3/email/mime/message.pyi typeshed/stdlib/3/email/mime/multipart.pyi typeshed/stdlib/3/email/mime/nonmultipart.pyi typeshed/stdlib/3/email/mime/text.pyi typeshed/stdlib/3/encodings/__init__.pyi typeshed/stdlib/3/encodings/utf_8.pyi typeshed/stdlib/3/html/__init__.pyi typeshed/stdlib/3/html/entities.pyi typeshed/stdlib/3/html/parser.pyi typeshed/stdlib/3/http/__init__.pyi typeshed/stdlib/3/http/client.pyi typeshed/stdlib/3/http/cookiejar.pyi typeshed/stdlib/3/http/cookies.pyi typeshed/stdlib/3/http/server.pyi typeshed/stdlib/3/importlib/__init__.pyi typeshed/stdlib/3/importlib/abc.pyi typeshed/stdlib/3/importlib/machinery.pyi typeshed/stdlib/3/importlib/util.pyi typeshed/stdlib/3/json/__init__.pyi typeshed/stdlib/3/json/decoder.pyi typeshed/stdlib/3/json/encoder.pyi typeshed/stdlib/3/multiprocessing/__init__.pyi typeshed/stdlib/3/multiprocessing/connection.pyi typeshed/stdlib/3/multiprocessing/context.pyi typeshed/stdlib/3/multiprocessing/managers.pyi typeshed/stdlib/3/multiprocessing/pool.pyi typeshed/stdlib/3/multiprocessing/process.pyi typeshed/stdlib/3/multiprocessing/synchronize.pyi typeshed/stdlib/3/os/__init__.pyi typeshed/stdlib/3/os/path.pyi typeshed/stdlib/3/sqlite3/__init__.pyi typeshed/stdlib/3/sqlite3/dbapi2.pyi typeshed/stdlib/3/tkinter/__init__.pyi typeshed/stdlib/3/tkinter/constants.pyi typeshed/stdlib/3/tkinter/ttk.pyi typeshed/stdlib/3/unittest/__init__.pyi typeshed/stdlib/3/unittest/mock.pyi typeshed/stdlib/3/urllib/__init__.pyi typeshed/stdlib/3/urllib/error.pyi typeshed/stdlib/3/urllib/parse.pyi typeshed/stdlib/3/urllib/request.pyi typeshed/stdlib/3/urllib/response.pyi typeshed/stdlib/3/urllib/robotparser.pyi typeshed/stdlib/3/wsgiref/__init__.pyi typeshed/stdlib/3/wsgiref/types.pyi typeshed/stdlib/3/wsgiref/validate.pyi typeshed/tests/mypy_selftest.py typeshed/tests/mypy_test.py typeshed/tests/pytype_test.py typeshed/third_party/2/enum.pyi typeshed/third_party/2/gflags.pyi typeshed/third_party/2/itsdangerous.pyi typeshed/third_party/2/pycurl.pyi typeshed/third_party/2/pymssql.pyi typeshed/third_party/2/OpenSSL/__init__.pyi typeshed/third_party/2/OpenSSL/crypto.pyi typeshed/third_party/2/concurrent/__init__.pyi typeshed/third_party/2/concurrent/futures/__init__.pyi typeshed/third_party/2/cryptography/__init__.pyi typeshed/third_party/2/cryptography/hazmat/__init__.pyi typeshed/third_party/2/cryptography/hazmat/primitives/__init__.pyi typeshed/third_party/2/cryptography/hazmat/primitives/serialization.pyi typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/__init__.pyi typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/dsa.pyi typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/rsa.pyi typeshed/third_party/2/dateutil/__init__.pyi typeshed/third_party/2/dateutil/parser.pyi typeshed/third_party/2/dateutil/relativedelta.pyi typeshed/third_party/2/dateutil/tz/__init__.pyi typeshed/third_party/2/dateutil/tz/_common.pyi typeshed/third_party/2/dateutil/tz/tz.pyi typeshed/third_party/2/fb303/FacebookService.pyi typeshed/third_party/2/fb303/__init__.pyi typeshed/third_party/2/google/__init__.pyi typeshed/third_party/2/google/protobuf/__init__.pyi typeshed/third_party/2/google/protobuf/descriptor.pyi typeshed/third_party/2/google/protobuf/descriptor_pb2.pyi 
typeshed/third_party/2/google/protobuf/descriptor_pool.pyi typeshed/third_party/2/google/protobuf/message.pyi typeshed/third_party/2/google/protobuf/message_factory.pyi typeshed/third_party/2/google/protobuf/reflection.pyi typeshed/third_party/2/google/protobuf/symbol_database.pyi typeshed/third_party/2/google/protobuf/internal/__init__.pyi typeshed/third_party/2/google/protobuf/internal/decoder.pyi typeshed/third_party/2/google/protobuf/internal/encoder.pyi typeshed/third_party/2/google/protobuf/internal/enum_type_wrapper.pyi typeshed/third_party/2/google/protobuf/internal/wire_format.pyi typeshed/third_party/2/kazoo/__init__.pyi typeshed/third_party/2/kazoo/client.pyi typeshed/third_party/2/kazoo/exceptions.pyi typeshed/third_party/2/kazoo/recipe/__init__.pyi typeshed/third_party/2/kazoo/recipe/watchers.pyi typeshed/third_party/2/redis/__init__.pyi typeshed/third_party/2/redis/client.pyi typeshed/third_party/2/redis/connection.pyi typeshed/third_party/2/redis/exceptions.pyi typeshed/third_party/2/redis/utils.pyi typeshed/third_party/2/routes/__init__.pyi typeshed/third_party/2/routes/mapper.pyi typeshed/third_party/2/routes/util.pyi typeshed/third_party/2/scribe/__init__.pyi typeshed/third_party/2/scribe/scribe.pyi typeshed/third_party/2/scribe/ttypes.pyi typeshed/third_party/2/selenium/webdriver/remote/webdriver.pyi typeshed/third_party/2/selenium/webdriver/remote/webelement.pyi typeshed/third_party/2/simplejson/__init__.pyi typeshed/third_party/2/simplejson/decoder.pyi typeshed/third_party/2/simplejson/encoder.pyi typeshed/third_party/2/simplejson/scanner.pyi typeshed/third_party/2/six/__init__.pyi typeshed/third_party/2/six/moves/__init__.pyi typeshed/third_party/2/six/moves/urllib/__init__.pyi typeshed/third_party/2/six/moves/urllib/error.pyi typeshed/third_party/2/six/moves/urllib/parse.pyi typeshed/third_party/2/six/moves/urllib/request.pyi typeshed/third_party/2/six/moves/urllib/response.pyi typeshed/third_party/2/six/moves/urllib/robotparser.pyi typeshed/third_party/2/tornado/__init__.pyi typeshed/third_party/2/tornado/concurrent.pyi typeshed/third_party/2/tornado/gen.pyi typeshed/third_party/2/tornado/httpclient.pyi typeshed/third_party/2/tornado/httpserver.pyi typeshed/third_party/2/tornado/httputil.pyi typeshed/third_party/2/tornado/ioloop.pyi typeshed/third_party/2/tornado/locks.pyi typeshed/third_party/2/tornado/netutil.pyi typeshed/third_party/2/tornado/tcpserver.pyi typeshed/third_party/2/tornado/testing.pyi typeshed/third_party/2/tornado/util.pyi typeshed/third_party/2/tornado/web.pyi typeshed/third_party/2/werkzeug/__init__.pyi typeshed/third_party/2/werkzeug/_compat.pyi typeshed/third_party/2/werkzeug/_internal.pyi typeshed/third_party/2/werkzeug/_reloader.pyi typeshed/third_party/2/werkzeug/datastructures.pyi typeshed/third_party/2/werkzeug/exceptions.pyi typeshed/third_party/2/werkzeug/filesystem.pyi typeshed/third_party/2/werkzeug/formparser.pyi typeshed/third_party/2/werkzeug/http.pyi typeshed/third_party/2/werkzeug/local.pyi typeshed/third_party/2/werkzeug/posixemulation.pyi typeshed/third_party/2/werkzeug/routing.pyi typeshed/third_party/2/werkzeug/script.pyi typeshed/third_party/2/werkzeug/security.pyi typeshed/third_party/2/werkzeug/serving.pyi typeshed/third_party/2/werkzeug/test.pyi typeshed/third_party/2/werkzeug/testapp.pyi typeshed/third_party/2/werkzeug/urls.pyi typeshed/third_party/2/werkzeug/useragents.pyi typeshed/third_party/2/werkzeug/utils.pyi typeshed/third_party/2/werkzeug/wrappers.pyi typeshed/third_party/2/werkzeug/wsgi.pyi 
typeshed/third_party/2/werkzeug/contrib/__init__.pyi typeshed/third_party/2/werkzeug/contrib/atom.pyi typeshed/third_party/2/werkzeug/contrib/cache.pyi typeshed/third_party/2/werkzeug/contrib/fixers.pyi typeshed/third_party/2/werkzeug/contrib/iterio.pyi typeshed/third_party/2/werkzeug/contrib/jsrouting.pyi typeshed/third_party/2/werkzeug/contrib/limiter.pyi typeshed/third_party/2/werkzeug/contrib/lint.pyi typeshed/third_party/2/werkzeug/contrib/profiler.pyi typeshed/third_party/2/werkzeug/contrib/securecookie.pyi typeshed/third_party/2/werkzeug/contrib/sessions.pyi typeshed/third_party/2/werkzeug/contrib/testtools.pyi typeshed/third_party/2/werkzeug/contrib/wrappers.pyi typeshed/third_party/2/werkzeug/debug/__init__.pyi typeshed/third_party/2/werkzeug/debug/console.pyi typeshed/third_party/2/werkzeug/debug/repr.pyi typeshed/third_party/2/werkzeug/debug/tbtools.pyi typeshed/third_party/2and3/backports_abc.pyi typeshed/third_party/2and3/certifi.pyi typeshed/third_party/2and3/croniter.pyi typeshed/third_party/2and3/emoji.pyi typeshed/third_party/2and3/mypy_extensions.pyi typeshed/third_party/2and3/singledispatch.pyi typeshed/third_party/2and3/typing_extensions.pyi typeshed/third_party/2and3/ujson.pyi typeshed/third_party/2and3/Crypto/__init__.pyi typeshed/third_party/2and3/Crypto/pct_warnings.pyi typeshed/third_party/2and3/Crypto/Cipher/AES.pyi typeshed/third_party/2and3/Crypto/Cipher/ARC2.pyi typeshed/third_party/2and3/Crypto/Cipher/ARC4.pyi typeshed/third_party/2and3/Crypto/Cipher/Blowfish.pyi typeshed/third_party/2and3/Crypto/Cipher/CAST.pyi typeshed/third_party/2and3/Crypto/Cipher/DES.pyi typeshed/third_party/2and3/Crypto/Cipher/DES3.pyi typeshed/third_party/2and3/Crypto/Cipher/PKCS1_OAEP.pyi typeshed/third_party/2and3/Crypto/Cipher/PKCS1_v1_5.pyi typeshed/third_party/2and3/Crypto/Cipher/XOR.pyi typeshed/third_party/2and3/Crypto/Cipher/__init__.pyi typeshed/third_party/2and3/Crypto/Cipher/blockalgo.pyi typeshed/third_party/2and3/Crypto/Hash/HMAC.pyi typeshed/third_party/2and3/Crypto/Hash/MD2.pyi typeshed/third_party/2and3/Crypto/Hash/MD4.pyi typeshed/third_party/2and3/Crypto/Hash/MD5.pyi typeshed/third_party/2and3/Crypto/Hash/RIPEMD.pyi typeshed/third_party/2and3/Crypto/Hash/SHA.pyi typeshed/third_party/2and3/Crypto/Hash/SHA224.pyi typeshed/third_party/2and3/Crypto/Hash/SHA256.pyi typeshed/third_party/2and3/Crypto/Hash/SHA384.pyi typeshed/third_party/2and3/Crypto/Hash/SHA512.pyi typeshed/third_party/2and3/Crypto/Hash/__init__.pyi typeshed/third_party/2and3/Crypto/Hash/hashalgo.pyi typeshed/third_party/2and3/Crypto/Protocol/AllOrNothing.pyi typeshed/third_party/2and3/Crypto/Protocol/Chaffing.pyi typeshed/third_party/2and3/Crypto/Protocol/KDF.pyi typeshed/third_party/2and3/Crypto/Protocol/__init__.pyi typeshed/third_party/2and3/Crypto/PublicKey/DSA.pyi typeshed/third_party/2and3/Crypto/PublicKey/ElGamal.pyi typeshed/third_party/2and3/Crypto/PublicKey/RSA.pyi typeshed/third_party/2and3/Crypto/PublicKey/__init__.pyi typeshed/third_party/2and3/Crypto/PublicKey/pubkey.pyi typeshed/third_party/2and3/Crypto/Random/__init__.pyi typeshed/third_party/2and3/Crypto/Random/random.pyi typeshed/third_party/2and3/Crypto/Random/Fortuna/FortunaAccumulator.pyi typeshed/third_party/2and3/Crypto/Random/Fortuna/FortunaGenerator.pyi typeshed/third_party/2and3/Crypto/Random/Fortuna/SHAd256.pyi typeshed/third_party/2and3/Crypto/Random/Fortuna/__init__.pyi typeshed/third_party/2and3/Crypto/Random/OSRNG/__init__.pyi typeshed/third_party/2and3/Crypto/Random/OSRNG/fallback.pyi 
typeshed/third_party/2and3/Crypto/Random/OSRNG/posix.pyi typeshed/third_party/2and3/Crypto/Random/OSRNG/rng_base.pyi typeshed/third_party/2and3/Crypto/Signature/PKCS1_PSS.pyi typeshed/third_party/2and3/Crypto/Signature/PKCS1_v1_5.pyi typeshed/third_party/2and3/Crypto/Signature/__init__.pyi typeshed/third_party/2and3/Crypto/Util/Counter.pyi typeshed/third_party/2and3/Crypto/Util/RFC1751.pyi typeshed/third_party/2and3/Crypto/Util/__init__.pyi typeshed/third_party/2and3/Crypto/Util/asn1.pyi typeshed/third_party/2and3/Crypto/Util/number.pyi typeshed/third_party/2and3/Crypto/Util/randpool.pyi typeshed/third_party/2and3/Crypto/Util/strxor.pyi typeshed/third_party/2and3/atomicwrites/__init__.pyi typeshed/third_party/2and3/backports/__init__.pyi typeshed/third_party/2and3/backports/ssl_match_hostname.pyi typeshed/third_party/2and3/boto/__init__.pyi typeshed/third_party/2and3/boto/auth.pyi typeshed/third_party/2and3/boto/auth_handler.pyi typeshed/third_party/2and3/boto/compat.pyi typeshed/third_party/2and3/boto/connection.pyi typeshed/third_party/2and3/boto/exception.pyi typeshed/third_party/2and3/boto/plugin.pyi typeshed/third_party/2and3/boto/regioninfo.pyi typeshed/third_party/2and3/boto/utils.pyi typeshed/third_party/2and3/boto/ec2/__init__.pyi typeshed/third_party/2and3/boto/elb/__init__.pyi typeshed/third_party/2and3/boto/kms/__init__.pyi typeshed/third_party/2and3/boto/kms/exceptions.pyi typeshed/third_party/2and3/boto/kms/layer1.pyi typeshed/third_party/2and3/boto/s3/__init__.pyi typeshed/third_party/2and3/boto/s3/acl.pyi typeshed/third_party/2and3/boto/s3/bucket.pyi typeshed/third_party/2and3/boto/s3/bucketlistresultset.pyi typeshed/third_party/2and3/boto/s3/bucketlogging.pyi typeshed/third_party/2and3/boto/s3/connection.pyi typeshed/third_party/2and3/boto/s3/cors.pyi typeshed/third_party/2and3/boto/s3/deletemarker.pyi typeshed/third_party/2and3/boto/s3/key.pyi typeshed/third_party/2and3/boto/s3/keyfile.pyi typeshed/third_party/2and3/boto/s3/lifecycle.pyi typeshed/third_party/2and3/boto/s3/multidelete.pyi typeshed/third_party/2and3/boto/s3/multipart.pyi typeshed/third_party/2and3/boto/s3/prefix.pyi typeshed/third_party/2and3/boto/s3/tagging.pyi typeshed/third_party/2and3/boto/s3/user.pyi typeshed/third_party/2and3/boto/s3/website.pyi typeshed/third_party/2and3/characteristic/__init__.pyi typeshed/third_party/2and3/click/__init__.pyi typeshed/third_party/2and3/click/core.pyi typeshed/third_party/2and3/click/decorators.pyi typeshed/third_party/2and3/click/exceptions.pyi typeshed/third_party/2and3/click/formatting.pyi typeshed/third_party/2and3/click/globals.pyi typeshed/third_party/2and3/click/parser.pyi typeshed/third_party/2and3/click/termui.pyi typeshed/third_party/2and3/click/types.pyi typeshed/third_party/2and3/click/utils.pyi typeshed/third_party/2and3/jinja2/__init__.pyi typeshed/third_party/2and3/jinja2/_compat.pyi typeshed/third_party/2and3/jinja2/_stringdefs.pyi typeshed/third_party/2and3/jinja2/bccache.pyi typeshed/third_party/2and3/jinja2/compiler.pyi typeshed/third_party/2and3/jinja2/constants.pyi typeshed/third_party/2and3/jinja2/debug.pyi typeshed/third_party/2and3/jinja2/defaults.pyi typeshed/third_party/2and3/jinja2/environment.pyi typeshed/third_party/2and3/jinja2/exceptions.pyi typeshed/third_party/2and3/jinja2/ext.pyi typeshed/third_party/2and3/jinja2/filters.pyi typeshed/third_party/2and3/jinja2/lexer.pyi typeshed/third_party/2and3/jinja2/loaders.pyi typeshed/third_party/2and3/jinja2/meta.pyi typeshed/third_party/2and3/jinja2/nodes.pyi 
typeshed/third_party/2and3/jinja2/optimizer.pyi typeshed/third_party/2and3/jinja2/parser.pyi typeshed/third_party/2and3/jinja2/runtime.pyi typeshed/third_party/2and3/jinja2/sandbox.pyi typeshed/third_party/2and3/jinja2/tests.pyi typeshed/third_party/2and3/jinja2/utils.pyi typeshed/third_party/2and3/jinja2/visitor.pyi typeshed/third_party/2and3/markupsafe/__init__.pyi typeshed/third_party/2and3/markupsafe/_compat.pyi typeshed/third_party/2and3/markupsafe/_constants.pyi typeshed/third_party/2and3/markupsafe/_native.pyi typeshed/third_party/2and3/markupsafe/_speedups.pyi typeshed/third_party/2and3/pymysql/__init__.pyi typeshed/third_party/2and3/pymysql/charset.pyi typeshed/third_party/2and3/pymysql/connections.pyi typeshed/third_party/2and3/pymysql/converters.pyi typeshed/third_party/2and3/pymysql/cursors.pyi typeshed/third_party/2and3/pymysql/err.pyi typeshed/third_party/2and3/pymysql/times.pyi typeshed/third_party/2and3/pymysql/util.pyi typeshed/third_party/2and3/pymysql/constants/CLIENT.pyi typeshed/third_party/2and3/pymysql/constants/COMMAND.pyi typeshed/third_party/2and3/pymysql/constants/ER.pyi typeshed/third_party/2and3/pymysql/constants/FIELD_TYPE.pyi typeshed/third_party/2and3/pymysql/constants/FLAG.pyi typeshed/third_party/2and3/pymysql/constants/SERVER_STATUS.pyi typeshed/third_party/2and3/pymysql/constants/__init__.pyi typeshed/third_party/2and3/pynamodb/__init__.pyi typeshed/third_party/2and3/pynamodb/attributes.pyi typeshed/third_party/2and3/pynamodb/constants.pyi typeshed/third_party/2and3/pynamodb/exceptions.pyi typeshed/third_party/2and3/pynamodb/indexes.pyi typeshed/third_party/2and3/pynamodb/models.pyi typeshed/third_party/2and3/pynamodb/settings.pyi typeshed/third_party/2and3/pynamodb/throttle.pyi typeshed/third_party/2and3/pynamodb/types.pyi typeshed/third_party/2and3/pynamodb/connection/__init__.pyi typeshed/third_party/2and3/pynamodb/connection/base.pyi typeshed/third_party/2and3/pynamodb/connection/table.pyi typeshed/third_party/2and3/pynamodb/connection/util.pyi typeshed/third_party/2and3/pytz/__init__.pyi typeshed/third_party/2and3/pytz/lazy.pyi typeshed/third_party/2and3/requests/__init__.pyi typeshed/third_party/2and3/requests/adapters.pyi typeshed/third_party/2and3/requests/api.pyi typeshed/third_party/2and3/requests/auth.pyi typeshed/third_party/2and3/requests/compat.pyi typeshed/third_party/2and3/requests/cookies.pyi typeshed/third_party/2and3/requests/exceptions.pyi typeshed/third_party/2and3/requests/hooks.pyi typeshed/third_party/2and3/requests/models.pyi typeshed/third_party/2and3/requests/sessions.pyi typeshed/third_party/2and3/requests/status_codes.pyi typeshed/third_party/2and3/requests/structures.pyi typeshed/third_party/2and3/requests/utils.pyi typeshed/third_party/2and3/requests/packages/__init__.pyi typeshed/third_party/2and3/requests/packages/urllib3/__init__.pyi typeshed/third_party/2and3/requests/packages/urllib3/_collections.pyi typeshed/third_party/2and3/requests/packages/urllib3/connection.pyi typeshed/third_party/2and3/requests/packages/urllib3/connectionpool.pyi typeshed/third_party/2and3/requests/packages/urllib3/exceptions.pyi typeshed/third_party/2and3/requests/packages/urllib3/fields.pyi typeshed/third_party/2and3/requests/packages/urllib3/filepost.pyi typeshed/third_party/2and3/requests/packages/urllib3/poolmanager.pyi typeshed/third_party/2and3/requests/packages/urllib3/request.pyi typeshed/third_party/2and3/requests/packages/urllib3/response.pyi typeshed/third_party/2and3/requests/packages/urllib3/contrib/__init__.pyi 
typeshed/third_party/2and3/requests/packages/urllib3/packages/__init__.pyi typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi typeshed/third_party/2and3/requests/packages/urllib3/util/__init__.pyi typeshed/third_party/2and3/requests/packages/urllib3/util/connection.pyi typeshed/third_party/2and3/requests/packages/urllib3/util/request.pyi typeshed/third_party/2and3/requests/packages/urllib3/util/response.pyi typeshed/third_party/2and3/requests/packages/urllib3/util/retry.pyi typeshed/third_party/2and3/requests/packages/urllib3/util/ssl_.pyi typeshed/third_party/2and3/requests/packages/urllib3/util/timeout.pyi typeshed/third_party/2and3/requests/packages/urllib3/util/url.pyi typeshed/third_party/2and3/thrift/Thrift.pyi typeshed/third_party/2and3/thrift/__init__.pyi typeshed/third_party/2and3/thrift/protocol/TBinaryProtocol.pyi typeshed/third_party/2and3/thrift/protocol/TProtocol.pyi typeshed/third_party/2and3/thrift/protocol/__init__.pyi typeshed/third_party/2and3/thrift/transport/TSocket.pyi typeshed/third_party/2and3/thrift/transport/TTransport.pyi typeshed/third_party/2and3/thrift/transport/__init__.pyi typeshed/third_party/2and3/yaml/__init__.pyi typeshed/third_party/2and3/yaml/composer.pyi typeshed/third_party/2and3/yaml/constructor.pyi typeshed/third_party/2and3/yaml/dumper.pyi typeshed/third_party/2and3/yaml/emitter.pyi typeshed/third_party/2and3/yaml/error.pyi typeshed/third_party/2and3/yaml/events.pyi typeshed/third_party/2and3/yaml/loader.pyi typeshed/third_party/2and3/yaml/nodes.pyi typeshed/third_party/2and3/yaml/parser.pyi typeshed/third_party/2and3/yaml/reader.pyi typeshed/third_party/2and3/yaml/representer.pyi typeshed/third_party/2and3/yaml/resolver.pyi typeshed/third_party/2and3/yaml/scanner.pyi typeshed/third_party/2and3/yaml/serializer.pyi typeshed/third_party/2and3/yaml/tokens.pyi typeshed/third_party/3/enum.pyi typeshed/third_party/3/itsdangerous.pyi typeshed/third_party/3/pkg_resources.pyi typeshed/third_party/3/dateutil/__init__.pyi typeshed/third_party/3/dateutil/parser.pyi typeshed/third_party/3/dateutil/relativedelta.pyi typeshed/third_party/3/dateutil/tz/__init__.pyi typeshed/third_party/3/dateutil/tz/_common.pyi typeshed/third_party/3/dateutil/tz/tz.pyi typeshed/third_party/3/docutils/__init__.pyi typeshed/third_party/3/docutils/examples.pyi typeshed/third_party/3/docutils/nodes.pyi typeshed/third_party/3/docutils/parsers/__init__.pyi typeshed/third_party/3/docutils/parsers/rst/__init__.pyi typeshed/third_party/3/docutils/parsers/rst/nodes.pyi typeshed/third_party/3/docutils/parsers/rst/roles.pyi typeshed/third_party/3/docutils/parsers/rst/states.pyi typeshed/third_party/3/jwt/__init__.pyi typeshed/third_party/3/jwt/algorithms.pyi typeshed/third_party/3/jwt/contrib/__init__.pyi typeshed/third_party/3/jwt/contrib/algorithms/__init__.pyi typeshed/third_party/3/jwt/contrib/algorithms/py_ecdsa.pyi typeshed/third_party/3/jwt/contrib/algorithms/pycrypto.pyi typeshed/third_party/3/six/__init__.pyi typeshed/third_party/3/six/moves/__init__.pyi typeshed/third_party/3/six/moves/urllib/__init__.pyi typeshed/third_party/3/six/moves/urllib/error.pyi typeshed/third_party/3/six/moves/urllib/parse.pyi typeshed/third_party/3/six/moves/urllib/request.pyi typeshed/third_party/3/six/moves/urllib/response.pyi typeshed/third_party/3/six/moves/urllib/robotparser.pyi typeshed/third_party/3/typed_ast/__init__.pyi 
typeshed/third_party/3/typed_ast/ast27.pyi typeshed/third_party/3/typed_ast/ast3.pyi typeshed/third_party/3/typed_ast/conversions.pyi typeshed/third_party/3/werkzeug/__init__.pyi typeshed/third_party/3/werkzeug/_compat.pyi typeshed/third_party/3/werkzeug/_internal.pyi typeshed/third_party/3/werkzeug/_reloader.pyi typeshed/third_party/3/werkzeug/datastructures.pyi typeshed/third_party/3/werkzeug/exceptions.pyi typeshed/third_party/3/werkzeug/filesystem.pyi typeshed/third_party/3/werkzeug/formparser.pyi typeshed/third_party/3/werkzeug/http.pyi typeshed/third_party/3/werkzeug/local.pyi typeshed/third_party/3/werkzeug/posixemulation.pyi typeshed/third_party/3/werkzeug/routing.pyi typeshed/third_party/3/werkzeug/script.pyi typeshed/third_party/3/werkzeug/security.pyi typeshed/third_party/3/werkzeug/serving.pyi typeshed/third_party/3/werkzeug/test.pyi typeshed/third_party/3/werkzeug/testapp.pyi typeshed/third_party/3/werkzeug/urls.pyi typeshed/third_party/3/werkzeug/useragents.pyi typeshed/third_party/3/werkzeug/utils.pyi typeshed/third_party/3/werkzeug/wrappers.pyi typeshed/third_party/3/werkzeug/wsgi.pyi typeshed/third_party/3/werkzeug/contrib/__init__.pyi typeshed/third_party/3/werkzeug/contrib/atom.pyi typeshed/third_party/3/werkzeug/contrib/cache.pyi typeshed/third_party/3/werkzeug/contrib/fixers.pyi typeshed/third_party/3/werkzeug/contrib/iterio.pyi typeshed/third_party/3/werkzeug/contrib/jsrouting.pyi typeshed/third_party/3/werkzeug/contrib/limiter.pyi typeshed/third_party/3/werkzeug/contrib/lint.pyi typeshed/third_party/3/werkzeug/contrib/profiler.pyi typeshed/third_party/3/werkzeug/contrib/securecookie.pyi typeshed/third_party/3/werkzeug/contrib/sessions.pyi typeshed/third_party/3/werkzeug/contrib/testtools.pyi typeshed/third_party/3/werkzeug/contrib/wrappers.pyi typeshed/third_party/3/werkzeug/debug/__init__.pyi typeshed/third_party/3/werkzeug/debug/console.pyi typeshed/third_party/3/werkzeug/debug/repr.pyi typeshed/third_party/3/werkzeug/debug/tbtools.pyi xml/mypy-html.css xml/mypy-html.xslt xml/mypy-txt.xslt xml/mypy.xsdmypy-0.560/mypy.egg-info/top_level.txt0000644€tŠÔÚ€2›s®0000000000513215007242024105 0ustar jukkaDROPBOX\Domain Users00000000000000mypy mypy-0.560/mypy_self_check.ini0000644€tŠÔÚ€2›s®0000000056413215007205022541 0ustar jukkaDROPBOX\Domain Users00000000000000[mypy] disallow_untyped_defs = True disallow_subclassing_any = True warn_no_return = True strict_optional = True no_implicit_optional = True disallow_any_generics = True disallow_any_unimported = True warn_redundant_casts = True warn_unused_ignores = True warn_unused_configs = True # needs py2 compatibility [mypy-mypy.test.testextensions] disallow_untyped_defs = False mypy-0.560/PKG-INFO0000644€tŠÔÚ€2›s®0000000231413215007244017767 0ustar jukkaDROPBOX\Domain Users00000000000000Metadata-Version: 1.1 Name: mypy Version: 0.560 Summary: Optional static typing for Python Home-page: http://www.mypy-lang.org/ Author: Jukka Lehtosalo Author-email: jukka.lehtosalo@iki.fi License: MIT License Description-Content-Type: UNKNOWN Description: Mypy -- Optional Static Typing for Python ========================================= Add type annotations to your Python programs, and use mypy to type check them. Mypy is essentially a Python linter on steroids, and it can catch many programming errors by analyzing your program, without actually having to run it. Mypy has a powerful type system with features such as type inference, gradual typing, generics and union types. 
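The features named in that description are easiest to see in a tiny annotated snippet. The following sketch is illustrative only — it is not part of the package metadata, and the function names are hypothetical — showing what generics and union types look like in annotated code:

```python
from typing import List, Optional, TypeVar, Union

T = TypeVar('T')

def first(items: List[T]) -> Optional[T]:
    # Generics: T is bound per call, so first([1, 2]) is inferred as Optional[int].
    return items[0] if items else None

def render(value: Union[int, str]) -> str:
    # Union types: value may be an int or a str; mypy checks both branches.
    if isinstance(value, int):
        return str(value)
    return value
```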
Platform: POSIX Classifier: Development Status :: 3 - Alpha Classifier: Environment :: Console Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: POSIX Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Topic :: Software Development mypy-0.560/README.md0000644€tŠÔÚ€2›s®0000001734413215007205020157 0ustar jukkaDROPBOX\Domain Users00000000000000Mypy: Optional Static Typing for Python ======================================= [![Build Status](https://api.travis-ci.org/python/mypy.svg?branch=master)](https://travis-ci.org/python/mypy) [![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) Got a question? Join us on Gitter! ---------------------------------- We don't have a mailing list, but we are always happy to answer questions on [gitter chat](https://gitter.im/python/typing). If you are sure you've found a bug, please search our issue trackers for a duplicate before filing a new issue: - [mypy tracker](https://github.com/python/mypy/issues) for mypy issues - [typeshed tracker](https://github.com/python/typeshed/issues) for issues with specific modules - [typing tracker](https://github.com/python/typing/issues) for discussion of new type system features (PEP 484 changes) and runtime bugs in the typing module What is mypy? ------------- Mypy is an optional static type checker for Python. You can add type hints ([PEP 484](https://www.python.org/dev/peps/pep-0484/)) to your Python programs, and use mypy to type check them statically. Find bugs in your programs without even running them! You can mix dynamic and static typing in your programs. You can always fall back to dynamic typing when static typing is not convenient, such as for legacy code. Here is a small example to whet your appetite (Python 3): ```python from typing import Iterator def fib(n: int) -> Iterator[int]: a, b = 0, 1 while a < n: yield a a, b = b, a + b ``` See [the documentation](http://mypy.readthedocs.io/en/stable/introduction.html) for more examples. For Python 2.7, the standard annotations are written as comments: ```python def is_palindrome(s): # type: (str) -> bool return s == s[::-1] ``` See [the documentation for Python 2 support](http://mypy.readthedocs.io/en/latest/python2.html). Mypy is in development; some features are missing and there are bugs. See 'Development status' below. Requirements ------------ You need Python 3.4 or later to run mypy. You can have multiple Python versions (2.x and 3.x) installed on the same system without problems. 
In Ubuntu, Mint and Debian you can install Python 3 like this: $ sudo apt-get install python3 python3-pip For other Linux flavors, OS X and Windows, packages are available at http://www.python.org/getit/ Quick start ----------- Mypy can be installed using pip: $ python3 -m pip install -U mypy If you want to run the latest version of the code, you can install from git: $ python3 -m pip install -U git+git://github.com/python/mypy.git Now, if Python on your system is configured properly (else see "Troubleshooting" below), you can type-check the [statically typed parts] of a program like this: $ mypy PROGRAM You can always use a Python interpreter to run your statically typed programs, even if they have type errors: $ python3 PROGRAM [statically typed parts]: http://mypy.readthedocs.io/en/latest/basics.html#function-signatures IDE & Linter Integrations ------------------------- Mypy can be integrated into popular IDEs: * Vim: [vim-mypy](https://github.com/Integralist/vim-mypy) * Emacs: using [Flycheck](https://github.com/flycheck/) and [Flycheck-mypy](https://github.com/lbolla/emacs-flycheck-mypy) * Sublime Text: [SublimeLinter-contrib-mypy](https://github.com/fredcallaway/SublimeLinter-contrib-mypy) * Atom: [linter-mypy](https://atom.io/packages/linter-mypy) * PyCharm: PyCharm integrates [its own implementation of PEP 484](https://www.jetbrains.com/help/pycharm/2017.1/type-hinting-in-pycharm.html). Mypy can also be integrated into [Flake8] using [flake8-mypy]. [Flake8]: http://flake8.pycqa.org/ [flake8-mypy]: https://github.com/ambv/flake8-mypy Web site and documentation -------------------------- Documentation and additional information are available at the web site: http://www.mypy-lang.org/ Or you can jump straight to the documentation: http://mypy.readthedocs.io/ Troubleshooting --------------- Depending on your configuration, you may have to run `pip` like this: $ python3 -m pip install -U mypy This should automatically install the appropriate version of mypy's parser, typed-ast. If for some reason it does not, you can install it manually: $ python3 -m pip install -U typed-ast If the `mypy` command isn't found after installation: After `python3 -m pip install`, the `mypy` script and dependencies, including the `typing` module, will be installed to system-dependent locations. Sometimes the script directory will not be in `PATH`, and you have to add the target directory to `PATH` manually or create a symbolic link to the script. In particular, on Mac OS X, the script may be installed under `/Library/Frameworks`: /Library/Frameworks/Python.framework/Versions/<version>/bin In Windows, the script is generally installed in `\PythonNN\Scripts`. So, type-check a program like this (replace `\Python34` with your Python installation path): C:\>\Python34\python \Python34\Scripts\mypy PROGRAM ### Working with `virtualenv` If you are using [`virtualenv`](https://virtualenv.pypa.io/en/stable/), make sure you are running a python3 environment. Installing via `pip3` in a v2 environment will not configure the environment to run installed modules from the command line. 
$ python3 -m pip install -U virtualenv $ python3 -m virtualenv env Quick start for contributing to mypy ------------------------------------ If you want to contribute, first clone the mypy git repository: $ git clone --recurse-submodules https://github.com/python/mypy.git If you've already cloned the repo without `--recurse-submodules`, you need to pull in the typeshed repo as follows: $ git submodule init $ git submodule update Either way you should now have a subdirectory `typeshed` containing a clone of the typeshed repo (`https://github.com/python/typeshed`). From the mypy directory, use pip to install mypy: $ cd mypy $ python3 -m pip install -U . Replace `python3` with your Python 3 interpreter. You may have to do the above as root. For example, in Ubuntu: $ sudo python3 -m pip install -U . Now you can use the `mypy` program just as above. In case of trouble see "Troubleshooting" above. Working with the git version of mypy ------------------------------------ mypy contains a submodule, "typeshed". See http://github.com/python/typeshed. This submodule contains types for the Python standard library. Due to the way git submodules work, you'll have to do ``` git submodule update typeshed ``` whenever you change branches, merge, rebase, or pull. (It's possible to automate this: Search Google for "git hook update submodule") Tests ----- See [Test README.md](test-data/unit/README.md) Development status ------------------ Mypy is alpha software, but it has already been used in production for well over a year at Dropbox, and it has an extensive test suite. See [the roadmap](ROADMAP.md) if you are interested in plans for the future. Issue tracker ------------- Please report any bugs and enhancement ideas using the mypy issue tracker: https://github.com/python/mypy/issues Feel free to also ask questions on the tracker. Help wanted ----------- Any help in testing, development, documentation and other tasks is highly appreciated and useful to the project. There are tasks for contributors of all experience levels. If you're just getting started, ask on the [gitter chat](https://gitter.im/python/typing) for ideas of good beginner issues. For more details, see the file [CONTRIBUTING.md](CONTRIBUTING.md). License ------- Mypy is licensed under the terms of the MIT License (see the file LICENSE). mypy-0.560/runtests.py0000755€tŠÔÚ€2›s®0000003742113215007205021142 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 """Mypy test runner.""" from typing import Dict, List, Optional, Set, Iterable, Tuple from mypy.waiter import Waiter, LazySubprocess from mypy import util import itertools import os from os.path import join, isdir import sys def get_versions(): # type: () -> List[str] major = sys.version_info[0] minor = sys.version_info[1] if major == 2: return ['2.7'] else: # generates list of python versions to use. # For Python2, this is only [2.7]. # Otherwise, it is [3.4, 3.3, 3.2, 3.1, 3.0]. return ['%d.%d' % (major, i) for i in range(minor, -1, -1)] # Ideally, all tests would be `discover`able so that they can be driven # (and parallelized) by an external test driver. 
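# Illustrative note, not part of the original source: given the code above,
# get_versions() on CPython 3.6 would return
# ['3.6', '3.5', '3.4', '3.3', '3.2', '3.1', '3.0'] (the current minor version
# counted down to 0), while on CPython 2.7 it returns just ['2.7'].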
class Driver: def __init__(self, *, whitelist: List[str], blacklist: List[str], lf: bool, ff: bool, arglist: List[str], pyt_arglist: List[str], verbosity: int, parallel_limit: int, xfail: List[str], coverage: bool) -> None: self.whitelist = whitelist self.blacklist = blacklist self.arglist = arglist self.pyt_arglist = pyt_arglist self.verbosity = verbosity self.waiter = Waiter(verbosity=verbosity, limit=parallel_limit, xfail=xfail, lf=lf, ff=ff) self.versions = get_versions() self.cwd = os.getcwd() self.mypy = os.path.join(self.cwd, 'scripts', 'mypy') self.env = dict(os.environ) self.coverage = coverage def prepend_path(self, name: str, paths: List[str]) -> None: old_val = self.env.get(name) paths = [p for p in paths if isdir(p)] if not paths: return if old_val is not None: new_val = os.pathsep.join(itertools.chain(paths, [old_val])) else: new_val = os.pathsep.join(paths) self.env[name] = new_val def allow(self, name: str) -> bool: if any(f in name for f in self.whitelist): if not any(f in name for f in self.blacklist): if self.verbosity >= 2: print('SELECT #%d %s' % (len(self.waiter.queue), name)) return True if self.verbosity >= 3: print('OMIT %s' % name) return False def add_mypy_cmd(self, name: str, mypy_args: List[str], cwd: Optional[str] = None) -> None: full_name = 'check %s' % name if not self.allow(full_name): return args = [sys.executable, self.mypy] + mypy_args args.append('--show-traceback') self.waiter.add(LazySubprocess(full_name, args, cwd=cwd, env=self.env)) def add_mypy(self, name: str, *args: str, cwd: Optional[str] = None) -> None: self.add_mypy_cmd(name, list(args), cwd=cwd) def add_mypy_modules(self, name: str, modules: Iterable[str], cwd: Optional[str] = None) -> None: args = list(itertools.chain(*(['-m', mod] for mod in modules))) self.add_mypy_cmd(name, args, cwd=cwd) def add_mypy_package(self, name: str, packagename: str, *flags: str) -> None: self.add_mypy_cmd(name, ['-p', packagename] + list(flags)) def add_mypy_string(self, name: str, *args: str, cwd: Optional[str] = None) -> None: self.add_mypy_cmd(name, ['-c'] + list(args), cwd=cwd) def add_pytest(self, files: List[Tuple[str, str]], coverage: bool = True) -> None: pytest_files = [name for kind, name in files if self.allow('pytest {} {}'.format(kind, name))] if not pytest_files: return pytest_args = pytest_files + self.arglist + self.pyt_arglist if coverage and self.coverage: args = [sys.executable, '-m', 'pytest', '--cov=mypy'] + pytest_args else: args = [sys.executable, '-m', 'pytest'] + pytest_args self.waiter.add(LazySubprocess('pytest', args, env=self.env, passthrough=self.verbosity), sequential=True) def add_python(self, name: str, *args: str, cwd: Optional[str] = None) -> None: name = 'run %s' % name if not self.allow(name): return largs = list(args) largs[0:0] = [sys.executable] env = self.env self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env)) def add_python_mod(self, name: str, *args: str, cwd: Optional[str] = None, coverage: bool = False) -> None: name = 'run %s' % name if not self.allow(name): return largs = list(args) if coverage and self.coverage: largs[0:0] = ['coverage', 'run', '-m'] else: largs[0:0] = [sys.executable, '-m'] env = self.env self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env)) def add_python_string(self, name: str, *args: str, cwd: Optional[str] = None) -> None: name = 'run %s' % name if not self.allow(name): return largs = list(args) largs[0:0] = [sys.executable, '-c'] env = self.env self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env)) def 
add_python2(self, name: str, *args: str, cwd: Optional[str] = None) -> None: name = 'run2 %s' % name if not self.allow(name): return largs = list(args) python2 = util.try_find_python2_interpreter() assert python2, "Couldn't find a Python 2.7 interpreter" largs[0:0] = [python2] env = self.env self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env)) def add_flake8(self, cwd: Optional[str] = None) -> None: name = 'lint' if not self.allow(name): return largs = ['flake8', '-j0'] env = self.env self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env)) def list_tasks(self) -> None: for id, task in enumerate(self.waiter.queue): print('{id}:{task}'.format(id=id, task=task.name)) def add_basic(driver: Driver) -> None: if False: driver.add_mypy('file setup.py', 'setup.py') driver.add_mypy('file runtests.py', 'runtests.py') driver.add_mypy('legacy entry script', 'scripts/mypy') driver.add_mypy('legacy myunit script', 'scripts/myunit') # needs typed_ast installed: driver.add_mypy('fast-parse', '--fast-parse', 'test-data/samples/hello.py') def add_selftypecheck(driver: Driver) -> None: driver.add_mypy_package('package mypy', 'mypy', '--config-file', 'mypy_self_check.ini') def find_files(base: str, prefix: str = '', suffix: str = '') -> List[str]: return [join(root, f) for root, dirs, files in os.walk(base) for f in files if f.startswith(prefix) and f.endswith(suffix)] def file_to_module(file: str) -> str: rv = os.path.splitext(file)[0].replace(os.sep, '.') if rv.endswith('.__init__'): rv = rv[:-len('.__init__')] return rv def add_imports(driver: Driver) -> None: # Make sure each module can be imported originally. # There is currently a bug in mypy where a module can pass typecheck # because of *implicit* imports from other modules. for f in find_files('mypy', suffix='.py'): mod = file_to_module(f) if not mod.endswith('.__main__'): driver.add_python_string('import %s' % mod, 'import %s' % mod) def test_path(*names: str): return [os.path.join('mypy', 'test', '{}.py'.format(name)) for name in names] PYTEST_FILES = test_path( 'testcheck', 'testdmypy', 'testextensions', 'testdeps', 'testdiff', 'testfinegrained', 'testmerge', 'testtransform', 'testtypegen', 'testparse', 'testsemanal' ) SLOW_FILES = test_path( 'testpythoneval', 'testcmdline', 'teststubgen', ) MYUNIT_FILES = test_path( 'teststubgen', 'testargs', 'testgraph', 'testinfer', 'testmoduleinfo', 'testreports', 'testsolve', 'testsubtypes', 'testtypes', ) for f in find_files('mypy', prefix='test', suffix='.py'): assert f in PYTEST_FILES + SLOW_FILES + MYUNIT_FILES, f def add_pytest(driver: Driver) -> None: driver.add_pytest([('unit-test', name) for name in PYTEST_FILES] + [('integration', name) for name in SLOW_FILES]) def add_myunit(driver: Driver) -> None: for f in MYUNIT_FILES: mod = file_to_module(f) driver.add_python_mod('myunit unit-test %s' % mod, 'mypy.myunit', '-m', mod, *driver.arglist, coverage=True) def add_stubs(driver: Driver) -> None: # We only test each module in the one version mypy prefers to find. # TODO: test stubs for other versions, especially Python 2 stubs. modules = set() # type: Set[str] modules.add('typing') # TODO: This should also test Python 2, and pass pyversion accordingly. 
for version in ["2and3", "3", "3.3", "3.4", "3.5"]: for stub_type in ['builtins', 'stdlib', 'third_party']: stubdir = join('typeshed', stub_type, version) for f in find_files(stubdir, suffix='.pyi'): module = file_to_module(f[len(stubdir) + 1:]) modules.add(module) driver.add_mypy_modules('stubs', sorted(modules)) def add_stdlibsamples(driver: Driver) -> None: seen = set() # type: Set[str] for version in driver.versions: stdlibsamples_dir = join(driver.cwd, 'test-data', 'stdlib-samples', version) modules = [] # type: List[str] for f in find_files(stdlibsamples_dir, prefix='test_', suffix='.py'): module = file_to_module(f[len(stdlibsamples_dir) + 1:]) if module not in seen: seen.add(module) modules.append(module) if modules: driver.add_mypy_modules('stdlibsamples (%s)' % (version,), modules, cwd=stdlibsamples_dir) def add_samples(driver: Driver) -> None: for f in find_files(os.path.join('test-data', 'samples'), suffix='.py'): driver.add_mypy('file %s' % f, f) def usage(status: int) -> None: print('Usage: %s [-h | -v | -q | --lf | --ff | [-x] FILTER | -a ARG | -p ARG]' '... [-- FILTER ...]' % sys.argv[0]) print() print('Run mypy tests. If given no arguments, run all tests.') print() print('Examples:') print(' %s unit-test (run unit tests only)' % sys.argv[0]) print(' %s testcheck (run type checking unit tests only)' % sys.argv[0]) print(' %s "pytest unit-test" -a -k -a Tuple' % sys.argv[0]) print(' (run all pytest unit tests with "Tuple" in test name)') print() print('You can also run pytest directly without using %s:' % sys.argv[0]) print(' pytest mypy/test/testcheck.py -k Tuple') print() print('Options:') print(' -h, --help show this help') print(' -v, --verbose increase driver verbosity') print(' --lf rerun only the tests that failed at the last run') print(' --ff run all tests but run the last failures first') print(' -q, --quiet decrease driver verbosity') print(' -jN run N tasks at once (default: one per CPU)') print(' -a, --argument ARG pass an argument to myunit tasks') print(' -p, --pytest_arg ARG pass an argument to pytest tasks') print(' (-v: verbose; glob pattern: filter by test name)') print(' -l, --list list included tasks (after filtering) and exit') print(' FILTER include tasks matching FILTER') print(' -x, --exclude FILTER exclude tasks matching FILTER') print(' -c, --coverage calculate code coverage while running tests') print(' -- treat all remaining arguments as positional') sys.exit(status) def sanity() -> None: paths = os.getenv('PYTHONPATH') if paths is None: return failed = False for p in paths.split(os.pathsep): if not os.path.isabs(p): print('Relative PYTHONPATH entry %r' % p) failed = True if failed: print('Please use absolute so that chdir() tests can work.') print('Cowardly refusing to continue.') sys.exit(1) def main() -> None: import time t0 = time.perf_counter() sanity() verbosity = 0 parallel_limit = 0 whitelist = [] # type: List[str] blacklist = [] # type: List[str] arglist = [] # type: List[str] pyt_arglist = [] # type: List[str] lf = False ff = False list_only = False coverage = False allow_opts = True curlist = whitelist for a in sys.argv[1:]: if not (curlist is arglist or curlist is pyt_arglist) and allow_opts and a.startswith('-'): if curlist is not whitelist: break if a == '--': allow_opts = False elif a == '-v' or a == '--verbose': verbosity += 1 elif a == '-q' or a == '--quiet': verbosity -= 1 elif a.startswith('-j'): try: parallel_limit = int(a[2:]) except ValueError: usage(1) elif a == '-x' or a == '--exclude': curlist = blacklist elif a == '-a' or 
a == '--argument': curlist = arglist elif a == '-p' or a == '--pytest_arg': curlist = pyt_arglist # will also pass this option to pytest elif a == '--lf': lf = True # will also pass this option to pytest elif a == '--ff': ff = True elif a == '-l' or a == '--list': list_only = True elif a == '-c' or a == '--coverage': coverage = True elif a == '-h' or a == '--help': usage(0) else: usage(1) else: curlist.append(a) curlist = whitelist if curlist is blacklist: sys.exit('-x must be followed by a filter') if curlist is arglist: sys.exit('-a must be followed by an argument') if curlist is pyt_arglist: sys.exit('-p must be followed by an argument') if lf and ff: sys.exit('use either --lf or --ff, not both') # empty string is a substring of all names if not whitelist: whitelist.append('') if lf: pyt_arglist.append('--lf') if ff: pyt_arglist.append('--ff') if verbosity >= 1: pyt_arglist.extend(['-v'] * verbosity) elif verbosity < 0: pyt_arglist.extend(['-q'] * (-verbosity)) if parallel_limit: if '-n' not in pyt_arglist: pyt_arglist.append('-n{}'.format(parallel_limit)) driver = Driver(whitelist=whitelist, blacklist=blacklist, lf=lf, ff=ff, arglist=arglist, pyt_arglist=pyt_arglist, verbosity=verbosity, parallel_limit=parallel_limit, xfail=[], coverage=coverage) driver.prepend_path('PATH', [join(driver.cwd, 'scripts')]) driver.prepend_path('MYPYPATH', [driver.cwd]) driver.prepend_path('PYTHONPATH', [driver.cwd]) driver.add_flake8() add_pytest(driver) add_basic(driver) add_selftypecheck(driver) add_myunit(driver) add_imports(driver) add_stubs(driver) add_stdlibsamples(driver) add_samples(driver) if list_only: driver.list_tasks() return exit_code = driver.waiter.run() t1 = time.perf_counter() print('total runtime:', t1 - t0, 'sec') if verbosity >= 1: times = driver.waiter.times2 if verbosity >= 2 else driver.waiter.times1 times_sortable = ((t, tp) for (tp, t) in times.items()) for total_time, test_type in sorted(times_sortable, reverse=True): print('total time in %s: %f' % (test_type, total_time)) sys.exit(exit_code) if __name__ == '__main__': main() mypy-0.560/scripts/0000755€tŠÔÚ€2›s®0000000000013215007242020357 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/scripts/dmypy0000755€tŠÔÚ€2›s®0000000064613215007205021454 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 """Mypy daemon client. This is just a wrapper script. Look at mypy/dmypy.py for the actual implementation. """ import os import os.path import sys file_dir = os.path.dirname(__file__) parent_dir = os.path.join(file_dir, os.pardir) if os.path.exists(os.path.join(parent_dir, '.git')): # We are running from a git clone. sys.path.insert(0, parent_dir) import mypy.dmypy mypy.dmypy.main() mypy-0.560/scripts/dumpmodule.py0000644€tŠÔÚ€2›s®0000000760613215007205023114 0ustar jukkaDROPBOX\Domain Users00000000000000"""Dump the runtime structure of a module as JSON. This is used for testing stubs. This needs to run in Python 2.7 and 3.x. """ from __future__ import print_function import importlib import json import sys import types from typing import Text from collections import defaultdict if sys.version_info >= (3, 0): import inspect long = int else: import inspect2 as inspect def dump_module(id): m = importlib.import_module(id) data = module_to_json(m) print(json.dumps(data, ensure_ascii=True, indent=4, sort_keys=True)) def module_to_json(m): result = {} for name, value in m.__dict__.items(): # Filter out some useless attributes. 
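# Editor's sketch (not part of dumpmodule.py): the idea used above -- walk a
# module's __dict__, skip the bookkeeping dunder attributes, and record what
# is left -- in a small standalone form.  The name `toy_dump` is purely
# illustrative and does not exist in this repository.
import importlib
import json

def toy_dump(module_name):
    mod = importlib.import_module(module_name)
    skipped = ('__file__', '__doc__', '__name__', '__builtins__', '__package__')
    # Map each remaining top-level name to the name of its runtime type.
    return {name: type(value).__name__
            for name, value in mod.__dict__.items()
            if name not in skipped}

if __name__ == '__main__':
    print(json.dumps(toy_dump('json'), indent=2, sort_keys=True))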
if name in ('__file__', '__doc__', '__name__', '__builtins__', '__package__'): continue if name == '__all__': result[name] = {'type': 'list', 'values': sorted(value)} else: result[name] = dump_value(value) try: _, line = inspect.getsourcelines(getattr(m, name)) except (TypeError, OSError): line = None result[name]['line'] = line return result def dump_value(value, depth=0): if depth > 10: return 'max_recursion_depth_exceeded' if isinstance(value, type): return dump_class(value, depth + 1) if inspect.isfunction(value): return dump_function(value) if callable(value): return {'type': 'callable'} # TODO more information if isinstance(value, types.ModuleType): return {'type': 'module'} # TODO module name if inspect.isdatadescriptor(value): return {'type': 'datadescriptor'} if inspect.ismemberdescriptor(value): return {'type': 'memberdescriptor'} return dump_simple(value) def dump_simple(value): if type(value) in (int, bool, float, str, bytes, Text, long, list, set, dict, tuple): return {'type': type(value).__name__} if value is None: return {'type': 'None'} if value is inspect.Parameter.empty: return {'type': None} # 'None' and None: Ruh-Roh return {'type': 'unknown'} def dump_class(value, depth): return { 'type': 'class', 'attributes': dump_attrs(value, depth), } special_methods = [ '__init__', '__str__', '__int__', '__float__', '__bool__', '__contains__', '__iter__', ] # Change to return a dict def dump_attrs(d, depth): result = {} seen = set() try: mro = d.mro() except TypeError: mro = [d] for base in mro: v = vars(base) for name, value in v.items(): if name not in seen: result[name] = dump_value(value, depth + 1) seen.add(name) for m in special_methods: if hasattr(d, m) and m not in seen: result[m] = dump_value(getattr(d, m), depth + 1) return result kind_map = { inspect.Parameter.POSITIONAL_ONLY: 'POS_ONLY', inspect.Parameter.POSITIONAL_OR_KEYWORD: 'POS_OR_KW', inspect.Parameter.VAR_POSITIONAL: 'VAR_POS', inspect.Parameter.KEYWORD_ONLY: 'KW_ONLY', inspect.Parameter.VAR_KEYWORD: 'VAR_KW', } def param_kind(p): s = kind_map[p.kind] if p.default != inspect.Parameter.empty: assert s in ('POS_ONLY', 'POS_OR_KW', 'KW_ONLY') s += '_OPT' return s def dump_function(value): try: sig = inspect.signature(value) except ValueError: # The signature call sometimes fails for some reason. return {'type': 'invalid_signature'} params = list(sig.parameters.items()) return { 'type': 'function', 'args': [(name, param_kind(p), dump_simple(p.default)) for name, p in params], } if __name__ == '__main__': import sys if len(sys.argv) != 2: sys.exit('usage: dumpmodule.py module-name') dump_module(sys.argv[1]) mypy-0.560/scripts/find_type.py0000755€tŠÔÚ€2›s®0000000663713215007205022730 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 # Usage: find_type.py FILENAME START_LINE START_COL END_LINE END_COL MYPY_AND_ARGS # Prints out the type of the expression in the given location if the mypy run # succeeds cleanly. Otherwise, prints out the errors encountered. # Note: this only works on expressions, and not assignment targets. # Note: MYPY_AND_ARGS is should be the remainder of argv, not a single # spaces-included argument. # NOTE: Line numbers are 1-based; column numbers are 0-based. # # # Example vim usage: # function RevealType() # " Set this to the command you use to run mypy on your project. Include the mypy invocation. 
# let mypycmd = 'python3 -m mypy mypy --incremental' # let [startline, startcol] = getpos("'<")[1:2] # let [endline, endcol] = getpos("'>")[1:2] # " Convert to 0-based column offsets # let startcol = startcol - 1 # " Change this line to point to the find_type.py script. # execute '!python3 /path/to/mypy/scripts/find_type.py % ' . startline . ' ' . startcol . ' ' . endline . ' ' . endcol . ' ' . mypycmd # endfunction # vnoremap t :call RevealType() # # For an Emacs example, see misc/macs.el. from typing import List, Tuple, Optional import subprocess import sys import tempfile import os.path import re REVEAL_TYPE_START = 'reveal_type(' REVEAL_TYPE_END = ')' def update_line(line: str, s: str, pos: int) -> str: return line[:pos] + s + line[pos:] def run_mypy(mypy_and_args: List[str], filename: str, tmp_name: str) -> str: proc = subprocess.run(mypy_and_args + ['--shadow-file', filename, tmp_name], stdout=subprocess.PIPE) assert(isinstance(proc.stdout, bytes)) # Guaranteed to be true because we called run with universal_newlines=False return proc.stdout.decode(encoding="utf-8") def get_revealed_type(line: str, relevant_file: str, relevant_line: int) -> Optional[str]: m = re.match("(.+?):(\d+): error: Revealed type is '(.*)'$", line) if (m and int(m.group(2)) == relevant_line and os.path.samefile(relevant_file, m.group(1))): return m.group(3) else: return None def process_output(output: str, filename: str, start_line: int) -> Tuple[Optional[str], bool]: error_found = False for line in output.splitlines(): t = get_revealed_type(line, filename, start_line) if t: return t, error_found elif 'error:' in line: error_found = True return None, True # finding no reveal_type is an error def main(): filename, start_line_str, start_col_str, end_line_str, end_col_str, *mypy_and_args = sys.argv[1:] start_line = int(start_line_str) start_col = int(start_col_str) end_line = int(end_line_str) end_col = int(end_col_str) with open(filename, 'r') as f: lines = f.readlines() lines[end_line - 1] = update_line(lines[end_line - 1], REVEAL_TYPE_END, end_col) # insert after end_col lines[start_line - 1] = update_line(lines[start_line - 1], REVEAL_TYPE_START, start_col) with tempfile.NamedTemporaryFile(mode='w', prefix='mypy') as tmp_f: tmp_f.writelines(lines) tmp_f.flush() output = run_mypy(mypy_and_args, filename, tmp_f.name) revealed_type, error = process_output(output, filename, start_line) if revealed_type: print(revealed_type) if error: print(output) exit(int(error)) if __name__ == "__main__": main() mypy-0.560/scripts/finegrained.py0000644€tŠÔÚ€2›s®0000000574413215007205023215 0ustar jukkaDROPBOX\Domain Users00000000000000"""Prototype for using fine-grained incremental checking interactively. 
Usage: - first start it $ finegrained.py - it now waits for user input - an empty line performs an incremental step - 'q' exits """ import glob import sys import os from typing import Tuple, List, Dict, Optional from mypy import build from mypy.build import BuildManager, Graph from mypy.main import expand_dir from mypy.options import Options from mypy.errors import CompileError from mypy.server.update import FineGrainedBuildManager def main() -> None: if len(sys.argv) != 2 or not os.path.isdir(sys.argv[1]): usage() target_dir = sys.argv[1] messages, manager, graph = build_dir(target_dir) for message in messages: sys.stdout.write(message + '\n') fine_grained_manager = FineGrainedBuildManager(manager, graph) ts = timestamps(target_dir) while True: inp = input('>>> ').strip() if inp.startswith('q'): sys.exit(0) if inp != '': print("Press enter to perform type checking; enter 'q' to quit") continue new_ts = timestamps(target_dir) changed = find_changed_module(ts, new_ts) ts = new_ts if not changed: print('[nothing changed]') continue print('[update {}]'.format(changed[0])) messages = fine_grained_manager.update([changed]) for message in messages: sys.stdout.write(message + '\n') def find_changed_module(old_ts: Dict[str, Tuple[float, str]], new_ts: Dict[str, Tuple[float, str]]) -> Optional[Tuple[str, str]]: for module_id in new_ts: if module_id not in old_ts or new_ts[module_id] != old_ts[module_id]: # Modified or created return (module_id, new_ts[module_id][1]) for module_id in old_ts: if module_id not in new_ts: # Deleted return (module_id, old_ts[module_id][1]) return None def build_dir(target_dir: str) -> Tuple[List[str], BuildManager, Graph]: sources = expand_dir(target_dir) options = Options() options.incremental = True options.show_traceback = True options.cache_dir = os.devnull try: result = build.build(sources=sources, options=options) except CompileError as e: # TODO: We need a manager and a graph in this case as well assert False, str('\n'.join(e.messages)) return e.messages, None, None return result.errors, result.manager, result.graph def timestamps(target_dir: str) -> Dict[str, Tuple[float, str]]: paths = glob.glob('%s/**/*.py' % target_dir) + glob.glob('%s/*.py' % target_dir) result = {} for path in paths: mod = path[:-3].replace('/', '.') result[mod] = (os.stat(path).st_mtime, path) return result def usage() -> None: print('usage: finegrained.py DIRECTORY') sys.exit(1) if __name__ == '__main__': try: main() except EOFError: print('^D') mypy-0.560/scripts/mypy0000755€tŠÔÚ€2›s®0000000015613215007205021304 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 """Mypy type checker command line tool.""" from mypy.main import main main(__file__) mypy-0.560/scripts/mypy.bat0000644€tŠÔÚ€2›s®0000000021413215007205022041 0ustar jukkaDROPBOX\Domain Users00000000000000@echo off setlocal if exist "%~dp0\python.exe" ( "%~dp0\python" "%~dp0mypy" %* ) else ( "%~dp0..\python" "%~dp0mypy" %* ) endlocal mypy-0.560/scripts/myunit0000755€tŠÔÚ€2›s®0000000025513215007205021633 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 """Myunit test runner command line tool. Usually used as a slave by runtests.py, but can be used directly. """ from mypy.myunit import main main() mypy-0.560/scripts/stubgen0000644€tŠÔÚ€2›s®0000000073213215007205021752 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 """Generator of dynamically typed draft stubs for arbitrary modules. This is just a wrapper script. Look at mypy/stubgen.py for the actual implementation. 
""" import os import os.path import sys file_dir = os.path.dirname(__file__) parent_dir = os.path.join(file_dir, os.pardir) if os.path.exists(os.path.join(parent_dir, '.git')): # We are running from a git clone. sys.path.insert(0, parent_dir) import mypy.stubgen mypy.stubgen.main() mypy-0.560/scripts/stubtest.py0000644€tŠÔÚ€2›s®0000001425013215007205022607 0ustar jukkaDROPBOX\Domain Users00000000000000"""Tests for stubs. Verify that various things in stubs are consistent with how things behave at runtime. """ import importlib import sys from typing import Dict, Any, List from collections import defaultdict, namedtuple from mypy import build from mypy.build import default_data_dir, default_lib_path, find_modules_recursive from mypy.errors import CompileError from mypy import nodes from mypy.options import Options import dumpmodule if sys.version_info < (3, 4): from singledispatch import singledispatch else: from functools import singledispatch # TODO: email.contentmanager has a symbol table with a None node. # This seems like it should not be. skip = { '_importlib_modulespec', '_subprocess', 'distutils.command.bdist_msi', 'distutils.command.bdist_packager', 'msvcrt', 'wsgiref.types', 'mypy_extensions', 'unittest.mock', # mock.call infinite loops on inspect.getsourcelines # https://bugs.python.org/issue25532 # TODO: can we filter only call? } messages = { 'not_in_runtime': ('{error.stub_type} "{error.name}" defined at line ' ' {error.line} in stub but is not defined at runtime'), 'not_in_stub': ('{error.module_type} "{error.name}" defined at line' ' {error.line} at runtime but is not defined in stub'), 'no_stubs': 'could not find typeshed {error.name}', 'inconsistent': ('"{error.name}" is {error.stub_type} in stub but' ' {error.module_type} at runtime'), } Error = namedtuple('Error', ( 'module', 'name', 'error_type', 'line', 'stub_type', 'module_type')) def test_stub(name: str): stubs = { mod: stub for mod, stub in build_stubs(name).items() if (mod == name or mod.startswith(name + '.')) and mod not in skip } for mod, stub in stubs.items(): instance = dump_module(mod) for identifiers, *error in verify(stub, instance): yield Error(mod, '.'.join(identifiers), *error) @singledispatch def verify(node, module_node): raise TypeError('unknown mypy node ' + str(node)) @verify.register(nodes.MypyFile) def verify_mypyfile(stub, instance): if instance is None: yield [], 'not_in_runtime', stub.line, type(stub), None elif instance['type'] != 'file': yield [], 'inconsistent', stub.line, type(stub), instance['type'] else: stub_children = defaultdict(lambda: None, stub.names) instance_children = defaultdict(lambda: None, instance['names']) # TODO: I would rather not filter public children here. 
# For example, what if the checkersurfaces an inconsistency # in the typing of a private child public_nodes = { name: (stub_children[name], instance_children[name]) for name in set(stub_children) | set(instance_children) if not name.startswith('_') and (stub_children[name] is None or stub_children[name].module_public) } for node, (stub_child, instance_child) in public_nodes.items(): stub_child = getattr(stub_child, 'node', None) for identifiers, *error in verify(stub_child, instance_child): yield ([node] + identifiers, *error) @verify.register(nodes.TypeInfo) def verify_typeinfo(stub, instance): if not instance: yield [], 'not_in_runtime', stub.line, type(stub), None elif instance['type'] != 'class': yield [], 'inconsistent', stub.line, type(stub), instance['type'] else: for attr, attr_node in stub.names.items(): subdump = instance['attributes'].get(attr, None) for identifiers, *error in verify(attr_node.node, subdump): yield ([attr] + identifiers, *error) @verify.register(nodes.FuncItem) def verify_funcitem(stub, instance): if not instance: yield [], 'not_in_runtime', stub.line, type(stub), None elif 'type' not in instance or instance['type'] not in ('function', 'callable'): yield [], 'inconsistent', stub.line, type(stub), instance['type'] # TODO check arguments and return value @verify.register(type(None)) def verify_none(stub, instance): if instance is None: yield [], 'not_in_stub', None, None, None else: yield [], 'not_in_stub', instance['line'], None, instance['type'] @verify.register(nodes.Var) def verify_var(node, module_node): if False: yield None # Need to check if types are inconsistent. #if 'type' not in dump or dump['type'] != node.node.type: # import ipdb; ipdb.set_trace() # yield name, 'inconsistent', node.node.line, shed_type, module_type @verify.register(nodes.OverloadedFuncDef) def verify_overloadedfuncdef(node, module_node): # Should check types of the union of the overloaded types. 
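# Editor's note -- an illustrative, standalone sketch (not code from
# stubtest.py) of the dispatch pattern used above: @singledispatch selects a
# verifier from the type of the stub node, and each verifier is a generator
# that yields error tuples.  StubFunc, StubClass and toy_verify are
# hypothetical stand-ins for the mypy node classes the real script registers.
from functools import singledispatch

class StubFunc: pass
class StubClass: pass

@singledispatch
def toy_verify(node, runtime):
    raise TypeError('unknown node ' + str(node))

@toy_verify.register(StubFunc)
def _(node, runtime):
    if runtime is None:
        yield ('not_in_runtime',)

@toy_verify.register(StubClass)
def _(node, runtime):
    if runtime is None:
        yield ('not_in_runtime',)
    elif runtime.get('type') != 'class':
        yield ('inconsistent', runtime.get('type'))

print(list(toy_verify(StubFunc(), None)))                    # [('not_in_runtime',)]
print(list(toy_verify(StubClass(), {'type': 'function'})))   # [('inconsistent', 'function')]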
if False: yield None @verify.register(nodes.TypeVarExpr) def verify_typevarexpr(node, module_node): if False: yield None @verify.register(nodes.Decorator) def verify_decorator(node, module_noode): if False: yield None def dump_module(name: str) -> Dict[str, Any]: mod = importlib.import_module(name) return {'type': 'file', 'names': dumpmodule.module_to_json(mod)} def build_stubs(mod): data_dir = default_data_dir(None) options = Options() options.python_version = (3, 6) lib_path = default_lib_path(data_dir, options.python_version, custom_typeshed_dir=None) sources = find_modules_recursive(mod, lib_path) try: res = build.build(sources=sources, options=options) messages = res.errors except CompileError as error: messages = error.messages if messages: for msg in messages: print(msg) sys.exit(1) return res.files def main(args): if len(args) == 1: print('must provide at least one module to test') sys.exit(1) else: modules = args[1:] for module in modules: for error in test_stub(module): yield error if __name__ == '__main__': for err in main(sys.argv): print(messages[err.error_type].format(error=err)) mypy-0.560/setup.cfg0000644€tŠÔÚ€2›s®0000000063013215007244020512 0ustar jukkaDROPBOX\Domain Users00000000000000[flake8] max-line-length = 99 exclude = build, @*, env, docs/build, out, .venv, .mypy_cache, .cache, docs/source/conf.py, misc/*, pinfer/*, scripts/*, test-data/*, typeshed/*, tmp-test-dirs/* ignore = E251,E128,F401,W601,E701,W503,E704,E402,B3,B006,B007 [coverage:run] branch = true source = mypy parallel = true [coverage:report] show_missing = true [egg_info] tag_build = tag_date = 0 mypy-0.560/setup.py0000644€tŠÔÚ€2›s®0000000717513215007205020413 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python import glob import os import os.path import sys if sys.version_info < (3, 4, 0): sys.stderr.write("ERROR: You need Python 3.4 or later to use mypy.\n") exit(1) # This requires setuptools when building; setuptools is not needed # when installing from a wheel file (though it is still neeeded for # alternative forms of installing, as suggested by README.md). from setuptools import setup from setuptools.command.build_py import build_py from mypy.version import base_version, __version__ from mypy import git git.verify_git_integrity_or_abort(".") if any(dist_arg in sys.argv[1:] for dist_arg in ('bdist_wheel', 'sdist')): version = base_version else: version = __version__ description = 'Optional static typing for Python' long_description = ''' Mypy -- Optional Static Typing for Python ========================================= Add type annotations to your Python programs, and use mypy to type check them. Mypy is essentially a Python linter on steroids, and it can catch many programming errors by analyzing your program, without actually having to run it. Mypy has a powerful type system with features such as type inference, gradual typing, generics and union types. '''.lstrip() def find_data_files(base, globs): """Find all interesting data files, for setup(data_files=) Arguments: root: The directory to search in. globs: A list of glob patterns to accept files. 
""" rv_dirs = [root for root, dirs, files in os.walk(base)] rv = [] for rv_dir in rv_dirs: files = [] for pat in globs: files += glob.glob(os.path.join(rv_dir, pat)) if not files: continue target = os.path.join('lib', 'mypy', rv_dir) rv.append((target, files)) return rv class CustomPythonBuild(build_py): def pin_version(self): path = os.path.join(self.build_lib, 'mypy') self.mkpath(path) with open(os.path.join(path, 'version.py'), 'w') as stream: stream.write('__version__ = "{}"\n'.format(version)) def run(self): self.execute(self.pin_version, ()) build_py.run(self) data_files = [] data_files += find_data_files('typeshed', ['*.py', '*.pyi']) data_files += find_data_files('xml', ['*.xsd', '*.xslt', '*.css']) classifiers = [ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development', ] setup(name='mypy', version=version, description=description, long_description=long_description, author='Jukka Lehtosalo', author_email='jukka.lehtosalo@iki.fi', url='http://www.mypy-lang.org/', license='MIT License', platforms=['POSIX'], py_modules=[], packages=['mypy', 'mypy.test', 'mypy.myunit', 'mypy.server'], entry_points={'console_scripts': ['mypy=mypy.__main__:console_entry', 'stubgen=mypy.stubgen:main', 'dmypy=mypy.dmypy:main', ]}, data_files=data_files, classifiers=classifiers, cmdclass={'build_py': CustomPythonBuild}, install_requires = ['typed-ast >= 1.1.0, < 1.2.0', 'psutil >= 5.4.0, < 5.5.0', ], extras_require = { ':python_version < "3.5"': 'typing >= 3.5.3', }, ) mypy-0.560/test-data/0000755€tŠÔÚ€2›s®0000000000013215007242020556 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/.flake80000644€tŠÔÚ€2›s®0000000172413215007205021734 0ustar jukkaDROPBOX\Domain Users00000000000000# Some PEP8 deviations are considered irrelevant to stub files: # (error counts as of 2016-12-19) # 17381 E704 multiple statements on one line (def) # 11840 E301 expected 1 blank line # 7467 E302 expected 2 blank lines # 1772 E501 line too long # 1487 F401 imported but unused # 1248 E701 multiple statements on one line (colon) # 427 F811 redefinition # 356 E305 expected 2 blank lines # Nice-to-haves ignored for now # 152 E128 continuation line under-indented for visual indent # 43 E127 continuation line over-indented for visual indent [flake8] ignore = F401, F811, E127, E128, E301, E302, E305, E501, E701, E704, B303 # We are checking with Python 3 but many of the stubs are Python 2 stubs. # A nice future improvement would be to provide separate .flake8 # configurations for Python 2 and Python 3 files. builtins = StandardError,apply,basestring,buffer,cmp,coerce,execfile,file,intern,long,raw_input,reduce,reload,unichr,unicode,xrange exclude = .venv*,@* mypy-0.560/test-data/samples/0000755€tŠÔÚ€2›s®0000000000013215007242022222 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/samples/bottles.py0000644€tŠÔÚ€2›s®0000000046413215007205024253 0ustar jukkaDROPBOX\Domain Users00000000000000import typing REFRAIN = ''' %d bottles of beer on the wall, %d bottles of beer, take one down, pass it around, %d bottles of beer on the wall! 
''' bottles_of_beer = 99 while bottles_of_beer > 1: print(REFRAIN % (bottles_of_beer, bottles_of_beer, bottles_of_beer - 1)) bottles_of_beer -= 1 mypy-0.560/test-data/samples/class.py0000644€tŠÔÚ€2›s®0000000066513215007205023707 0ustar jukkaDROPBOX\Domain Users00000000000000import typing class BankAccount(object): def __init__(self, initial_balance: int = 0) -> None: self.balance = initial_balance def deposit(self, amount: int) -> None: self.balance += amount def withdraw(self, amount: int) -> None: self.balance -= amount def overdrawn(self) -> bool: return self.balance < 0 my_account = BankAccount(15) my_account.withdraw(5) print(my_account.balance) mypy-0.560/test-data/samples/cmdline.py0000644€tŠÔÚ€2›s®0000000033613215007205024210 0ustar jukkaDROPBOX\Domain Users00000000000000# This program adds up integers in the command line import sys import typing try: total = sum(int(arg) for arg in sys.argv[1:]) print('sum =', total) except ValueError: print('Please supply integer arguments') mypy-0.560/test-data/samples/crawl.py0000644€tŠÔÚ€2›s®0000007655513215007205023725 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3.4 """A simple web crawler.""" # This is cloned from /examples/crawl.py, # with type annotations added (PEP 484). # # TODO: convert to `async def` + `await` (PEP 492). import argparse import asyncio import cgi from http.client import BadStatusLine import logging import re import sys import time import urllib.parse from typing import Any, Generator, IO, Optional, Sequence, Set, Tuple, List, Dict ARGS = argparse.ArgumentParser(description="Web crawler") ARGS.add_argument( '--iocp', action='store_true', dest='iocp', default=False, help='Use IOCP event loop (Windows only)') ARGS.add_argument( '--select', action='store_true', dest='select', default=False, help='Use Select event loop instead of default') ARGS.add_argument( 'roots', nargs='*', default=[], help='Root URL (may be repeated)') ARGS.add_argument( '--max_redirect', action='store', type=int, metavar='N', default=10, help='Limit redirection chains (for 301, 302 etc.)') ARGS.add_argument( '--max_tries', action='store', type=int, metavar='N', default=4, help='Limit retries on network errors') ARGS.add_argument( '--max_tasks', action='store', type=int, metavar='N', default=100, help='Limit concurrent connections') ARGS.add_argument( '--max_pool', action='store', type=int, metavar='N', default=100, help='Limit connection pool size') ARGS.add_argument( '--exclude', action='store', metavar='REGEX', help='Exclude matching URLs') ARGS.add_argument( '--strict', action='store_true', default=True, help='Strict host matching (default)') ARGS.add_argument( '--lenient', action='store_false', dest='strict', default=False, help='Lenient host matching') ARGS.add_argument( '-v', '--verbose', action='count', dest='level', default=1, help='Verbose logging (repeat for more verbose)') ARGS.add_argument( '-q', '--quiet', action='store_const', const=0, dest='level', default=1, help='Quiet logging (opposite of --verbose)') ESCAPES = [('quot', '"'), ('gt', '>'), ('lt', '<'), ('amp', '&') # Must be last. ] def unescape(url: str) -> str: """Turn & into &, and so on. This is the inverse of cgi.escape(). 
""" for name, char in ESCAPES: url = url.replace('&' + name + ';', char) return url def fix_url(url: str) -> str: """Prefix a schema-less URL with http://.""" if '://' not in url: url = 'http://' + url return url class Logger: def __init__(self, level: int) -> None: self.level = level def _log(self, n: int, args: Sequence[Any]) -> None: if self.level >= n: print(*args, file=sys.stderr, flush=True) def log(self, n: int, *args: Any) -> None: self._log(n, args) def __call__(self, n: int, *args: Any) -> None: self._log(n, args) KeyTuple = Tuple[str, int, bool] class ConnectionPool: """A connection pool. To open a connection, use reserve(). To recycle it, use unreserve(). The pool is mostly just a mapping from (host, port, ssl) tuples to lists of Connections. The currently active connections are *not* in the data structure; get_connection() takes the connection out, and recycle_connection() puts it back in. To recycle a connection, call conn.close(recycle=True). There are limits to both the overall pool and the per-key pool. """ def __init__(self, log: Logger, max_pool: int = 10, max_tasks: int = 5) -> None: self.log = log self.max_pool = max_pool # Overall limit. self.max_tasks = max_tasks # Per-key limit. self.loop = asyncio.get_event_loop() self.connections = {} # type: Dict[KeyTuple, List[Connection]] self.queue = [] # type: List[Connection] def close(self) -> None: """Close all connections available for reuse.""" for conns in self.connections.values(): for conn in conns: conn.close() self.connections.clear() self.queue.clear() @asyncio.coroutine def get_connection(self, host: str, port: int, ssl: bool) -> Generator[Any, None, 'Connection']: """Create or reuse a connection.""" port = port or (443 if ssl else 80) try: ipaddrs = yield from self.loop.getaddrinfo(host, port) except Exception as exc: self.log(0, 'Exception %r for (%r, %r)' % (exc, host, port)) raise self.log(1, '* %s resolves to %s' % (host, ', '.join(ip[4][0] for ip in ipaddrs))) # Look for a reusable connection. for _1, _2, _3, _4, (h, p, *_5) in ipaddrs: key = h, p, ssl conn = None conns = self.connections.get(key) while conns: conn = conns.pop(0) self.queue.remove(conn) if not conns: del self.connections[key] if conn.stale(): self.log(1, 'closing stale connection for', key) conn.close() # Just in case. else: self.log(1, '* Reusing pooled connection', key, 'FD =', conn.fileno()) return conn # Create a new connection. conn = Connection(self.log, self, host, port, ssl) yield from conn.connect() self.log(1, '* New connection', conn.key, 'FD =', conn.fileno()) return conn def recycle_connection(self, conn: 'Connection') -> None: """Make a connection available for reuse. This also prunes the pool if it exceeds the size limits. """ if conn.stale(): conn.close() return key = conn.key conns = self.connections.setdefault(key, []) conns.append(conn) self.queue.append(conn) if len(conns) <= self.max_tasks and len(self.queue) <= self.max_pool: return # Prune the queue. # Close stale connections for this key first. stale = [conn for conn in conns if conn.stale()] if stale: for conn in stale: conns.remove(conn) self.queue.remove(conn) self.log(1, 'closing stale connection for', key) conn.close() if not conns: del self.connections[key] # Close oldest connection(s) for this key if limit reached. while len(conns) > self.max_tasks: conn = conns.pop(0) self.queue.remove(conn) self.log(1, 'closing oldest connection for', key) conn.close() if len(self.queue) <= self.max_pool: return # Close overall stale connections. 
stale = [conn for conn in self.queue if conn.stale()] if stale: for conn in stale: conns = self.connections.get(conn.key) conns.remove(conn) self.queue.remove(conn) self.log(1, 'closing stale connection for', key) conn.close() # Close oldest overall connection(s) if limit reached. while len(self.queue) > self.max_pool: conn = self.queue.pop(0) conns = self.connections.get(conn.key) c = conns.pop(0) assert conn == c, (conn.key, conn, c, conns) self.log(1, 'closing overall oldest connection for', conn.key) conn.close() class Connection: def __init__(self, log: Logger, pool: ConnectionPool, host: str, port: int, ssl: bool) -> None: self.log = log self.pool = pool self.host = host self.port = port self.ssl = ssl self.reader = None # type: asyncio.StreamReader self.writer = None # type: asyncio.StreamWriter self.key = None # type: KeyTuple def stale(self) -> bool: return self.reader is None or self.reader.at_eof() def fileno(self) -> Optional[int]: writer = self.writer if writer is not None: transport = writer.transport if transport is not None: sock = transport.get_extra_info('socket') if sock is not None: return sock.fileno() return None @asyncio.coroutine def connect(self) -> Generator[Any, None, None]: self.reader, self.writer = yield from asyncio.open_connection( self.host, self.port, ssl=self.ssl) peername = self.writer.get_extra_info('peername') if peername: self.host, self.port = peername[:2] else: self.log(1, 'NO PEERNAME???', self.host, self.port, self.ssl) self.key = self.host, self.port, self.ssl def close(self, recycle: bool = False) -> None: if recycle and not self.stale(): self.pool.recycle_connection(self) else: self.writer.close() self.pool = self.reader = self.writer = None class Request: """HTTP request. Use connect() to open a connection; send_request() to send the request; get_response() to receive the response headers. """ def __init__(self, log: Logger, url: str, pool: ConnectionPool) -> None: self.log = log self.url = url self.pool = pool self.parts = urllib.parse.urlparse(self.url) self.scheme = self.parts.scheme assert self.scheme in ('http', 'https'), repr(url) self.ssl = self.parts.scheme == 'https' self.netloc = self.parts.netloc self.hostname = self.parts.hostname self.port = self.parts.port or (443 if self.ssl else 80) self.path = (self.parts.path or '/') self.query = self.parts.query if self.query: self.full_path = '%s?%s' % (self.path, self.query) else: self.full_path = self.path self.http_version = 'HTTP/1.1' self.method = 'GET' self.headers = [] # type: List[Tuple[str, str]] self.conn = None # type: Connection @asyncio.coroutine def connect(self) -> Generator[Any, None, None]: """Open a connection to the server.""" self.log(1, '* Connecting to %s:%s using %s for %s' % (self.hostname, self.port, 'ssl' if self.ssl else 'tcp', self.url)) self.conn = yield from self.pool.get_connection(self.hostname, self.port, self.ssl) def close(self, recycle: bool = False) -> None: """Close the connection, recycle if requested.""" if self.conn is not None: if not recycle: self.log(1, 'closing connection for', self.conn.key) self.conn.close(recycle) self.conn = None @asyncio.coroutine def putline(self, line: str) -> None: """Write a line to the connection. Used for the request line and headers. 
""" self.log(2, '>', line) self.conn.writer.write(line.encode('latin-1') + b'\r\n') @asyncio.coroutine def send_request(self) -> Generator[Any, None, None]: """Send the request.""" request_line = '%s %s %s' % (self.method, self.full_path, self.http_version) yield from self.putline(request_line) # TODO: What if a header is already set? self.headers.append(('User-Agent', 'asyncio-example-crawl/0.0')) self.headers.append(('Host', self.netloc)) self.headers.append(('Accept', '*/*')) # self.headers.append(('Accept-Encoding', 'gzip')) for key, value in self.headers: line = '%s: %s' % (key, value) yield from self.putline(line) yield from self.putline('') @asyncio.coroutine def get_response(self) -> Generator[Any, None, 'Response']: """Receive the response.""" response = Response(self.log, self.conn.reader) yield from response.read_headers() return response class Response: """HTTP response. Call read_headers() to receive the request headers. Then check the status attribute and call get_header() to inspect the headers. Finally call read() to receive the body. """ def __init__(self, log: Logger, reader: asyncio.StreamReader) -> None: self.log = log self.reader = reader self.http_version = None # type: str # 'HTTP/1.1' self.status = None # type: int # 200 self.reason = None # type: str # 'Ok' self.headers = [] # type: List[Tuple[str, str]] # [('Content-Type', 'text/html')] @asyncio.coroutine def getline(self) -> Generator[Any, None, str]: """Read one line from the connection.""" line = (yield from self.reader.readline()).decode('latin-1').rstrip() self.log(2, '<', line) return line @asyncio.coroutine def read_headers(self) -> Generator[Any, None, None]: """Read the response status and the request headers.""" status_line = yield from self.getline() status_parts = status_line.split(None, 2) if len(status_parts) != 3: self.log(0, 'bad status_line', repr(status_line)) raise BadStatusLine(status_line) self.http_version, status, self.reason = status_parts self.status = int(status) while True: header_line = yield from self.getline() if not header_line: break # TODO: Continuation lines. key, value = header_line.split(':', 1) self.headers.append((key, value.strip())) def get_redirect_url(self, default: str = '') -> str: """Inspect the status and return the redirect url if appropriate.""" if self.status not in (300, 301, 302, 303, 307): return default return self.get_header('Location', default) def get_header(self, key: str, default: str = '') -> str: """Get one header value, using a case insensitive header name.""" key = key.lower() for k, v in self.headers: if k.lower() == key: return v return default @asyncio.coroutine def read(self) -> Generator[Any, None, bytes]: """Read the response body. This honors Content-Length and Transfer-Encoding: chunked. 
""" nbytes = None for key, value in self.headers: if key.lower() == 'content-length': nbytes = int(value) break if nbytes is None: if self.get_header('transfer-encoding').lower() == 'chunked': self.log(2, 'parsing chunked response') blocks = [] while True: size_header = yield from self.reader.readline() if not size_header: self.log(0, 'premature end of chunked response') break self.log(3, 'size_header =', repr(size_header)) parts = size_header.split(b';') size = int(parts[0], 16) if size: self.log(3, 'reading chunk of', size, 'bytes') block = yield from self.reader.readexactly(size) assert len(block) == size, (len(block), size) blocks.append(block) crlf = yield from self.reader.readline() assert crlf == b'\r\n', repr(crlf) if not size: break body = b''.join(blocks) self.log(1, 'chunked response had', len(body), 'bytes in', len(blocks), 'blocks') else: self.log(3, 'reading until EOF') body = yield from self.reader.read() # TODO: Should make sure not to recycle the connection # in this case. else: body = yield from self.reader.readexactly(nbytes) return body class Fetcher: """Logic and state for one URL. When found in crawler.busy, this represents a URL to be fetched or in the process of being fetched; when found in crawler.done, this holds the results from fetching it. This is usually associated with a task. This references the crawler for the connection pool and to add more URLs to its todo list. Call fetch() to do the fetching, then report() to print the results. """ def __init__(self, log: Logger, url: str, crawler: 'Crawler', max_redirect: int = 10, max_tries: int = 4) -> None: self.log = log self.url = url self.crawler = crawler # We don't loop resolving redirects here -- we just use this # to decide whether to add the redirect URL to crawler.todo. self.max_redirect = max_redirect # But we do loop to retry on errors a few times. self.max_tries = max_tries # Everything we collect from the response goes here. self.task = None # type: asyncio.Task self.exceptions = [] # type: List[Exception] self.tries = 0 self.request = None # type: Request self.response = None # type: Response self.body = None # type: bytes self.next_url = None # type: str self.ctype = None # type: str self.pdict = None # type: Dict[str, str] self.encoding = None # type: str self.urls = None # type: Set[str] self.new_urls = None # type: Set[str] @asyncio.coroutine def fetch(self) -> Generator[Any, None, None]: """Attempt to fetch the contents of the URL. If successful, and the data is HTML, extract further links and add them to the crawler. Redirects are also added back there. """ while self.tries < self.max_tries: self.tries += 1 self.request = None try: self.request = Request(self.log, self.url, self.crawler.pool) yield from self.request.connect() yield from self.request.send_request() self.response = yield from self.request.get_response() self.body = yield from self.response.read() h_conn = self.response.get_header('connection').lower() if h_conn != 'close': self.request.close(recycle=True) self.request = None if self.tries > 1: self.log(1, 'try', self.tries, 'for', self.url, 'success') break except (BadStatusLine, OSError) as exc: self.exceptions.append(exc) self.log(1, 'try', self.tries, 'for', self.url, 'raised', repr(exc)) # import pdb; pdb.set_trace() # Don't reuse the connection in this case. finally: if self.request is not None: self.request.close() else: # We never broke out of the while loop, i.e. all tries failed. 
self.log(0, 'no success for', self.url, 'in', self.max_tries, 'tries') return next_url = self.response.get_redirect_url() if next_url: self.next_url = urllib.parse.urljoin(self.url, next_url) if self.max_redirect > 0: self.log(1, 'redirect to', self.next_url, 'from', self.url) self.crawler.add_url(self.next_url, self.max_redirect - 1) else: self.log(0, 'redirect limit reached for', self.next_url, 'from', self.url) else: if self.response.status == 200: self.ctype = self.response.get_header('content-type') self.pdict = {} if self.ctype: self.ctype, self.pdict = cgi.parse_header(self.ctype) self.encoding = self.pdict.get('charset', 'utf-8') if self.ctype == 'text/html': body = self.body.decode(self.encoding, 'replace') # Replace href with (?:href|src) to follow image links. self.urls = set(re.findall(r'(?i)href=["\']?([^\s"\'<>]+)', body)) if self.urls: self.log(1, 'got', len(self.urls), 'distinct urls from', self.url) self.new_urls = set() for url in self.urls: url = unescape(url) url = urllib.parse.urljoin(self.url, url) url, frag = urllib.parse.urldefrag(url) if self.crawler.add_url(url): self.new_urls.add(url) def report(self, stats: 'Stats', file: IO[str] = None) -> None: """Print a report on the state for this URL. Also update the Stats instance. """ if self.task is not None: if not self.task.done(): stats.add('pending') print(self.url, 'pending', file=file) return elif self.task.cancelled(): stats.add('cancelled') print(self.url, 'cancelled', file=file) return elif self.task.exception(): stats.add('exception') exc = self.task.exception() stats.add('exception_' + exc.__class__.__name__) print(self.url, exc, file=file) return if len(self.exceptions) == self.tries: stats.add('fail') exc = self.exceptions[-1] stats.add('fail_' + str(exc.__class__.__name__)) print(self.url, 'error', exc, file=file) elif self.next_url: stats.add('redirect') print(self.url, self.response.status, 'redirect', self.next_url, file=file) elif self.ctype == 'text/html': stats.add('html') size = len(self.body or b'') stats.add('html_bytes', size) if self.log.level: print(self.url, self.response.status, self.ctype, self.encoding, size, '%d/%d' % (len(self.new_urls or ()), len(self.urls or ())), file=file) elif self.response is None: print(self.url, 'no response object') else: size = len(self.body or b'') if self.response.status == 200: stats.add('other') stats.add('other_bytes', size) else: stats.add('error') stats.add('error_bytes', size) stats.add('status_%s' % self.response.status) print(self.url, self.response.status, self.ctype, self.encoding, size, file=file) class Stats: """Record stats of various sorts.""" def __init__(self) -> None: self.stats = {} # type: Dict[str, int] def add(self, key: str, count: int = 1) -> None: self.stats[key] = self.stats.get(key, 0) + count def report(self, file: IO[str] = None) -> None: for key, count in sorted(self.stats.items()): print('%10d' % count, key, file=file) class Crawler: """Crawl a set of URLs. This manages three disjoint sets of URLs (todo, busy, done). The data structures actually store dicts -- the values in todo give the redirect limit, while the values in busy and done are Fetcher instances. """ def __init__(self, log: Logger, roots: Set[str], exclude: str = None, strict: bool = True, # What to crawl. max_redirect: int = 10, max_tries: int = 4, # Per-url limits. max_tasks: int = 10, max_pool: int = 10, # Global limits. 
) -> None: self.log = log self.roots = roots self.exclude = exclude self.strict = strict self.max_redirect = max_redirect self.max_tries = max_tries self.max_tasks = max_tasks self.max_pool = max_pool self.todo = {} # type: Dict[str, int] self.busy = {} # type: Dict[str, Fetcher] self.done = {} # type: Dict[str, Fetcher] self.pool = ConnectionPool(self.log, max_pool, max_tasks) self.root_domains = set() # type: Set[str] for root in roots: host = urllib.parse.urlparse(root).hostname if not host: continue if re.match(r'\A[\d\.]*\Z', host): self.root_domains.add(host) else: host = host.lower() if self.strict: self.root_domains.add(host) if host.startswith('www.'): self.root_domains.add(host[4:]) else: self.root_domains.add('www.' + host) else: parts = host.split('.') if len(parts) > 2: host = '.'.join(parts[-2:]) self.root_domains.add(host) for root in roots: self.add_url(root) self.governor = asyncio.Semaphore(max_tasks) self.termination = asyncio.Condition() self.t0 = time.time() self.t1 = None # type: Optional[float] def close(self) -> None: """Close resources (currently only the pool).""" self.pool.close() def host_okay(self, host: str) -> bool: """Check if a host should be crawled. A literal match (after lowercasing) is always good. For hosts that don't look like IP addresses, some approximate matches are okay depending on the strict flag. """ host = host.lower() if host in self.root_domains: return True if re.match(r'\A[\d\.]*\Z', host): return False if self.strict: return self._host_okay_strictish(host) else: return self._host_okay_lenient(host) def _host_okay_strictish(self, host: str) -> bool: """Check if a host should be crawled, strict-ish version. This checks for equality modulo an initial 'www.' component. """ if host.startswith('www.'): if host[4:] in self.root_domains: return True else: if 'www.' + host in self.root_domains: return True return False def _host_okay_lenient(self, host: str) -> bool: """Check if a host should be crawled, lenient version. This compares the last two components of the host. """ parts = host.split('.') if len(parts) > 2: host = '.'.join(parts[-2:]) return host in self.root_domains def add_url(self, url: str, max_redirect: int = None) -> bool: """Add a URL to the todo list if not seen before.""" if self.exclude and re.search(self.exclude, url): return False parsed = urllib.parse.urlparse(url) if parsed.scheme not in ('http', 'https'): self.log(2, 'skipping non-http scheme in', url) return False host = parsed.hostname if not self.host_okay(host): self.log(2, 'skipping non-root host in', url) return False if max_redirect is None: max_redirect = self.max_redirect if url in self.todo or url in self.busy or url in self.done: return False self.log(1, 'adding', url, max_redirect) self.todo[url] = max_redirect return True @asyncio.coroutine def crawl(self) -> Generator[Any, None, None]: """Run the crawler until all finished.""" with (yield from self.termination): while self.todo or self.busy: if self.todo: url, max_redirect = self.todo.popitem() fetcher = Fetcher(self.log, url, crawler=self, max_redirect=max_redirect, max_tries=self.max_tries, ) self.busy[url] = fetcher fetcher.task = asyncio.Task(self.fetch(fetcher)) else: yield from self.termination.wait() self.t1 = time.time() @asyncio.coroutine def fetch(self, fetcher: Fetcher) -> Generator[Any, None, None]: """Call the Fetcher's fetch(), with a limit on concurrency. Once this returns, move the fetcher from busy to done. 
""" url = fetcher.url with (yield from self.governor): try: yield from fetcher.fetch() # Fetcher gonna fetch. finally: # Force GC of the task, so the error is logged. fetcher.task = None with (yield from self.termination): self.done[url] = fetcher del self.busy[url] self.termination.notify() def report(self, file: IO[str] = None) -> None: """Print a report on all completed URLs.""" if self.t1 is None: self.t1 = time.time() dt = self.t1 - self.t0 if dt and self.max_tasks: speed = len(self.done) / dt / self.max_tasks else: speed = 0 stats = Stats() print('*** Report ***', file=file) try: show = [] # type: List[Tuple[str, Fetcher]] show.extend(self.done.items()) show.extend(self.busy.items()) show.sort() for url, fetcher in show: fetcher.report(stats, file=file) except KeyboardInterrupt: print('\nInterrupted', file=file) print('Finished', len(self.done), 'urls in %.3f secs' % dt, '(max_tasks=%d)' % self.max_tasks, '(%.3f urls/sec/task)' % speed, file=file) stats.report(file=file) print('Todo:', len(self.todo), file=file) print('Busy:', len(self.busy), file=file) print('Done:', len(self.done), file=file) print('Date:', time.ctime(), 'local time', file=file) def main() -> None: """Main program. Parse arguments, set up event loop, run crawler, print report. """ args = ARGS.parse_args() if not args.roots: print('Use --help for command line help') return log = Logger(args.level) if args.iocp: if sys.platform == 'win32': from asyncio import ProactorEventLoop loop = ProactorEventLoop() # type: ignore asyncio.set_event_loop(loop) else: assert False elif args.select: loop = asyncio.SelectorEventLoop() # type: ignore asyncio.set_event_loop(loop) else: loop = asyncio.get_event_loop() roots = {fix_url(root) for root in args.roots} crawler = Crawler(log, roots, exclude=args.exclude, strict=args.strict, max_redirect=args.max_redirect, max_tries=args.max_tries, max_tasks=args.max_tasks, max_pool=args.max_pool, ) try: loop.run_until_complete(crawler.crawl()) # Crawler gonna crawl. except KeyboardInterrupt: sys.stderr.flush() print('\nInterrupted\n') finally: crawler.report() crawler.close() loop.close() if __name__ == '__main__': logging.basicConfig(level=logging.INFO) # type: ignore main() mypy-0.560/test-data/samples/crawl2.py0000644€tŠÔÚ€2›s®0000007553113215007205024000 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3.4 """A simple web crawler.""" # This is cloned from /examples/crawl.py, # with type annotations added (PEP 484). # # This version (crawl2.) has also been converted to use `async def` + # `await` (PEP 492). 
import argparse import asyncio import cgi from http.client import BadStatusLine import logging import re import sys import time import urllib.parse from typing import Any, Awaitable, IO, Optional, Sequence, Set, Tuple, List, Dict ARGS = argparse.ArgumentParser(description="Web crawler") ARGS.add_argument( '--iocp', action='store_true', dest='iocp', default=False, help='Use IOCP event loop (Windows only)') ARGS.add_argument( '--select', action='store_true', dest='select', default=False, help='Use Select event loop instead of default') ARGS.add_argument( 'roots', nargs='*', default=[], help='Root URL (may be repeated)') ARGS.add_argument( '--max_redirect', action='store', type=int, metavar='N', default=10, help='Limit redirection chains (for 301, 302 etc.)') ARGS.add_argument( '--max_tries', action='store', type=int, metavar='N', default=4, help='Limit retries on network errors') ARGS.add_argument( '--max_tasks', action='store', type=int, metavar='N', default=100, help='Limit concurrent connections') ARGS.add_argument( '--max_pool', action='store', type=int, metavar='N', default=100, help='Limit connection pool size') ARGS.add_argument( '--exclude', action='store', metavar='REGEX', help='Exclude matching URLs') ARGS.add_argument( '--strict', action='store_true', default=True, help='Strict host matching (default)') ARGS.add_argument( '--lenient', action='store_false', dest='strict', default=False, help='Lenient host matching') ARGS.add_argument( '-v', '--verbose', action='count', dest='level', default=1, help='Verbose logging (repeat for more verbose)') ARGS.add_argument( '-q', '--quiet', action='store_const', const=0, dest='level', default=1, help='Quiet logging (opposite of --verbose)') ESCAPES = [('quot', '"'), ('gt', '>'), ('lt', '<'), ('amp', '&') # Must be last. ] def unescape(url: str) -> str: """Turn & into &, and so on. This is the inverse of cgi.escape(). """ for name, char in ESCAPES: url = url.replace('&' + name + ';', char) return url def fix_url(url: str) -> str: """Prefix a schema-less URL with http://.""" if '://' not in url: url = 'http://' + url return url class Logger: def __init__(self, level: int) -> None: self.level = level def _log(self, n: int, args: Sequence[Any]) -> None: if self.level >= n: print(*args, file=sys.stderr, flush=True) def log(self, n: int, *args: Any) -> None: self._log(n, args) def __call__(self, n: int, *args: Any) -> None: self._log(n, args) KeyTuple = Tuple[str, int, bool] class ConnectionPool: """A connection pool. To open a connection, use reserve(). To recycle it, use unreserve(). The pool is mostly just a mapping from (host, port, ssl) tuples to lists of Connections. The currently active connections are *not* in the data structure; get_connection() takes the connection out, and recycle_connection() puts it back in. To recycle a connection, call conn.close(recycle=True). There are limits to both the overall pool and the per-key pool. """ def __init__(self, log: Logger, max_pool: int = 10, max_tasks: int = 5) -> None: self.log = log self.max_pool = max_pool # Overall limit. self.max_tasks = max_tasks # Per-key limit. 
self.loop = asyncio.get_event_loop() self.connections = {} # type: Dict[KeyTuple, List[Connection]] self.queue = [] # type: List[Connection] def close(self) -> None: """Close all connections available for reuse.""" for conns in self.connections.values(): for conn in conns: conn.close() self.connections.clear() self.queue.clear() async def get_connection(self, host: str, port: int, ssl: bool) -> 'Connection': """Create or reuse a connection.""" port = port or (443 if ssl else 80) try: ipaddrs = await self.loop.getaddrinfo(host, port) except Exception as exc: self.log(0, 'Exception %r for (%r, %r)' % (exc, host, port)) raise self.log(1, '* %s resolves to %s' % (host, ', '.join(ip[4][0] for ip in ipaddrs))) # Look for a reusable connection. for _1, _2, _3, _4, (h, p, *_5) in ipaddrs: key = h, p, ssl conn = None conns = self.connections.get(key) while conns: conn = conns.pop(0) self.queue.remove(conn) if not conns: del self.connections[key] if conn.stale(): self.log(1, 'closing stale connection for', key) conn.close() # Just in case. else: self.log(1, '* Reusing pooled connection', key, 'FD =', conn.fileno()) return conn # Create a new connection. conn = Connection(self.log, self, host, port, ssl) await conn.connect() self.log(1, '* New connection', conn.key, 'FD =', conn.fileno()) return conn def recycle_connection(self, conn: 'Connection') -> None: """Make a connection available for reuse. This also prunes the pool if it exceeds the size limits. """ if conn.stale(): conn.close() return key = conn.key conns = self.connections.setdefault(key, []) conns.append(conn) self.queue.append(conn) if len(conns) <= self.max_tasks and len(self.queue) <= self.max_pool: return # Prune the queue. # Close stale connections for this key first. stale = [conn for conn in conns if conn.stale()] if stale: for conn in stale: conns.remove(conn) self.queue.remove(conn) self.log(1, 'closing stale connection for', key) conn.close() if not conns: del self.connections[key] # Close oldest connection(s) for this key if limit reached. while len(conns) > self.max_tasks: conn = conns.pop(0) self.queue.remove(conn) self.log(1, 'closing oldest connection for', key) conn.close() if len(self.queue) <= self.max_pool: return # Close overall stale connections. stale = [conn for conn in self.queue if conn.stale()] if stale: for conn in stale: conns = self.connections.get(conn.key) conns.remove(conn) self.queue.remove(conn) self.log(1, 'closing stale connection for', key) conn.close() # Close oldest overall connection(s) if limit reached. 
while len(self.queue) > self.max_pool: conn = self.queue.pop(0) conns = self.connections.get(conn.key) c = conns.pop(0) assert conn == c, (conn.key, conn, c, conns) self.log(1, 'closing overall oldest connection for', conn.key) conn.close() class Connection: def __init__(self, log: Logger, pool: ConnectionPool, host: str, port: int, ssl: bool) -> None: self.log = log self.pool = pool self.host = host self.port = port self.ssl = ssl self.reader = None # type: asyncio.StreamReader self.writer = None # type: asyncio.StreamWriter self.key = None # type: KeyTuple def stale(self) -> bool: return self.reader is None or self.reader.at_eof() def fileno(self) -> Optional[int]: writer = self.writer if writer is not None: transport = writer.transport if transport is not None: sock = transport.get_extra_info('socket') if sock is not None: return sock.fileno() return None async def connect(self) -> None: self.reader, self.writer = await asyncio.open_connection( self.host, self.port, ssl=self.ssl) peername = self.writer.get_extra_info('peername') if peername: self.host, self.port = peername[:2] else: self.log(1, 'NO PEERNAME???', self.host, self.port, self.ssl) self.key = self.host, self.port, self.ssl def close(self, recycle: bool = False) -> None: if recycle and not self.stale(): self.pool.recycle_connection(self) else: self.writer.close() self.pool = self.reader = self.writer = None class Request: """HTTP request. Use connect() to open a connection; send_request() to send the request; get_response() to receive the response headers. """ def __init__(self, log: Logger, url: str, pool: ConnectionPool) -> None: self.log = log self.url = url self.pool = pool self.parts = urllib.parse.urlparse(self.url) self.scheme = self.parts.scheme assert self.scheme in ('http', 'https'), repr(url) self.ssl = self.parts.scheme == 'https' self.netloc = self.parts.netloc self.hostname = self.parts.hostname self.port = self.parts.port or (443 if self.ssl else 80) self.path = (self.parts.path or '/') self.query = self.parts.query if self.query: self.full_path = '%s?%s' % (self.path, self.query) else: self.full_path = self.path self.http_version = 'HTTP/1.1' self.method = 'GET' self.headers = [] # type: List[Tuple[str, str]] self.conn = None # type: Connection async def connect(self) -> None: """Open a connection to the server.""" self.log(1, '* Connecting to %s:%s using %s for %s' % (self.hostname, self.port, 'ssl' if self.ssl else 'tcp', self.url)) self.conn = await self.pool.get_connection(self.hostname, self.port, self.ssl) def close(self, recycle: bool = False) -> None: """Close the connection, recycle if requested.""" if self.conn is not None: if not recycle: self.log(1, 'closing connection for', self.conn.key) self.conn.close(recycle) self.conn = None async def putline(self, line: str) -> None: """Write a line to the connection. Used for the request line and headers. """ self.log(2, '>', line) self.conn.writer.write(line.encode('latin-1') + b'\r\n') async def send_request(self) -> None: """Send the request.""" request_line = '%s %s %s' % (self.method, self.full_path, self.http_version) await self.putline(request_line) # TODO: What if a header is already set? 
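# Editor's sketch (not part of crawl2.py): what the request assembled just
# below looks like on the wire.  Each line is latin-1 encoded and CRLF
# terminated, and an empty line ends the header block.  The host and path
# values here are illustrative.
method, path, version = 'GET', '/index.html', 'HTTP/1.1'
headers = [('User-Agent', 'asyncio-example-crawl/0.0'),
           ('Host', 'example.com'),
           ('Accept', '*/*')]
lines = ['%s %s %s' % (method, path, version)]
lines += ['%s: %s' % (key, value) for key, value in headers]
raw = b''.join(line.encode('latin-1') + b'\r\n' for line in lines) + b'\r\n'
print(raw)
# b'GET /index.html HTTP/1.1\r\nUser-Agent: asyncio-example-crawl/0.0\r\n
#    Host: example.com\r\nAccept: */*\r\n\r\n'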
self.headers.append(('User-Agent', 'asyncio-example-crawl/0.0')) self.headers.append(('Host', self.netloc)) self.headers.append(('Accept', '*/*')) # self.headers.append(('Accept-Encoding', 'gzip')) for key, value in self.headers: line = '%s: %s' % (key, value) await self.putline(line) await self.putline('') async def get_response(self) -> 'Response': """Receive the response.""" response = Response(self.log, self.conn.reader) await response.read_headers() return response class Response: """HTTP response. Call read_headers() to receive the request headers. Then check the status attribute and call get_header() to inspect the headers. Finally call read() to receive the body. """ def __init__(self, log: Logger, reader: asyncio.StreamReader) -> None: self.log = log self.reader = reader self.http_version = None # type: str # 'HTTP/1.1' self.status = None # type: int # 200 self.reason = None # type: str # 'Ok' self.headers = [] # type: List[Tuple[str, str]] # [('Content-Type', 'text/html')] async def getline(self) -> str: """Read one line from the connection.""" line = (await self.reader.readline()).decode('latin-1').rstrip() self.log(2, '<', line) return line async def read_headers(self) -> None: """Read the response status and the request headers.""" status_line = await self.getline() status_parts = status_line.split(None, 2) if len(status_parts) != 3: self.log(0, 'bad status_line', repr(status_line)) raise BadStatusLine(status_line) self.http_version, status, self.reason = status_parts self.status = int(status) while True: header_line = await self.getline() if not header_line: break # TODO: Continuation lines. key, value = header_line.split(':', 1) self.headers.append((key, value.strip())) def get_redirect_url(self, default: str = '') -> str: """Inspect the status and return the redirect url if appropriate.""" if self.status not in (300, 301, 302, 303, 307): return default return self.get_header('Location', default) def get_header(self, key: str, default: str = '') -> str: """Get one header value, using a case insensitive header name.""" key = key.lower() for k, v in self.headers: if k.lower() == key: return v return default async def read(self) -> bytes: """Read the response body. This honors Content-Length and Transfer-Encoding: chunked. """ nbytes = None for key, value in self.headers: if key.lower() == 'content-length': nbytes = int(value) break if nbytes is None: if self.get_header('transfer-encoding').lower() == 'chunked': self.log(2, 'parsing chunked response') blocks = [] while True: size_header = await self.reader.readline() if not size_header: self.log(0, 'premature end of chunked response') break self.log(3, 'size_header =', repr(size_header)) parts = size_header.split(b';') size = int(parts[0], 16) if size: self.log(3, 'reading chunk of', size, 'bytes') block = await self.reader.readexactly(size) assert len(block) == size, (len(block), size) blocks.append(block) crlf = await self.reader.readline() assert crlf == b'\r\n', repr(crlf) if not size: break body = b''.join(blocks) self.log(1, 'chunked response had', len(body), 'bytes in', len(blocks), 'blocks') else: self.log(3, 'reading until EOF') body = await self.reader.read() # TODO: Should make sure not to recycle the connection # in this case. else: body = await self.reader.readexactly(nbytes) return body class Fetcher: """Logic and state for one URL. When found in crawler.busy, this represents a URL to be fetched or in the process of being fetched; when found in crawler.done, this holds the results from fetching it. 
This is usually associated with a task. This references the crawler for the connection pool and to add more URLs to its todo list. Call fetch() to do the fetching, then report() to print the results. """ def __init__(self, log: Logger, url: str, crawler: 'Crawler', max_redirect: int = 10, max_tries: int = 4) -> None: self.log = log self.url = url self.crawler = crawler # We don't loop resolving redirects here -- we just use this # to decide whether to add the redirect URL to crawler.todo. self.max_redirect = max_redirect # But we do loop to retry on errors a few times. self.max_tries = max_tries # Everything we collect from the response goes here. self.task = None # type: asyncio.Task self.exceptions = [] # type: List[Exception] self.tries = 0 self.request = None # type: Request self.response = None # type: Response self.body = None # type: bytes self.next_url = None # type: str self.ctype = None # type: str self.pdict = None # type: Dict[str, str] self.encoding = None # type: str self.urls = None # type: Set[str] self.new_urls = None # type: Set[str] async def fetch(self) -> None: """Attempt to fetch the contents of the URL. If successful, and the data is HTML, extract further links and add them to the crawler. Redirects are also added back there. """ while self.tries < self.max_tries: self.tries += 1 self.request = None try: self.request = Request(self.log, self.url, self.crawler.pool) await self.request.connect() await self.request.send_request() self.response = await self.request.get_response() self.body = await self.response.read() h_conn = self.response.get_header('connection').lower() if h_conn != 'close': self.request.close(recycle=True) self.request = None if self.tries > 1: self.log(1, 'try', self.tries, 'for', self.url, 'success') break except (BadStatusLine, OSError) as exc: self.exceptions.append(exc) self.log(1, 'try', self.tries, 'for', self.url, 'raised', repr(exc)) # import pdb; pdb.set_trace() # Don't reuse the connection in this case. finally: if self.request is not None: self.request.close() else: # We never broke out of the while loop, i.e. all tries failed. self.log(0, 'no success for', self.url, 'in', self.max_tries, 'tries') return next_url = self.response.get_redirect_url() if next_url: self.next_url = urllib.parse.urljoin(self.url, next_url) if self.max_redirect > 0: self.log(1, 'redirect to', self.next_url, 'from', self.url) self.crawler.add_url(self.next_url, self.max_redirect - 1) else: self.log(0, 'redirect limit reached for', self.next_url, 'from', self.url) else: if self.response.status == 200: self.ctype = self.response.get_header('content-type') self.pdict = {} if self.ctype: self.ctype, self.pdict = cgi.parse_header(self.ctype) self.encoding = self.pdict.get('charset', 'utf-8') if self.ctype == 'text/html': body = self.body.decode(self.encoding, 'replace') # Replace href with (?:href|src) to follow image links. self.urls = set(re.findall(r'(?i)href=["\']?([^\s"\'<>]+)', body)) if self.urls: self.log(1, 'got', len(self.urls), 'distinct urls from', self.url) self.new_urls = set() for url in self.urls: url = unescape(url) url = urllib.parse.urljoin(self.url, url) url, frag = urllib.parse.urldefrag(url) if self.crawler.add_url(url): self.new_urls.add(url) def report(self, stats: 'Stats', file: IO[str] = None) -> None: """Print a report on the state for this URL. Also update the Stats instance. 
""" if self.task is not None: if not self.task.done(): stats.add('pending') print(self.url, 'pending', file=file) return elif self.task.cancelled(): stats.add('cancelled') print(self.url, 'cancelled', file=file) return elif self.task.exception(): stats.add('exception') exc = self.task.exception() stats.add('exception_' + exc.__class__.__name__) print(self.url, exc, file=file) return if len(self.exceptions) == self.tries: stats.add('fail') exc = self.exceptions[-1] stats.add('fail_' + str(exc.__class__.__name__)) print(self.url, 'error', exc, file=file) elif self.next_url: stats.add('redirect') print(self.url, self.response.status, 'redirect', self.next_url, file=file) elif self.ctype == 'text/html': stats.add('html') size = len(self.body or b'') stats.add('html_bytes', size) if self.log.level: print(self.url, self.response.status, self.ctype, self.encoding, size, '%d/%d' % (len(self.new_urls or ()), len(self.urls or ())), file=file) elif self.response is None: print(self.url, 'no response object') else: size = len(self.body or b'') if self.response.status == 200: stats.add('other') stats.add('other_bytes', size) else: stats.add('error') stats.add('error_bytes', size) stats.add('status_%s' % self.response.status) print(self.url, self.response.status, self.ctype, self.encoding, size, file=file) class Stats: """Record stats of various sorts.""" def __init__(self) -> None: self.stats = {} # type: Dict[str, int] def add(self, key: str, count: int = 1) -> None: self.stats[key] = self.stats.get(key, 0) + count def report(self, file: IO[str] = None) -> None: for key, count in sorted(self.stats.items()): print('%10d' % count, key, file=file) class Crawler: """Crawl a set of URLs. This manages three disjoint sets of URLs (todo, busy, done). The data structures actually store dicts -- the values in todo give the redirect limit, while the values in busy and done are Fetcher instances. """ def __init__(self, log: Logger, roots: Set[str], exclude: str = None, strict: bool = True, # What to crawl. max_redirect: int = 10, max_tries: int = 4, # Per-url limits. max_tasks: int = 10, max_pool: int = 10, # Global limits. ) -> None: self.log = log self.roots = roots self.exclude = exclude self.strict = strict self.max_redirect = max_redirect self.max_tries = max_tries self.max_tasks = max_tasks self.max_pool = max_pool self.todo = {} # type: Dict[str, int] self.busy = {} # type: Dict[str, Fetcher] self.done = {} # type: Dict[str, Fetcher] self.pool = ConnectionPool(self.log, max_pool, max_tasks) self.root_domains = set() # type: Set[str] for root in roots: host = urllib.parse.urlparse(root).hostname if not host: continue if re.match(r'\A[\d\.]*\Z', host): self.root_domains.add(host) else: host = host.lower() if self.strict: self.root_domains.add(host) if host.startswith('www.'): self.root_domains.add(host[4:]) else: self.root_domains.add('www.' + host) else: parts = host.split('.') if len(parts) > 2: host = '.'.join(parts[-2:]) self.root_domains.add(host) for root in roots: self.add_url(root) self.governor = asyncio.Semaphore(max_tasks) self.termination = asyncio.Condition() self.t0 = time.time() self.t1 = None # type: Optional[float] def close(self) -> None: """Close resources (currently only the pool).""" self.pool.close() def host_okay(self, host: str) -> bool: """Check if a host should be crawled. A literal match (after lowercasing) is always good. For hosts that don't look like IP addresses, some approximate matches are okay depending on the strict flag. 
""" host = host.lower() if host in self.root_domains: return True if re.match(r'\A[\d\.]*\Z', host): return False if self.strict: return self._host_okay_strictish(host) else: return self._host_okay_lenient(host) def _host_okay_strictish(self, host: str) -> bool: """Check if a host should be crawled, strict-ish version. This checks for equality modulo an initial 'www.' component. """ if host.startswith('www.'): if host[4:] in self.root_domains: return True else: if 'www.' + host in self.root_domains: return True return False def _host_okay_lenient(self, host: str) -> bool: """Check if a host should be crawled, lenient version. This compares the last two components of the host. """ parts = host.split('.') if len(parts) > 2: host = '.'.join(parts[-2:]) return host in self.root_domains def add_url(self, url: str, max_redirect: int = None) -> bool: """Add a URL to the todo list if not seen before.""" if self.exclude and re.search(self.exclude, url): return False parsed = urllib.parse.urlparse(url) if parsed.scheme not in ('http', 'https'): self.log(2, 'skipping non-http scheme in', url) return False host = parsed.hostname if not self.host_okay(host): self.log(2, 'skipping non-root host in', url) return False if max_redirect is None: max_redirect = self.max_redirect if url in self.todo or url in self.busy or url in self.done: return False self.log(1, 'adding', url, max_redirect) self.todo[url] = max_redirect return True async def crawl(self) -> None: """Run the crawler until all finished.""" with (await self.termination): while self.todo or self.busy: if self.todo: url, max_redirect = self.todo.popitem() fetcher = Fetcher(self.log, url, crawler=self, max_redirect=max_redirect, max_tries=self.max_tries, ) self.busy[url] = fetcher fetcher.task = asyncio.Task(self.fetch(fetcher)) else: await self.termination.wait() self.t1 = time.time() async def fetch(self, fetcher: Fetcher) -> None: """Call the Fetcher's fetch(), with a limit on concurrency. Once this returns, move the fetcher from busy to done. """ url = fetcher.url with (await self.governor): try: await fetcher.fetch() # Fetcher gonna fetch. finally: # Force GC of the task, so the error is logged. fetcher.task = None with (await self.termination): self.done[url] = fetcher del self.busy[url] self.termination.notify() def report(self, file: IO[str] = None) -> None: """Print a report on all completed URLs.""" if self.t1 is None: self.t1 = time.time() dt = self.t1 - self.t0 if dt and self.max_tasks: speed = len(self.done) / dt / self.max_tasks else: speed = 0 stats = Stats() print('*** Report ***', file=file) try: show = [] # type: List[Tuple[str, Fetcher]] show.extend(self.done.items()) show.extend(self.busy.items()) show.sort() for url, fetcher in show: fetcher.report(stats, file=file) except KeyboardInterrupt: print('\nInterrupted', file=file) print('Finished', len(self.done), 'urls in %.3f secs' % dt, '(max_tasks=%d)' % self.max_tasks, '(%.3f urls/sec/task)' % speed, file=file) stats.report(file=file) print('Todo:', len(self.todo), file=file) print('Busy:', len(self.busy), file=file) print('Done:', len(self.done), file=file) print('Date:', time.ctime(), 'local time', file=file) def main() -> None: """Main program. Parse arguments, set up event loop, run crawler, print report. 
""" args = ARGS.parse_args() if not args.roots: print('Use --help for command line help') return log = Logger(args.level) if args.iocp: if sys.platform == 'win32': from asyncio import ProactorEventLoop loop = ProactorEventLoop() # type: ignore asyncio.set_event_loop(loop) else: assert False elif args.select: loop = asyncio.SelectorEventLoop() # type: ignore asyncio.set_event_loop(loop) else: loop = asyncio.get_event_loop() roots = {fix_url(root) for root in args.roots} crawler = Crawler(log, roots, exclude=args.exclude, strict=args.strict, max_redirect=args.max_redirect, max_tries=args.max_tries, max_tasks=args.max_tasks, max_pool=args.max_pool, ) try: loop.run_until_complete(crawler.crawl()) # Crawler gonna crawl. except KeyboardInterrupt: sys.stderr.flush() print('\nInterrupted\n') finally: crawler.report() crawler.close() loop.close() if __name__ == '__main__': logging.basicConfig(level=logging.INFO) # type: ignore main() mypy-0.560/test-data/samples/dict.py0000644€tŠÔÚ€2›s®0000000037213215007205023520 0ustar jukkaDROPBOX\Domain Users00000000000000import typing prices = {'apple': 0.40, 'banana': 0.50} my_purchase = { 'apple': 1, 'banana': 6} grocery_bill = sum(prices[fruit] * my_purchase[fruit] for fruit in my_purchase) print('I owe the grocer $%.2f' % grocery_bill) mypy-0.560/test-data/samples/fib.py0000644€tŠÔÚ€2›s®0000000024713215007205023336 0ustar jukkaDROPBOX\Domain Users00000000000000import typing parents, babies = (1, 1) while babies < 100: print('This generation has {0} babies'.format(babies)) parents, babies = (babies, parents + babies) mypy-0.560/test-data/samples/files.py0000644€tŠÔÚ€2›s®0000000052113215007205023673 0ustar jukkaDROPBOX\Domain Users00000000000000# indent your Python code to put into an email import glob import typing # glob supports Unix style pathname extensions python_files = glob.glob('*.py') for file_name in sorted(python_files): print(' ------' + file_name) f = open(file_name) for line in f: print(' ' + line.rstrip()) f.close() print() mypy-0.560/test-data/samples/for.py0000644€tŠÔÚ€2›s®0000000025213215007205023360 0ustar jukkaDROPBOX\Domain Users00000000000000import typing friends = ['john', 'pat', 'gary', 'michael'] for i, name in enumerate(friends): print("iteration {iteration} is {name}".format(iteration=i, name=name)) mypy-0.560/test-data/samples/generators.py0000644€tŠÔÚ€2›s®0000000113413215007205024743 0ustar jukkaDROPBOX\Domain Users00000000000000# Prime number sieve with generators import itertools from typing import Iterator def iter_primes() -> Iterator[int]: # an iterator of all numbers between 2 and +infinity numbers = itertools.count(2) # generate primes forever while True: # get the first number from the iterator (always a prime) prime = next(numbers) yield prime # this code iteratively builds up a chain of # filters...slightly tricky, but ponder it a bit numbers = filter(prime.__rmod__, numbers) for p in iter_primes(): if p > 1000: break print(p) mypy-0.560/test-data/samples/greet.py0000644€tŠÔÚ€2›s®0000000016013215007205023676 0ustar jukkaDROPBOX\Domain Users00000000000000import typing def greet(name: str) -> None: print('Hello', name) greet('Jack') greet('Jill') greet('Bob') mypy-0.560/test-data/samples/guess.py0000644€tŠÔÚ€2›s®0000000133113215007205023717 0ustar jukkaDROPBOX\Domain Users00000000000000# "Guess the Number" Game (edited) from http://inventwithpython.com import random import typing guesses_made = 0 name = input('Hello! 
What is your name?\n') number = random.randint(1, 20) print('Well, {0}, I am thinking of a number between 1 and 20.'.format(name)) while guesses_made < 6: guess = int(input('Take a guess: ')) guesses_made += 1 if guess < number: print('Your guess is too low.') if guess > number: print('Your guess is too high.') if guess == number: break if guess == number: print('Good job, {0}! You guessed my number in {1} guesses!'.format( name, guesses_made)) else: print('Nope. The number I was thinking of was {0}'.format(number)) mypy-0.560/test-data/samples/hello.py0000644€tŠÔÚ€2›s®0000000004413215007205023674 0ustar jukkaDROPBOX\Domain Users00000000000000import typing print('Hello, world') mypy-0.560/test-data/samples/input.py0000644€tŠÔÚ€2›s®0000000011313215007205023725 0ustar jukkaDROPBOX\Domain Users00000000000000import typing name = input('What is your name?\n') print('Hi, %s.' % name) mypy-0.560/test-data/samples/itertool.py0000644€tŠÔÚ€2›s®0000000060513215007205024435 0ustar jukkaDROPBOX\Domain Users00000000000000from itertools import groupby import typing lines = ''' This is the first paragraph. This is the second. '''.splitlines() # Use itertools.groupby and bool to return groups of # consecutive lines that either have content or don't. for has_chars, frags in groupby(lines, bool): if has_chars: print(' '.join(frags)) # PRINTS: # This is the first paragraph. # This is the second. mypy-0.560/test-data/samples/readme.txt0000644€tŠÔÚ€2›s®0000000125013215007205024215 0ustar jukkaDROPBOX\Domain Users00000000000000Mypy Sample Programs -------------------- The sample programs use static typing unless otherwise noted in comments. Original credits for sample programs: fib.py - Python Wiki [1] for.py - Python Wiki [1] greet.py - Python Wiki [1] hello.py - Python Wiki [1] input.py - Python Wiki [1] regexp.py - Python Wiki [1] dict.py - Python Wiki [1] cmdline.py - Python Wiki [1] files.py - Python Wiki [1] bottles.py - Python Wiki [1] class.py - Python Wiki [1] guess.py - Python Wiki [1] generators.py - Python Wiki [1] itertool.py - Python Wiki [1] The sample programs were ported to mypy by Jukka Lehtosalo. [1] http://wiki.python.org/moin/SimplePrograms mypy-0.560/test-data/samples/regexp.py0000644€tŠÔÚ€2›s®0000000034513215007205024067 0ustar jukkaDROPBOX\Domain Users00000000000000import typing import re for test_string in ['555-1212', 'ILL-EGAL']: if re.match(r'^\d{3}-\d{4}$', test_string): print(test_string, 'is a valid US local phone number') else: print(test_string, 'rejected') mypy-0.560/test-data/stdlib-samples/0000755€tŠÔÚ€2›s®0000000000013215007242023501 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/stdlib-samples/3.2/0000755€tŠÔÚ€2›s®0000000000013215007243024004 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/stdlib-samples/3.2/base64.py0000644€tŠÔÚ€2›s®0000003441213215007205025444 0ustar jukkaDROPBOX\Domain Users00000000000000#! 
/usr/bin/env python3 """RFC 3548: Base16, Base32, Base64 Data Encodings""" # Modified 04-Oct-1995 by Jack Jansen to use binascii module # Modified 30-Dec-2003 by Barry Warsaw to add full RFC 3548 support # Modified 22-May-2007 by Guido van Rossum to use bytes everywhere import re import struct import binascii from typing import Dict, List, AnyStr, IO __all__ = [ # Legacy interface exports traditional RFC 1521 Base64 encodings 'encode', 'decode', 'encodebytes', 'decodebytes', # Generalized interface for other encodings 'b64encode', 'b64decode', 'b32encode', 'b32decode', 'b16encode', 'b16decode', # Standard Base64 encoding 'standard_b64encode', 'standard_b64decode', # Some common Base64 alternatives. As referenced by RFC 3458, see thread # starting at: # # http://zgp.org/pipermail/p2p-hackers/2001-September/000316.html 'urlsafe_b64encode', 'urlsafe_b64decode', ] bytes_types = (bytes, bytearray) # Types acceptable as binary data def _translate(s: bytes, altchars: Dict[AnyStr, bytes]) -> bytes: if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) translation = bytearray(range(256)) for k, v in altchars.items(): translation[ord(k)] = v[0] return s.translate(translation) # Base64 encoding/decoding uses binascii def b64encode(s: bytes, altchars: bytes = None) -> bytes: """Encode a byte string using Base64. s is the byte string to encode. Optional altchars must be a byte string of length 2 which specifies an alternative alphabet for the '+' and '/' characters. This allows an application to e.g. generate url or filesystem safe Base64 strings. The encoded byte string is returned. """ if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) # Strip off the trailing newline encoded = binascii.b2a_base64(s)[:-1] if altchars is not None: if not isinstance(altchars, bytes_types): raise TypeError("expected bytes, not %s" % altchars.__class__.__name__) assert len(altchars) == 2, repr(altchars) return _translate(encoded, {'+': altchars[0:1], '/': altchars[1:2]}) return encoded def b64decode(s: bytes, altchars: bytes = None, validate: bool = False) -> bytes: """Decode a Base64 encoded byte string. s is the byte string to decode. Optional altchars must be a string of length 2 which specifies the alternative alphabet used instead of the '+' and '/' characters. The decoded string is returned. A binascii.Error is raised if s is incorrectly padded. If validate is False (the default), non-base64-alphabet characters are discarded prior to the padding check. If validate is True, non-base64-alphabet characters in the input result in a binascii.Error. """ if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) if altchars is not None: if not isinstance(altchars, bytes_types): raise TypeError("expected bytes, not %s" % altchars.__class__.__name__) assert len(altchars) == 2, repr(altchars) s = _translate(s, {chr(altchars[0]): b'+', chr(altchars[1]): b'/'}) if validate and not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s): raise binascii.Error('Non-base64 digit found') return binascii.a2b_base64(s) def standard_b64encode(s: bytes) -> bytes: """Encode a byte string using the standard Base64 alphabet. s is the byte string to encode. The encoded byte string is returned. """ return b64encode(s) def standard_b64decode(s: bytes) -> bytes: """Decode a byte string encoded with the standard Base64 alphabet. s is the byte string to decode. The decoded byte string is returned. 
binascii.Error is raised if the input is incorrectly padded or if there are non-alphabet characters present in the input. """ return b64decode(s) def urlsafe_b64encode(s: bytes) -> bytes: """Encode a byte string using a url-safe Base64 alphabet. s is the byte string to encode. The encoded byte string is returned. The alphabet uses '-' instead of '+' and '_' instead of '/'. """ return b64encode(s, b'-_') def urlsafe_b64decode(s: bytes) -> bytes: """Decode a byte string encoded with the standard Base64 alphabet. s is the byte string to decode. The decoded byte string is returned. binascii.Error is raised if the input is incorrectly padded or if there are non-alphabet characters present in the input. The alphabet uses '-' instead of '+' and '_' instead of '/'. """ return b64decode(s, b'-_') # Base32 encoding/decoding must be done in Python _b32alphabet = { 0: b'A', 9: b'J', 18: b'S', 27: b'3', 1: b'B', 10: b'K', 19: b'T', 28: b'4', 2: b'C', 11: b'L', 20: b'U', 29: b'5', 3: b'D', 12: b'M', 21: b'V', 30: b'6', 4: b'E', 13: b'N', 22: b'W', 31: b'7', 5: b'F', 14: b'O', 23: b'X', 6: b'G', 15: b'P', 24: b'Y', 7: b'H', 16: b'Q', 25: b'Z', 8: b'I', 17: b'R', 26: b'2', } _b32tab = [v[0] for k, v in sorted(_b32alphabet.items())] _b32rev = dict([(v[0], k) for k, v in _b32alphabet.items()]) def b32encode(s: bytes) -> bytes: """Encode a byte string using Base32. s is the byte string to encode. The encoded byte string is returned. """ if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) quanta, leftover = divmod(len(s), 5) # Pad the last quantum with zero bits if necessary if leftover: s = s + bytes(5 - leftover) # Don't use += ! quanta += 1 encoded = bytes() for i in range(quanta): # c1 and c2 are 16 bits wide, c3 is 8 bits wide. The intent of this # code is to process the 40 bits in units of 5 bits. So we take the 1 # leftover bit of c1 and tack it onto c2. Then we take the 2 leftover # bits of c2 and tack them onto c3. The shifts and masks are intended # to give us values of exactly 5 bits in width. c1, c2, c3 = struct.unpack('!HHB', s[i*5:(i+1)*5]) # type: (int, int, int) c2 += (c1 & 1) << 16 # 17 bits wide c3 += (c2 & 3) << 8 # 10 bits wide encoded += bytes([_b32tab[c1 >> 11], # bits 1 - 5 _b32tab[(c1 >> 6) & 0x1f], # bits 6 - 10 _b32tab[(c1 >> 1) & 0x1f], # bits 11 - 15 _b32tab[c2 >> 12], # bits 16 - 20 (1 - 5) _b32tab[(c2 >> 7) & 0x1f], # bits 21 - 25 (6 - 10) _b32tab[(c2 >> 2) & 0x1f], # bits 26 - 30 (11 - 15) _b32tab[c3 >> 5], # bits 31 - 35 (1 - 5) _b32tab[c3 & 0x1f], # bits 36 - 40 (1 - 5) ]) # Adjust for any leftover partial quanta if leftover == 1: return encoded[:-6] + b'======' elif leftover == 2: return encoded[:-4] + b'====' elif leftover == 3: return encoded[:-3] + b'===' elif leftover == 4: return encoded[:-1] + b'=' return encoded def b32decode(s: bytes, casefold: bool = False, map01: bytes = None) -> bytes: """Decode a Base32 encoded byte string. s is the byte string to decode. Optional casefold is a flag specifying whether a lowercase alphabet is acceptable as input. For security purposes, the default is False. RFC 3548 allows for optional mapping of the digit 0 (zero) to the letter O (oh), and for optional mapping of the digit 1 (one) to either the letter I (eye) or letter L (el). The optional argument map01 when not None, specifies which letter the digit 1 should be mapped to (when map01 is not None, the digit 0 is always mapped to the letter O). For security purposes the default is None, so that 0 and 1 are not allowed in the input. 
The decoded byte string is returned. binascii.Error is raised if the input is incorrectly padded or if there are non-alphabet characters present in the input. """ if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) quanta, leftover = divmod(len(s), 8) if leftover: raise binascii.Error('Incorrect padding') # Handle section 2.4 zero and one mapping. The flag map01 will be either # False, or the character to map the digit 1 (one) to. It should be # either L (el) or I (eye). if map01 is not None: if not isinstance(map01, bytes_types): raise TypeError("expected bytes, not %s" % map01.__class__.__name__) assert len(map01) == 1, repr(map01) s = _translate(s, {b'0': b'O', b'1': map01}) if casefold: s = s.upper() # Strip off pad characters from the right. We need to count the pad # characters because this will tell us how many null bytes to remove from # the end of the decoded string. padchars = 0 mo = re.search(b'(?P[=]*)$', s) if mo: padchars = len(mo.group('pad')) if padchars > 0: s = s[:-padchars] # Now decode the full quanta parts = [] # type: List[bytes] acc = 0 shift = 35 for c in s: val = _b32rev.get(c) if val is None: raise TypeError('Non-base32 digit found') acc += _b32rev[c] << shift shift -= 5 if shift < 0: parts.append(binascii.unhexlify(bytes('%010x' % acc, "ascii"))) acc = 0 shift = 35 # Process the last, partial quanta last = binascii.unhexlify(bytes('%010x' % acc, "ascii")) if padchars == 0: last = b'' # No characters elif padchars == 1: last = last[:-1] elif padchars == 3: last = last[:-2] elif padchars == 4: last = last[:-3] elif padchars == 6: last = last[:-4] else: raise binascii.Error('Incorrect padding') parts.append(last) return b''.join(parts) # RFC 3548, Base 16 Alphabet specifies uppercase, but hexlify() returns # lowercase. The RFC also recommends against accepting input case # insensitively. def b16encode(s: bytes) -> bytes: """Encode a byte string using Base16. s is the byte string to encode. The encoded byte string is returned. """ if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) return binascii.hexlify(s).upper() def b16decode(s: bytes, casefold: bool = False) -> bytes: """Decode a Base16 encoded byte string. s is the byte string to decode. Optional casefold is a flag specifying whether a lowercase alphabet is acceptable as input. For security purposes, the default is False. The decoded byte string is returned. binascii.Error is raised if s were incorrectly padded or if there are non-alphabet characters present in the string. """ if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) if casefold: s = s.upper() if re.search(b'[^0-9A-F]', s): raise binascii.Error('Non-base16 digit found') return binascii.unhexlify(s) # Legacy interface. This code could be cleaned up since I don't believe # binascii has any line length limitations. It just doesn't seem worth it # though. The files should be opened in binary mode. 
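# MAXBINSIZE works out to 57: every 3 input bytes become 4 output characters,
# so 57 input bytes encode to exactly one 76-character line.
# A minimal round-trip sketch of the non-legacy interface (illustrative
# comment only, not part of the original module):
#   >>> encodebytes(b'hello world!')
#   b'aGVsbG8gd29ybGQh\n'
#   >>> decodebytes(b'aGVsbG8gd29ybGQh\n')
#   b'hello world!'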
MAXLINESIZE = 76 # Excluding the CRLF MAXBINSIZE = (MAXLINESIZE//4)*3 def encode(input: IO[bytes], output: IO[bytes]) -> None: """Encode a file; input and output are binary files.""" while True: s = input.read(MAXBINSIZE) if not s: break while len(s) < MAXBINSIZE: ns = input.read(MAXBINSIZE-len(s)) if not ns: break s += ns line = binascii.b2a_base64(s) output.write(line) def decode(input: IO[bytes], output: IO[bytes]) -> None: """Decode a file; input and output are binary files.""" while True: line = input.readline() if not line: break s = binascii.a2b_base64(line) output.write(s) def encodebytes(s: bytes) -> bytes: """Encode a bytestring into a bytestring containing multiple lines of base-64 data.""" if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) pieces = [] # type: List[bytes] for i in range(0, len(s), MAXBINSIZE): chunk = s[i : i + MAXBINSIZE] pieces.append(binascii.b2a_base64(chunk)) return b"".join(pieces) def encodestring(s: bytes) -> bytes: """Legacy alias of encodebytes().""" import warnings warnings.warn("encodestring() is a deprecated alias, use encodebytes()", DeprecationWarning, 2) return encodebytes(s) def decodebytes(s: bytes) -> bytes: """Decode a bytestring of base-64 data into a bytestring.""" if not isinstance(s, bytes_types): raise TypeError("expected bytes, not %s" % s.__class__.__name__) return binascii.a2b_base64(s) def decodestring(s: bytes) -> bytes: """Legacy alias of decodebytes().""" import warnings warnings.warn("decodestring() is a deprecated alias, use decodebytes()", DeprecationWarning, 2) return decodebytes(s) # Usable as a script... def main() -> None: """Small main program""" import sys, getopt try: opts, args = getopt.getopt(sys.argv[1:], 'deut') except getopt.error as msg: sys.stdout = sys.stderr print(msg) print("""usage: %s [-d|-e|-u|-t] [file|-] -d, -u: decode -e: encode (default) -t: encode and decode string 'Aladdin:open sesame'"""%sys.argv[0]) sys.exit(2) func = encode for o, a in opts: if o == '-e': func = encode if o == '-d': func = decode if o == '-u': func = decode if o == '-t': test(); return if args and args[0] != '-': with open(args[0], 'rb') as f: func(f, sys.stdout.buffer) else: func(sys.stdin.buffer, sys.stdout.buffer) def test() -> None: s0 = b"Aladdin:open sesame" print(repr(s0)) s1 = encodebytes(s0) print(repr(s1)) s2 = decodebytes(s1) print(repr(s2)) assert s0 == s2 if __name__ == '__main__': main() mypy-0.560/test-data/stdlib-samples/3.2/fnmatch.py0000644€tŠÔÚ€2›s®0000000671613215007205026006 0ustar jukkaDROPBOX\Domain Users00000000000000"""Filename matching with shell patterns. fnmatch(FILENAME, PATTERN) matches according to the local convention. fnmatchcase(FILENAME, PATTERN) always takes case in account. The functions operate by translating the pattern into a regular expression. They cache the compiled regular expressions for speed. The function translate(PATTERN) returns a regular expression corresponding to PATTERN. (It does not compile it.) """ import os import posixpath import re import functools from typing import Iterable, List, AnyStr, Any, Callable, Match __all__ = ["filter", "fnmatch", "fnmatchcase", "translate"] def fnmatch(name: AnyStr, pat: AnyStr) -> bool: """Test whether FILENAME matches PATTERN. Patterns are Unix shell style: * matches everything ? matches any single character [seq] matches any character in seq [!seq] matches any char not in seq An initial period in FILENAME is not special. 
Both FILENAME and PATTERN are first case-normalized if the operating system requires it. If you don't want this, use fnmatchcase(FILENAME, PATTERN). """ name = os.path.normcase(name) pat = os.path.normcase(pat) return fnmatchcase(name, pat) @functools.lru_cache(maxsize=250) def _compile_pattern(pat: AnyStr, is_bytes: bool = False) -> Callable[[AnyStr], Match[AnyStr]]: if isinstance(pat, bytes): pat_str = str(pat, 'ISO-8859-1') res_str = translate(pat_str) res = bytes(res_str, 'ISO-8859-1') else: res = translate(pat) return re.compile(res).match def filter(names: Iterable[AnyStr], pat: AnyStr) -> List[AnyStr]: """Return the subset of the list NAMES that match PAT.""" result = [] # type: List[AnyStr] pat = os.path.normcase(pat) match = _compile_pattern(pat, isinstance(pat, bytes)) if os.path is posixpath: # normcase on posix is NOP. Optimize it away from the loop. for name in names: if match(name): result.append(name) else: for name in names: if match(os.path.normcase(name)): result.append(name) return result def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: """Test whether FILENAME matches PATTERN, including case. This is a version of fnmatch() which doesn't case-normalize its arguments. """ match = _compile_pattern(pat, isinstance(pat, bytes)) return match(name) is not None def translate(pat: str) -> str: """Translate a shell PATTERN to a regular expression. There is no way to quote meta-characters. """ i, n = 0, len(pat) res = '' while i < n: c = pat[i] i = i+1 if c == '*': res = res + '.*' elif c == '?': res = res + '.' elif c == '[': j = i if j < n and pat[j] == '!': j = j+1 if j < n and pat[j] == ']': j = j+1 while j < n and pat[j] != ']': j = j+1 if j >= n: res = res + '\\[' else: stuff = pat[i:j].replace('\\','\\\\') i = j+1 if stuff[0] == '!': stuff = '^' + stuff[1:] elif stuff[0] == '^': stuff = '\\' + stuff res = '%s[%s]' % (res, stuff) else: res = res + re.escape(c) return res + '\Z(?ms)' mypy-0.560/test-data/stdlib-samples/3.2/genericpath.py0000644€tŠÔÚ€2›s®0000000651613215007205026655 0ustar jukkaDROPBOX\Domain Users00000000000000""" Path operations common to more than one OS Do not use directly. The OS specific modules import the appropriate functions from this module themselves. """ import os import stat from typing import ( Any as Any_, List as List_, AnyStr as AnyStr_, Tuple as Tuple_ ) __all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime', 'getsize', 'isdir', 'isfile'] # Does a path exist? # This is false for dangling symbolic links on systems that support them. def exists(path: AnyStr_) -> bool: """Test whether a path exists. Returns False for broken symbolic links""" try: os.stat(path) except os.error: return False return True # This follows symbolic links, so both islink() and isdir() can be true # for the same path ono systems that support symlinks def isfile(path: AnyStr_) -> bool: """Test whether a path is a regular file""" try: st = os.stat(path) except os.error: return False return stat.S_ISREG(st.st_mode) # Is a path a directory? 
# This follows symbolic links, so both islink() and isdir() # can be true for the same path on systems that support symlinks def isdir(s: AnyStr_) -> bool: """Return true if the pathname refers to an existing directory.""" try: st = os.stat(s) except os.error: return False return stat.S_ISDIR(st.st_mode) def getsize(filename: AnyStr_) -> int: """Return the size of a file, reported by os.stat().""" return os.stat(filename).st_size def getmtime(filename: AnyStr_) -> float: """Return the last modification time of a file, reported by os.stat().""" return os.stat(filename).st_mtime def getatime(filename: AnyStr_) -> float: """Return the last access time of a file, reported by os.stat().""" return os.stat(filename).st_atime def getctime(filename: AnyStr_) -> float: """Return the metadata change time of a file, reported by os.stat().""" return os.stat(filename).st_ctime # Return the longest prefix of all list elements. def commonprefix(m: List_[Any_]) -> Any_: "Given a list of pathnames, returns the longest common leading component" if not m: return '' s1 = min(m) s2 = max(m) for i, c in enumerate(s1): if c != s2[i]: return s1[:i] return s1 # Split a path in root and extension. # The extension is everything starting at the last dot in the last # pathname component; the root is everything before that. # It is always true that root + ext == p. # Generic implementation of splitext, to be parametrized with # the separators def _splitext(p: AnyStr_, sep: AnyStr_, altsep: AnyStr_, extsep: AnyStr_) -> Tuple_[AnyStr_, AnyStr_]: """Split the extension from a pathname. Extension is everything from the last dot to the end, ignoring leading dots. Returns "(root, ext)"; ext may be empty.""" # NOTE: This code must work for text and bytes strings. sepIndex = p.rfind(sep) if altsep: altsepIndex = p.rfind(altsep) sepIndex = max(sepIndex, altsepIndex) dotIndex = p.rfind(extsep) if dotIndex > sepIndex: # skip all leading dots filenameIndex = sepIndex + 1 while filenameIndex < dotIndex: if p[filenameIndex:filenameIndex+1] != extsep: return p[:dotIndex], p[dotIndex:] filenameIndex += 1 return p, p[:0] mypy-0.560/test-data/stdlib-samples/3.2/getopt.py0000644€tŠÔÚ€2›s®0000001764313215007205025671 0ustar jukkaDROPBOX\Domain Users00000000000000"""Parser for command line options. This module helps scripts to parse the command line arguments in sys.argv. It supports the same conventions as the Unix getopt() function (including the special meanings of arguments of the form `-' and `--'). Long options similar to those supported by GNU software may be used as well via an optional third argument. This module provides two functions and an exception: getopt() -- Parse command line options gnu_getopt() -- Like getopt(), but allow option and non-option arguments to be intermixed. GetoptError -- exception (class) raised with 'opt' attribute, which is the option involved with the exception. """ # Long option support added by Lars Wirzenius . # # Gerrit Holl moved the string-based exceptions # to class-based exceptions. # # Peter Åstrand added gnu_getopt(). 
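# A minimal usage sketch for getopt() (illustrative comment only; the option
# names below are made up):
#   opts, args = getopt(['-a', 'foo', '--beta', 'rest'], 'a:b', ['beta'])
#   # opts == [('-a', 'foo'), ('--beta', '')], args == ['rest']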
# # TODO for gnu_getopt(): # # - GNU getopt_long_only mechanism # - allow the caller to specify ordering # - RETURN_IN_ORDER option # - GNU extension with '-' as first character of option string # - optional arguments, specified by double colons # - a option string with a W followed by semicolon should # treat "-W foo" as "--foo" __all__ = ["GetoptError","error","getopt","gnu_getopt"] import os from typing import List, Tuple, Iterable class GetoptError(Exception): opt = '' msg = '' def __init__(self, msg: str, opt: str = '') -> None: self.msg = msg self.opt = opt Exception.__init__(self, msg, opt) def __str__(self) -> str: return self.msg error = GetoptError # backward compatibility def getopt(args: List[str], shortopts: str, longopts: Iterable[str] = []) -> Tuple[List[Tuple[str, str]], List[str]]: """getopt(args, options[, long_options]) -> opts, args Parses command line options and parameter list. args is the argument list to be parsed, without the leading reference to the running program. Typically, this means "sys.argv[1:]". shortopts is the string of option letters that the script wants to recognize, with options that require an argument followed by a colon (i.e., the same format that Unix getopt() uses). If specified, longopts is a list of strings with the names of the long options which should be supported. The leading '--' characters should not be included in the option name. Options which require an argument should be followed by an equal sign ('='). The return value consists of two elements: the first is a list of (option, value) pairs; the second is the list of program arguments left after the option list was stripped (this is a trailing slice of the first argument). Each option-and-value pair returned has the option as its first element, prefixed with a hyphen (e.g., '-x'), and the option argument as its second element, or an empty string if the option has no argument. The options occur in the list in the same order in which they were found, thus allowing multiple occurrences. Long and short options may be mixed. """ opts = [] # type: List[Tuple[str, str]] if isinstance(longopts, str): longopts = [longopts] else: longopts = list(longopts) while args and args[0].startswith('-') and args[0] != '-': if args[0] == '--': args = args[1:] break if args[0].startswith('--'): opts, args = do_longs(opts, args[0][2:], longopts, args[1:]) else: opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:]) return opts, args def gnu_getopt(args: List[str], shortopts: str, longopts: Iterable[str] = []) -> Tuple[List[Tuple[str, str]], List[str]]: """getopt(args, options[, long_options]) -> opts, args This function works like getopt(), except that GNU style scanning mode is used by default. This means that option and non-option arguments may be intermixed. The getopt() function stops processing options as soon as a non-option argument is encountered. If the first character of the option string is `+', or if the environment variable POSIXLY_CORRECT is set, then option processing stops as soon as a non-option argument is encountered. """ opts = [] # type: List[Tuple[str, str]] prog_args = [] # type: List[str] if isinstance(longopts, str): longopts = [longopts] else: longopts = list(longopts) # Allow options after non-option arguments? 
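# A leading '+' in shortopts, or the POSIXLY_CORRECT environment variable,
# selects POSIX-style ordering: option processing stops at the first
# non-option argument instead of intermixing options and arguments.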
if shortopts.startswith('+'): shortopts = shortopts[1:] all_options_first = True elif os.environ.get("POSIXLY_CORRECT"): all_options_first = True else: all_options_first = False while args: if args[0] == '--': prog_args += args[1:] break if args[0][:2] == '--': opts, args = do_longs(opts, args[0][2:], longopts, args[1:]) elif args[0][:1] == '-' and args[0] != '-': opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:]) else: if all_options_first: prog_args += args break else: prog_args.append(args[0]) args = args[1:] return opts, prog_args def do_longs(opts: List[Tuple[str, str]], opt: str, longopts: List[str], args: List[str]) -> Tuple[List[Tuple[str, str]], List[str]]: try: i = opt.index('=') except ValueError: optarg = None # type: str else: opt, optarg = opt[:i], opt[i+1:] has_arg, opt = long_has_args(opt, longopts) if has_arg: if optarg is None: if not args: raise GetoptError('option --%s requires argument' % opt, opt) optarg, args = args[0], args[1:] elif optarg is not None: raise GetoptError('option --%s must not have an argument' % opt, opt) opts.append(('--' + opt, optarg or '')) return opts, args # Return: # has_arg? # full option name def long_has_args(opt: str, longopts: List[str]) -> Tuple[bool, str]: possibilities = [o for o in longopts if o.startswith(opt)] if not possibilities: raise GetoptError('option --%s not recognized' % opt, opt) # Is there an exact match? if opt in possibilities: return False, opt elif opt + '=' in possibilities: return True, opt # No exact match, so better be unique. if len(possibilities) > 1: # XXX since possibilities contains all valid continuations, might be # nice to work them into the error msg raise GetoptError('option --%s not a unique prefix' % opt, opt) assert len(possibilities) == 1 unique_match = possibilities[0] has_arg = unique_match.endswith('=') if has_arg: unique_match = unique_match[:-1] return has_arg, unique_match def do_shorts(opts: List[Tuple[str, str]], optstring: str, shortopts: str, args: List[str]) -> Tuple[List[Tuple[str, str]], List[str]]: while optstring != '': opt, optstring = optstring[0], optstring[1:] if short_has_arg(opt, shortopts): if optstring == '': if not args: raise GetoptError('option -%s requires argument' % opt, opt) optstring, args = args[0], args[1:] optarg, optstring = optstring, '' else: optarg = '' opts.append(('-' + opt, optarg)) return opts, args def short_has_arg(opt: str, shortopts: str) -> bool: for i in range(len(shortopts)): if opt == shortopts[i] != ':': return shortopts.startswith(':', i+1) raise GetoptError('option -%s not recognized' % opt, opt) if __name__ == '__main__': import sys print(getopt(sys.argv[1:], "a:b", ["alpha=", "beta"])) mypy-0.560/test-data/stdlib-samples/3.2/glob.py0000644€tŠÔÚ€2›s®0000000471313215007205025304 0ustar jukkaDROPBOX\Domain Users00000000000000"""Filename globbing utility.""" import os import re import fnmatch from typing import List, Iterator, Iterable, Any, AnyStr __all__ = ["glob", "iglob"] def glob(pathname: AnyStr) -> List[AnyStr]: """Return a list of paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la fnmatch. """ return list(iglob(pathname)) def iglob(pathname: AnyStr) -> Iterator[AnyStr]: """Return an iterator which yields the paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la fnmatch. 
""" if not has_magic(pathname): if os.path.lexists(pathname): yield pathname return dirname, basename = os.path.split(pathname) if not dirname: for name in glob1(None, basename): yield name return if has_magic(dirname): dirs = iglob(dirname) # type: Iterable[AnyStr] else: dirs = [dirname] if has_magic(basename): glob_in_dir = glob1 # type: Any else: glob_in_dir = glob0 for dirname in dirs: for name in glob_in_dir(dirname, basename): yield os.path.join(dirname, name) # These 2 helper functions non-recursively glob inside a literal directory. # They return a list of basenames. `glob1` accepts a pattern while `glob0` # takes a literal basename (so it only has to check for its existence). def glob1(dirname: AnyStr, pattern: AnyStr) -> List[AnyStr]: if not dirname: if isinstance(pattern, bytes): dirname = bytes(os.curdir, 'ASCII') else: dirname = os.curdir try: names = os.listdir(dirname) except os.error: return [] if pattern[0] != '.': names = [x for x in names if x[0] != '.'] return fnmatch.filter(names, pattern) def glob0(dirname: AnyStr, basename: AnyStr) -> List[AnyStr]: if basename == '': # `os.path.split()` returns an empty basename for paths ending with a # directory separator. 'q*x/' should match only directories. if os.path.isdir(dirname): return [basename] else: if os.path.lexists(os.path.join(dirname, basename)): return [basename] return [] magic_check = re.compile('[*?[]') magic_check_bytes = re.compile(b'[*?[]') def has_magic(s: AnyStr) -> bool: if isinstance(s, bytes): match = magic_check_bytes.search(s) else: match = magic_check.search(s) return match is not None mypy-0.560/test-data/stdlib-samples/3.2/incomplete/0000755€tŠÔÚ€2›s®0000000000013215007242026142 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/stdlib-samples/3.2/incomplete/logging/0000755€tŠÔÚ€2›s®0000000000013215007243027571 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/stdlib-samples/3.2/incomplete/logging/__init__.py0000644€tŠÔÚ€2›s®0000017510013215007205031704 0ustar jukkaDROPBOX\Domain Users00000000000000# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose and without fee is hereby granted, # provided that the above copyright notice appear in all copies and that # both that copyright notice and this permission notice appear in # supporting documentation, and that the name of Vinay Sajip # not be used in advertising or publicity pertaining to distribution # of the software without specific, written prior permission. # VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING # ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL # VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR # ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER # IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """ Logging package for Python. Based on PEP 282 and comments thereto in comp.lang.python, and influenced by Apache's log4j system. Copyright (C) 2001-2011 Vinay Sajip. All Rights Reserved. To use, simply 'import logging' and log away! 
""" import sys, os, time, io, traceback, warnings, weakref from string import Template __all__ = ['BASIC_FORMAT', 'BufferingFormatter', 'CRITICAL', 'DEBUG', 'ERROR', 'FATAL', 'FileHandler', 'Filter', 'Formatter', 'Handler', 'INFO', 'LogRecord', 'Logger', 'LoggerAdapter', 'NOTSET', 'NullHandler', 'StreamHandler', 'WARN', 'WARNING', 'addLevelName', 'basicConfig', 'captureWarnings', 'critical', 'debug', 'disable', 'error', 'exception', 'fatal', 'getLevelName', 'getLogger', 'getLoggerClass', 'info', 'log', 'makeLogRecord', 'setLoggerClass', 'warn', 'warning', 'getLogRecordFactory', 'setLogRecordFactory', 'lastResort'] import codecs import _thread as thread import threading __author__ = "Vinay Sajip " __status__ = "production" __version__ = "0.5.1.2" __date__ = "07 February 2010" #--------------------------------------------------------------------------- # Miscellaneous module data #--------------------------------------------------------------------------- # # _srcfile is used when walking the stack to check when we've got the first # caller stack frame. # if hasattr(sys, 'frozen'): #support for py2exe _srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:]) else: _srcfile = __file__ _srcfile = os.path.normcase(_srcfile) # next bit filched from 1.5.2's inspect.py def _currentframe(): """Return the frame object for the caller's stack frame.""" try: raise Exception except: return sys.exc_info()[2].tb_frame.f_back currentframe = _currentframe if hasattr(sys, '_getframe'): currentframe = lambda: sys._getframe(3) # done filching # _srcfile is only used in conjunction with sys._getframe(). # To provide compatibility with older versions of Python, set _srcfile # to None if _getframe() is not available; this value will prevent # findCaller() from being called. #if not hasattr(sys, "_getframe"): # _srcfile = None # #_startTime is used as the base when calculating the relative time of events # _startTime = time.time() # #raiseExceptions is used to see if exceptions during handling should be #propagated # raiseExceptions = 1 # # If you don't want threading information in the log, set this to zero # logThreads = 1 # # If you don't want multiprocessing information in the log, set this to zero # logMultiprocessing = 1 # # If you don't want process information in the log, set this to zero # logProcesses = 1 #--------------------------------------------------------------------------- # Level related stuff #--------------------------------------------------------------------------- # # Default levels and level names, these can be replaced with any positive set # of values having corresponding names. There is a pseudo-level, NOTSET, which # is only really there as a lower limit for user-defined levels. Handlers and # loggers are initialized with NOTSET so that they will log all messages, even # at user-defined levels. # CRITICAL = 50 FATAL = CRITICAL ERROR = 40 WARNING = 30 WARN = WARNING INFO = 20 DEBUG = 10 NOTSET = 0 _levelNames = { CRITICAL : 'CRITICAL', ERROR : 'ERROR', WARNING : 'WARNING', INFO : 'INFO', DEBUG : 'DEBUG', NOTSET : 'NOTSET', 'CRITICAL' : CRITICAL, 'ERROR' : ERROR, 'WARN' : WARNING, 'WARNING' : WARNING, 'INFO' : INFO, 'DEBUG' : DEBUG, 'NOTSET' : NOTSET, } def getLevelName(level): """ Return the textual representation of logging level 'level'. If the level is one of the predefined levels (CRITICAL, ERROR, WARNING, INFO, DEBUG) then you get the corresponding string. If you have associated levels with names using addLevelName then the name you have associated with 'level' is returned. 
If a numeric value corresponding to one of the defined levels is passed in, the corresponding string representation is returned. Otherwise, the string "Level %s" % level is returned. """ return _levelNames.get(level, ("Level %s" % level)) def addLevelName(level, levelName): """ Associate 'levelName' with 'level'. This is used when converting levels to text during message formatting. """ _acquireLock() try: #unlikely to cause an exception, but you never know... _levelNames[level] = levelName _levelNames[levelName] = level finally: _releaseLock() def _checkLevel(level): if isinstance(level, int): rv = level elif str(level) == level: if level not in _levelNames: raise ValueError("Unknown level: %r" % level) rv = _levelNames[level] else: raise TypeError("Level not an integer or a valid string: %r" % level) return rv #--------------------------------------------------------------------------- # Thread-related stuff #--------------------------------------------------------------------------- # #_lock is used to serialize access to shared data structures in this module. #This needs to be an RLock because fileConfig() creates and configures #Handlers, and so might arbitrary user threads. Since Handler code updates the #shared dictionary _handlers, it needs to acquire the lock. But if configuring, #the lock would already have been acquired - so we need an RLock. #The same argument applies to Loggers and Manager.loggerDict. # if thread: _lock = threading.RLock() else: _lock = None def _acquireLock(): """ Acquire the module-level lock for serializing access to shared data. This should be released with _releaseLock(). """ if _lock: _lock.acquire() def _releaseLock(): """ Release the module-level lock acquired by calling _acquireLock(). """ if _lock: _lock.release() #--------------------------------------------------------------------------- # The logging record #--------------------------------------------------------------------------- class LogRecord(object): """ A LogRecord instance represents an event being logged. LogRecord instances are created every time something is logged. They contain all the information pertinent to the event being logged. The main information passed in is in msg and args, which are combined using str(msg) % args to create the message field of the record. The record also includes information such as when the record was created, the source line where the logging call was made, and any exception information to be logged. """ def __init__(self, name, level, pathname, lineno, msg, args, exc_info, func=None, sinfo=None, **kwargs): """ Initialize a logging record with interesting information. """ ct = time.time() self.name = name self.msg = msg # # The following statement allows passing of a dictionary as a sole # argument, so that you can do something like # logging.debug("a %(a)d b %(b)s", {'a':1, 'b':2}) # Suggested by Stefan Behnel. # Note that without the test for args[0], we get a problem because # during formatting, we test to see if the arg is present using # 'if self.args:'. If the event being logged is e.g. 'Value is %d' # and if the passed arg fails 'if self.args:' then no formatting # is done. For example, logger.warn('Value is %d', 0) would log # 'Value is %d' instead of 'Value is 0'. # For the use case of passing a dictionary, this should not be a # problem. 
if args and len(args) == 1 and isinstance(args[0], dict) and args[0]: args = args[0] self.args = args self.levelname = getLevelName(level) self.levelno = level self.pathname = pathname try: self.filename = os.path.basename(pathname) self.module = os.path.splitext(self.filename)[0] except (TypeError, ValueError, AttributeError): self.filename = pathname self.module = "Unknown module" self.exc_info = exc_info self.exc_text = None # used to cache the traceback text self.stack_info = sinfo self.lineno = lineno self.funcName = func self.created = ct self.msecs = (ct - int(ct)) * 1000 self.relativeCreated = (self.created - _startTime) * 1000 if logThreads and thread: self.thread = thread.get_ident() self.threadName = threading.current_thread().name else: self.thread = None self.threadName = None if not logMultiprocessing: self.processName = None else: self.processName = 'MainProcess' mp = sys.modules.get('multiprocessing') if mp is not None: # Errors may occur if multiprocessing has not finished loading # yet - e.g. if a custom import hook causes third-party code # to run when multiprocessing calls import. See issue 8200 # for an example try: self.processName = mp.current_process().name except Exception: pass if logProcesses and hasattr(os, 'getpid'): self.process = os.getpid() else: self.process = None def __str__(self): return ''%(self.name, self.levelno, self.pathname, self.lineno, self.msg) def getMessage(self): """ Return the message for this LogRecord. Return the message for this LogRecord after merging any user-supplied arguments with the message. """ msg = str(self.msg) if self.args: msg = msg % self.args return msg # # Determine which class to use when instantiating log records. # _logRecordFactory = LogRecord def setLogRecordFactory(factory): """ Set the factory to be used when instantiating a log record. :param factory: A callable which will be called to instantiate a log record. """ global _logRecordFactory _logRecordFactory = factory def getLogRecordFactory(): """ Return the factory to be used when instantiating a log record. """ return _logRecordFactory def makeLogRecord(dict): """ Make a LogRecord whose attributes are defined by the specified dictionary, This function is useful for converting a logging event received over a socket connection (which is sent as a dictionary) into a LogRecord instance. 
""" rv = _logRecordFactory(None, None, "", 0, "", (), None, None) rv.__dict__.update(dict) return rv #--------------------------------------------------------------------------- # Formatter classes and functions #--------------------------------------------------------------------------- class PercentStyle(object): default_format = '%(message)s' asctime_format = '%(asctime)s' asctime_search = '%(asctime)' def __init__(self, fmt): self._fmt = fmt or self.default_format def usesTime(self): return self._fmt.find(self.asctime_search) >= 0 def format(self, record): return self._fmt % record.__dict__ class StrFormatStyle(PercentStyle): default_format = '{message}' asctime_format = '{asctime}' asctime_search = '{asctime' def format(self, record): return self._fmt.format(**record.__dict__) class StringTemplateStyle(PercentStyle): default_format = '${message}' asctime_format = '${asctime}' asctime_search = '${asctime}' def __init__(self, fmt): self._fmt = fmt or self.default_format self._tpl = Template(self._fmt) def usesTime(self): fmt = self._fmt return fmt.find('$asctime') >= 0 or fmt.find(self.asctime_format) >= 0 def format(self, record): return self._tpl.substitute(**record.__dict__) _STYLES = { '%': PercentStyle, '{': StrFormatStyle, '$': StringTemplateStyle } class Formatter(object): """ Formatter instances are used to convert a LogRecord to text. Formatters need to know how a LogRecord is constructed. They are responsible for converting a LogRecord to (usually) a string which can be interpreted by either a human or an external system. The base Formatter allows a formatting string to be specified. If none is supplied, the default value of "%s(message)" is used. The Formatter can be initialized with a format string which makes use of knowledge of the LogRecord attributes - e.g. the default value mentioned above makes use of the fact that the user's message and arguments are pre- formatted into a LogRecord's message attribute. Currently, the useful attributes in a LogRecord are described by: %(name)s Name of the logger (logging channel) %(levelno)s Numeric logging level for the message (DEBUG, INFO, WARNING, ERROR, CRITICAL) %(levelname)s Text logging level for the message ("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL") %(pathname)s Full pathname of the source file where the logging call was issued (if available) %(filename)s Filename portion of pathname %(module)s Module (name portion of filename) %(lineno)d Source line number where the logging call was issued (if available) %(funcName)s Function name %(created)f Time when the LogRecord was created (time.time() return value) %(asctime)s Textual time when the LogRecord was created %(msecs)d Millisecond portion of the creation time %(relativeCreated)d Time in milliseconds when the LogRecord was created, relative to the time the logging module was loaded (typically at application startup time) %(thread)d Thread ID (if available) %(threadName)s Thread name (if available) %(process)d Process ID (if available) %(message)s The result of record.getMessage(), computed just as the record is emitted """ converter = time.localtime def __init__(self, fmt=None, datefmt=None, style='%'): """ Initialize the formatter with specified format strings. Initialize the formatter either with the specified format string, or a default as described above. Allow for specialized date formatting with the optional datefmt argument (if omitted, you get the ISO8601 format). 
Use a style parameter of '%', '{' or '$' to specify that you want to use one of %-formatting, :meth:`str.format` (``{}``) formatting or :class:`string.Template` formatting in your format string. .. versionchanged: 3.2 Added the ``style`` parameter. """ if style not in _STYLES: raise ValueError('Style must be one of: %s' % ','.join( _STYLES.keys())) self._style = _STYLES[style](fmt) self._fmt = self._style._fmt self.datefmt = datefmt def formatTime(self, record, datefmt=None): """ Return the creation time of the specified LogRecord as formatted text. This method should be called from format() by a formatter which wants to make use of a formatted time. This method can be overridden in formatters to provide for any specific requirement, but the basic behaviour is as follows: if datefmt (a string) is specified, it is used with time.strftime() to format the creation time of the record. Otherwise, the ISO8601 format is used. The resulting string is returned. This function uses a user-configurable function to convert the creation time to a tuple. By default, time.localtime() is used; to change this for a particular formatter instance, set the 'converter' attribute to a function with the same signature as time.localtime() or time.gmtime(). To change it for all formatters, for example if you want all logging times to be shown in GMT, set the 'converter' attribute in the Formatter class. """ ct = self.converter(record.created) if datefmt: s = time.strftime(datefmt, ct) else: t = time.strftime("%Y-%m-%d %H:%M:%S", ct) s = "%s,%03d" % (t, record.msecs) # the use of % here is internal return s def formatException(self, ei): """ Format and return the specified exception information as a string. This default implementation just uses traceback.print_exception() """ sio = io.StringIO() tb = ei[2] # See issues #9427, #1553375. Commented out for now. #if getattr(self, 'fullstack', False): # traceback.print_stack(tb.tb_frame.f_back, file=sio) traceback.print_exception(ei[0], ei[1], tb, None, sio) s = sio.getvalue() sio.close() if s[-1:] == "\n": s = s[:-1] return s def usesTime(self): """ Check if the format uses the creation time of the record. """ return self._style.usesTime() def formatMessage(self, record): return self._style.format(record) def formatStack(self, stack_info): """ This method is provided as an extension point for specialized formatting of stack information. The input data is a string as returned from a call to :func:`traceback.print_stack`, but with the last trailing newline removed. The base implementation just returns the value passed in. """ return stack_info def format(self, record): """ Format the specified record as text. The record's attribute dictionary is used as the operand to a string formatting operation which yields the returned string. Before formatting the dictionary, a couple of preparatory steps are carried out. The message attribute of the record is computed using LogRecord.getMessage(). If the formatting string uses the time (as determined by a call to usesTime(), formatTime() is called to format the event time. If there is exception information, it is formatted using formatException() and appended to the message. 
""" record.message = record.getMessage() if self.usesTime(): record.asctime = self.formatTime(record, self.datefmt) s = self.formatMessage(record) if record.exc_info: # Cache the traceback text to avoid converting it multiple times # (it's constant anyway) if not record.exc_text: record.exc_text = self.formatException(record.exc_info) if record.exc_text: if s[-1:] != "\n": s = s + "\n" s = s + record.exc_text if record.stack_info: if s[-1:] != "\n": s = s + "\n" s = s + self.formatStack(record.stack_info) return s # # The default formatter to use when no other is specified # _defaultFormatter = Formatter() class BufferingFormatter(object): """ A formatter suitable for formatting a number of records. """ def __init__(self, linefmt=None): """ Optionally specify a formatter which will be used to format each individual record. """ if linefmt: self.linefmt = linefmt else: self.linefmt = _defaultFormatter def formatHeader(self, records): """ Return the header string for the specified records. """ return "" def formatFooter(self, records): """ Return the footer string for the specified records. """ return "" def format(self, records): """ Format the specified records and return the result as a string. """ rv = "" if len(records) > 0: rv = rv + self.formatHeader(records) for record in records: rv = rv + self.linefmt.format(record) rv = rv + self.formatFooter(records) return rv #--------------------------------------------------------------------------- # Filter classes and functions #--------------------------------------------------------------------------- class Filter(object): """ Filter instances are used to perform arbitrary filtering of LogRecords. Loggers and Handlers can optionally use Filter instances to filter records as desired. The base filter class only allows events which are below a certain point in the logger hierarchy. For example, a filter initialized with "A.B" will allow events logged by loggers "A.B", "A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If initialized with the empty string, all events are passed. """ def __init__(self, name=''): """ Initialize a filter. Initialize with the name of the logger which, together with its children, will have its events allowed through the filter. If no name is specified, allow every event. """ self.name = name self.nlen = len(name) def filter(self, record): """ Determine if the specified record is to be logged. Is the specified record to be logged? Returns 0 for no, nonzero for yes. If deemed appropriate, the record may be modified in-place. """ if self.nlen == 0: return 1 elif self.name == record.name: return 1 elif record.name.find(self.name, 0, self.nlen) != 0: return 0 return (record.name[self.nlen] == ".") class Filterer(object): """ A base class for loggers and handlers which allows them to share common code. """ def __init__(self): """ Initialize the list of filters to be an empty list. """ self.filters = [] def addFilter(self, filter): """ Add the specified filter to this handler. """ if not (filter in self.filters): self.filters.append(filter) def removeFilter(self, filter): """ Remove the specified filter from this handler. """ if filter in self.filters: self.filters.remove(filter) def filter(self, record): """ Determine if a record is loggable by consulting all the filters. The default is to allow the record to be logged; any filter can veto this and the record is then dropped. Returns a zero value if a record is to be dropped, else non-zero. .. versionchanged: 3.2 Allow filters to be just callables. 
""" rv = 1 for f in self.filters: if hasattr(f, 'filter'): result = f.filter(record) else: result = f(record) # assume callable - will raise if not if not result: rv = 0 break return rv #--------------------------------------------------------------------------- # Handler classes and functions #--------------------------------------------------------------------------- any _handlers = weakref.WeakValueDictionary() #map of handler names to handlers any _handlerList = [] # added to allow handlers to be removed in reverse of order initialized def _removeHandlerRef(wr): """ Remove a handler reference from the internal cleanup list. """ # This function can be called during module teardown, when globals are # set to None. If _acquireLock is None, assume this is the case and do # nothing. if _acquireLock is not None: _acquireLock() try: if wr in _handlerList: _handlerList.remove(wr) finally: _releaseLock() def _addHandlerRef(handler): """ Add a handler to the internal cleanup list using a weak reference. """ _acquireLock() try: _handlerList.append(weakref.ref(handler, _removeHandlerRef)) finally: _releaseLock() class Handler(Filterer): """ Handler instances dispatch logging events to specific destinations. The base handler class. Acts as a placeholder which defines the Handler interface. Handlers can optionally use Formatter instances to format records as desired. By default, no formatter is specified; in this case, the 'raw' message as determined by record.message is logged. """ def __init__(self, level=NOTSET): """ Initializes the instance - basically setting the formatter to None and the filter list to empty. """ Filterer.__init__(self) self._name = None self.level = _checkLevel(level) self.formatter = None # Add the handler to the global _handlerList (for cleanup on shutdown) _addHandlerRef(self) self.createLock() def get_name(self): return self._name def set_name(self, name): _acquireLock() try: if self._name in _handlers: del _handlers[self._name] self._name = name if name: _handlers[name] = self finally: _releaseLock() #name = property(get_name, set_name) def createLock(self): """ Acquire a thread lock for serializing access to the underlying I/O. """ if thread: self.lock = threading.RLock() else: self.lock = None def acquire(self): """ Acquire the I/O thread lock. """ if self.lock: self.lock.acquire() def release(self): """ Release the I/O thread lock. """ if self.lock: self.lock.release() def setLevel(self, level): """ Set the logging level of this handler. """ self.level = _checkLevel(level) def format(self, record): """ Format the specified record. If a formatter is set, use it. Otherwise, use the default formatter for the module. """ if self.formatter: fmt = self.formatter else: fmt = _defaultFormatter return fmt.format(record) def emit(self, record): """ Do whatever it takes to actually log the specified logging record. This version is intended to be implemented by subclasses and so raises a NotImplementedError. """ raise NotImplementedError('emit must be implemented ' 'by Handler subclasses') def handle(self, record): """ Conditionally emit the specified logging record. Emission depends on filters which may have been added to the handler. Wrap the actual emission of the record with acquisition/release of the I/O thread lock. Returns whether the filter passed the record for emission. """ rv = self.filter(record) if rv: self.acquire() try: self.emit(record) finally: self.release() return rv def setFormatter(self, fmt): """ Set the formatter for this handler. 
""" self.formatter = fmt def flush(self): """ Ensure all logging output has been flushed. This version does nothing and is intended to be implemented by subclasses. """ pass def close(self): """ Tidy up any resources used by the handler. This version removes the handler from an internal map of handlers, _handlers, which is used for handler lookup by name. Subclasses should ensure that this gets called from overridden close() methods. """ #get the module data lock, as we're updating a shared structure. _acquireLock() try: #unlikely to raise an exception, but you never know... if self._name and self._name in _handlers: del _handlers[self._name] finally: _releaseLock() def handleError(self, record): """ Handle errors which occur during an emit() call. This method should be called from handlers when an exception is encountered during an emit() call. If raiseExceptions is false, exceptions get silently ignored. This is what is mostly wanted for a logging system - most users will not care about errors in the logging system, they are more interested in application errors. You could, however, replace this with a custom handler if you wish. The record which was being processed is passed in to this method. """ if raiseExceptions and sys.stderr: # see issue 13807 ei = sys.exc_info() try: traceback.print_exception(ei[0], ei[1], ei[2], None, sys.stderr) sys.stderr.write('Logged from file %s, line %s\n' % ( record.filename, record.lineno)) except IOError: pass # see issue 5971 finally: ei = None class StreamHandler(Handler): """ A handler class which writes logging records, appropriately formatted, to a stream. Note that this class does not close the stream, as sys.stdout or sys.stderr may be used. """ terminator = '\n' def __init__(self, stream=None): """ Initialize the handler. If stream is not specified, sys.stderr is used. """ Handler.__init__(self) if stream is None: stream = sys.stderr self.stream = stream def flush(self): """ Flushes the stream. """ if self.stream and hasattr(self.stream, "flush"): self.stream.flush() def emit(self, record): """ Emit a record. If a formatter is specified, it is used to format the record. The record is then written to the stream with a trailing newline. If exception information is present, it is formatted using traceback.print_exception and appended to the stream. If the stream has an 'encoding' attribute, it is used to determine how to do the output to the stream. """ try: msg = self.format(record) stream = self.stream stream.write(msg) stream.write(self.terminator) self.flush() except (KeyboardInterrupt, SystemExit): raise except: self.handleError(record) class FileHandler(StreamHandler): """ A handler class which writes formatted logging records to disk files. """ def __init__(self, filename, mode='a', encoding=None, delay=0): """ Open the specified file and use it as the stream for logging. """ #keep the absolute path, otherwise derived classes which use this #may come a cropper when the current directory changes if codecs is None: encoding = None self.baseFilename = os.path.abspath(filename) self.mode = mode self.encoding = encoding if delay: #We don't open the stream, but we still need to call the #Handler constructor to set level, formatter, lock etc. Handler.__init__(self) self.stream = None else: StreamHandler.__init__(self, self._open()) def close(self): """ Closes the stream. 
""" if self.stream: self.flush() if hasattr(self.stream, "close"): self.stream.close() StreamHandler.close(self) self.stream = None def _open(self): """ Open the current base file with the (original) mode and encoding. Return the resulting stream. """ if self.encoding is None: stream = open(self.baseFilename, self.mode) else: stream = codecs.open(self.baseFilename, self.mode, self.encoding) return stream def emit(self, record): """ Emit a record. If the stream was not opened because 'delay' was specified in the constructor, open it before calling the superclass's emit. """ if self.stream is None: self.stream = self._open() StreamHandler.emit(self, record) class _StderrHandler(StreamHandler): """ This class is like a StreamHandler using sys.stderr, but always uses whatever sys.stderr is currently set to rather than the value of sys.stderr at handler construction time. """ def __init__(self, level=NOTSET): """ Initialize the handler. """ Handler.__init__(self, level) #@property #def stream(self): # return sys.stderr _defaultLastResort = _StderrHandler(WARNING) lastResort = _defaultLastResort #--------------------------------------------------------------------------- # Manager classes and functions #--------------------------------------------------------------------------- class PlaceHolder(object): """ PlaceHolder instances are used in the Manager logger hierarchy to take the place of nodes for which no loggers have been defined. This class is intended for internal use only and not as part of the public API. """ def __init__(self, alogger): """ Initialize with the specified logger being a child of this placeholder. """ self.loggerMap = { alogger : None } def append(self, alogger): """ Add the specified logger as a child of this placeholder. """ if alogger not in self.loggerMap: self.loggerMap[alogger] = None # # Determine which class to use when instantiating loggers. # any _loggerClass = None def setLoggerClass(klass): """ Set the class to be used when instantiating a logger. The class should define __init__() such that only a name argument is required, and the __init__() should call Logger.__init__() """ if klass != Logger: if not issubclass(klass, Logger): raise TypeError("logger not derived from logging.Logger: " + klass.__name__) global _loggerClass _loggerClass = klass def getLoggerClass(): """ Return the class to be used when instantiating a logger. """ return _loggerClass class Manager(object): """ There is [under normal circumstances] just one Manager instance, which holds the hierarchy of loggers. """ def __init__(self, rootnode): """ Initialize the manager with the root node of the logger hierarchy. """ self.root = rootnode self.disable = 0 self.emittedNoHandlerWarning = False self.loggerDict = {} self.loggerClass = None self.logRecordFactory = None def getLogger(self, name): """ Get a logger with the specified name (channel name), creating it if it doesn't yet exist. This name is a dot-separated hierarchical name, such as "a", "a.b", "a.b.c" or similar. If a PlaceHolder existed for the specified name [i.e. the logger didn't exist but a child of it did], replace it with the created logger and fix up the parent/child references which pointed to the placeholder to now point to the logger. 
""" rv = None if not isinstance(name, str): raise TypeError('A logger name must be a string') _acquireLock() try: if name in self.loggerDict: rv = self.loggerDict[name] if isinstance(rv, PlaceHolder): ph = rv rv = (self.loggerClass or _loggerClass)(name) rv.manager = self self.loggerDict[name] = rv self._fixupChildren(ph, rv) self._fixupParents(rv) else: rv = (self.loggerClass or _loggerClass)(name) rv.manager = self self.loggerDict[name] = rv self._fixupParents(rv) finally: _releaseLock() return rv def setLoggerClass(self, klass): """ Set the class to be used when instantiating a logger with this Manager. """ if klass != Logger: if not issubclass(klass, Logger): raise TypeError("logger not derived from logging.Logger: " + klass.__name__) self.loggerClass = klass def setLogRecordFactory(self, factory): """ Set the factory to be used when instantiating a log record with this Manager. """ self.logRecordFactory = factory def _fixupParents(self, alogger): """ Ensure that there are either loggers or placeholders all the way from the specified logger to the root of the logger hierarchy. """ name = alogger.name i = name.rfind(".") rv = None while (i > 0) and not rv: substr = name[:i] if substr not in self.loggerDict: self.loggerDict[substr] = PlaceHolder(alogger) else: obj = self.loggerDict[substr] if isinstance(obj, Logger): rv = obj else: assert isinstance(obj, PlaceHolder) obj.append(alogger) i = name.rfind(".", 0, i - 1) if not rv: rv = self.root alogger.parent = rv def _fixupChildren(self, ph, alogger): """ Ensure that children of the placeholder ph are connected to the specified logger. """ name = alogger.name namelen = len(name) for c in ph.loggerMap.keys(): #The if means ... if not c.parent.name.startswith(nm) if c.parent.name[:namelen] != name: alogger.parent = c.parent c.parent = alogger #--------------------------------------------------------------------------- # Logger classes and functions #--------------------------------------------------------------------------- class Logger(Filterer): """ Instances of the Logger class represent a single logging channel. A "logging channel" indicates an area of an application. Exactly how an "area" is defined is up to the application developer. Since an application can have any number of areas, logging channels are identified by a unique string. Application areas can be nested (e.g. an area of "input processing" might include sub-areas "read CSV files", "read XLS files" and "read Gnumeric files"). To cater for this natural nesting, channel names are organized into a namespace hierarchy where levels are separated by periods, much like the Java or Python package namespace. So in the instance given above, channel names might be "input" for the upper level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels. There is no arbitrary limit to the depth of nesting. """ any root any manager def __init__(self, name, level=NOTSET): """ Initialize the logger with a name and an optional level. """ Filterer.__init__(self) self.name = name self.level = _checkLevel(level) self.parent = None self.propagate = 1 self.handlers = [] self.disabled = 0 def setLevel(self, level): """ Set the logging level of this logger. """ self.level = _checkLevel(level) def debug(self, msg, *args, **kwargs): """ Log 'msg % args' with severity 'DEBUG'. To pass exception information, use the keyword argument exc_info with a true value, e.g. 
logger.debug("Houston, we have a %s", "thorny problem", exc_info=1) """ if self.isEnabledFor(DEBUG): self._log(DEBUG, msg, args, **kwargs) def info(self, msg, *args, **kwargs): """ Log 'msg % args' with severity 'INFO'. To pass exception information, use the keyword argument exc_info with a true value, e.g. logger.info("Houston, we have a %s", "interesting problem", exc_info=1) """ if self.isEnabledFor(INFO): self._log(INFO, msg, args, **kwargs) def warning(self, msg, *args, **kwargs): """ Log 'msg % args' with severity 'WARNING'. To pass exception information, use the keyword argument exc_info with a true value, e.g. logger.warning("Houston, we have a %s", "bit of a problem", exc_info=1) """ if self.isEnabledFor(WARNING): self._log(WARNING, msg, args, **kwargs) warn = warning def error(self, msg, *args, **kwargs): """ Log 'msg % args' with severity 'ERROR'. To pass exception information, use the keyword argument exc_info with a true value, e.g. logger.error("Houston, we have a %s", "major problem", exc_info=1) """ if self.isEnabledFor(ERROR): self._log(ERROR, msg, args, **kwargs) def exception(self, msg, *args, **kwargs): """ Convenience method for logging an ERROR with exception information. """ kwargs['exc_info'] = True self.error(msg, *args, **kwargs) def critical(self, msg, *args, **kwargs): """ Log 'msg % args' with severity 'CRITICAL'. To pass exception information, use the keyword argument exc_info with a true value, e.g. logger.critical("Houston, we have a %s", "major disaster", exc_info=1) """ if self.isEnabledFor(CRITICAL): self._log(CRITICAL, msg, args, **kwargs) fatal = critical def log(self, level, msg, *args, **kwargs): """ Log 'msg % args' with the integer severity 'level'. To pass exception information, use the keyword argument exc_info with a true value, e.g. logger.log(level, "We have a %s", "mysterious problem", exc_info=1) """ if not isinstance(level, int): if raiseExceptions: raise TypeError("level must be an integer") else: return if self.isEnabledFor(level): self._log(level, msg, args, **kwargs) def findCaller(self, stack_info=False): """ Find the stack frame of the caller so that we can note the source file name, line number and function name. """ f = currentframe() #On some versions of IronPython, currentframe() returns None if #IronPython isn't run with -X:Frames. if f is not None: f = f.f_back rv = "(unknown file)", 0, "(unknown function)", None while hasattr(f, "f_code"): co = f.f_code filename = os.path.normcase(co.co_filename) if filename == _srcfile: f = f.f_back continue sinfo = None if stack_info: sio = io.StringIO() sio.write('Stack (most recent call last):\n') traceback.print_stack(f, file=sio) sinfo = sio.getvalue() if sinfo[-1] == '\n': sinfo = sinfo[:-1] sio.close() rv = (co.co_filename, f.f_lineno, co.co_name, sinfo) break return rv def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=None, extra=None, sinfo=None): """ A factory method which can be overridden in subclasses to create specialized LogRecords. """ rv = _logRecordFactory(name, level, fn, lno, msg, args, exc_info, func, sinfo) if extra is not None: for key in extra: if (key in ["message", "asctime"]) or (key in rv.__dict__): raise KeyError("Attempt to overwrite %r in LogRecord" % key) rv.__dict__[key] = extra[key] return rv def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False): """ Low-level logging routine which creates a LogRecord and then calls all the handlers of this logger to handle the record. 
""" sinfo = None if _srcfile: #IronPython doesn't track Python frames, so findCaller throws an #exception on some versions of IronPython. We trap it here so that #IronPython can use logging. try: fn, lno, func, sinfo = self.findCaller(stack_info) except ValueError: fn, lno, func = "(unknown file)", 0, "(unknown function)" else: fn, lno, func = "(unknown file)", 0, "(unknown function)" if exc_info: if not isinstance(exc_info, tuple): exc_info = sys.exc_info() record = self.makeRecord(self.name, level, fn, lno, msg, args, exc_info, func, extra, sinfo) self.handle(record) def handle(self, record): """ Call the handlers for the specified record. This method is used for unpickled records received from a socket, as well as those created locally. Logger-level filtering is applied. """ if (not self.disabled) and self.filter(record): self.callHandlers(record) def addHandler(self, hdlr): """ Add the specified handler to this logger. """ _acquireLock() try: if not (hdlr in self.handlers): self.handlers.append(hdlr) finally: _releaseLock() def removeHandler(self, hdlr): """ Remove the specified handler from this logger. """ _acquireLock() try: if hdlr in self.handlers: self.handlers.remove(hdlr) finally: _releaseLock() def hasHandlers(self): """ See if this logger has any handlers configured. Loop through all handlers for this logger and its parents in the logger hierarchy. Return True if a handler was found, else False. Stop searching up the hierarchy whenever a logger with the "propagate" attribute set to zero is found - that will be the last logger which is checked for the existence of handlers. """ c = self rv = False while c: if c.handlers: rv = True break if not c.propagate: break else: c = c.parent return rv def callHandlers(self, record): """ Pass a record to all relevant handlers. Loop through all handlers for this logger and its parents in the logger hierarchy. If no handler was found, output a one-off error message to sys.stderr. Stop searching up the hierarchy whenever a logger with the "propagate" attribute set to zero is found - that will be the last logger whose handlers are called. """ c = self found = 0 while c: for hdlr in c.handlers: found = found + 1 if record.levelno >= hdlr.level: hdlr.handle(record) if not c.propagate: c = None #break out else: c = c.parent if (found == 0): if lastResort: if record.levelno >= lastResort.level: lastResort.handle(record) elif raiseExceptions and not self.manager.emittedNoHandlerWarning: sys.stderr.write("No handlers could be found for logger" " \"%s\"\n" % self.name) self.manager.emittedNoHandlerWarning = True def getEffectiveLevel(self): """ Get the effective level for this logger. Loop through this logger and its parents in the logger hierarchy, looking for a non-zero logging level. Return the first one found. """ logger = self while logger: if logger.level: return logger.level logger = logger.parent return NOTSET def isEnabledFor(self, level): """ Is this logger enabled for level 'level'? """ if self.manager.disable >= level: return 0 return level >= self.getEffectiveLevel() def getChild(self, suffix): """ Get a logger which is a descendant to this one. This is a convenience method, such that logging.getLogger('abc').getChild('def.ghi') is the same as logging.getLogger('abc.def.ghi') It's useful, for example, when the parent logger is named using __name__ rather than a literal string. 
""" if self.root is not self: suffix = '.'.join((self.name, suffix)) return self.manager.getLogger(suffix) class RootLogger(Logger): """ A root logger is not that different to any other logger, except that it must have a logging level and there is only one instance of it in the hierarchy. """ def __init__(self, level): """ Initialize the logger with the name "root". """ Logger.__init__(self, "root", level) _loggerClass = Logger class LoggerAdapter(object): """ An adapter for loggers which makes it easier to specify contextual information in logging output. """ def __init__(self, logger, extra): """ Initialize the adapter with a logger and a dict-like object which provides contextual information. This constructor signature allows easy stacking of LoggerAdapters, if so desired. You can effectively pass keyword arguments as shown in the following example: adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) """ self.logger = logger self.extra = extra def process(self, msg, kwargs): """ Process the logging message and keyword arguments passed in to a logging call to insert contextual information. You can either manipulate the message itself, the keyword args or both. Return the message and kwargs modified (or not) to suit your needs. Normally, you'll only need to override this one method in a LoggerAdapter subclass for your specific needs. """ kwargs["extra"] = self.extra return msg, kwargs # # Boilerplate convenience methods # def debug(self, msg, *args, **kwargs): """ Delegate a debug call to the underlying logger. """ self.log(DEBUG, msg, *args, **kwargs) def info(self, msg, *args, **kwargs): """ Delegate an info call to the underlying logger. """ self.log(INFO, msg, *args, **kwargs) def warning(self, msg, *args, **kwargs): """ Delegate a warning call to the underlying logger. """ self.log(WARNING, msg, *args, **kwargs) warn = warning def error(self, msg, *args, **kwargs): """ Delegate an error call to the underlying logger. """ self.log(ERROR, msg, *args, **kwargs) def exception(self, msg, *args, **kwargs): """ Delegate an exception call to the underlying logger. """ kwargs["exc_info"] = 1 self.log(ERROR, msg, *args, **kwargs) def critical(self, msg, *args, **kwargs): """ Delegate a critical call to the underlying logger. """ self.log(CRITICAL, msg, *args, **kwargs) def log(self, level, msg, *args, **kwargs): """ Delegate a log call to the underlying logger, after adding contextual information from this adapter instance. """ if self.isEnabledFor(level): msg, kwargs = self.process(msg, kwargs) self.logger._log(level, msg, args, **kwargs) def isEnabledFor(self, level): """ Is this logger enabled for level 'level'? """ if self.logger.manager.disable >= level: return False return level >= self.getEffectiveLevel() def setLevel(self, level): """ Set the specified level on the underlying logger. """ self.logger.setLevel(level) def getEffectiveLevel(self): """ Get the effective level for the underlying logger. """ return self.logger.getEffectiveLevel() def hasHandlers(self): """ See if the underlying logger has any handlers. """ return self.logger.hasHandlers() root = RootLogger(WARNING) Logger.root = root Logger.manager = Manager(Logger.root) #--------------------------------------------------------------------------- # Configuration classes and functions #--------------------------------------------------------------------------- BASIC_FORMAT = "%(levelname)s:%(name)s:%(message)s" def basicConfig(**kwargs): """ Do basic configuration for the logging system. 
This function does nothing if the root logger already has handlers configured. It is a convenience method intended for use by simple scripts to do one-shot configuration of the logging package. The default behaviour is to create a StreamHandler which writes to sys.stderr, set a formatter using the BASIC_FORMAT format string, and add the handler to the root logger. A number of optional keyword arguments may be specified, which can alter the default behaviour. filename Specifies that a FileHandler be created, using the specified filename, rather than a StreamHandler. filemode Specifies the mode to open the file, if filename is specified (if filemode is unspecified, it defaults to 'a'). format Use the specified format string for the handler. datefmt Use the specified date/time format. style If a format string is specified, use this to specify the type of format string (possible values '%', '{', '$', for %-formatting, :meth:`str.format` and :class:`string.Template` - defaults to '%'). level Set the root logger level to the specified level. stream Use the specified stream to initialize the StreamHandler. Note that this argument is incompatible with 'filename' - if both are present, 'stream' is ignored. Note that you could specify a stream created using open(filename, mode) rather than passing the filename and mode in. However, it should be remembered that StreamHandler does not close its stream (since it may be using sys.stdout or sys.stderr), whereas FileHandler closes its stream when the handler is closed. .. versionchanged: 3.2 Added the ``style`` parameter. """ # Add thread safety in case someone mistakenly calls # basicConfig() from multiple threads _acquireLock() try: if len(root.handlers) == 0: filename = kwargs.get("filename") if filename: mode = kwargs.get("filemode", 'a') hdlr = FileHandler(filename, mode) else: stream = kwargs.get("stream") hdlr = StreamHandler(stream) fs = kwargs.get("format", BASIC_FORMAT) dfs = kwargs.get("datefmt", None) style = kwargs.get("style", '%') fmt = Formatter(fs, dfs, style) hdlr.setFormatter(fmt) root.addHandler(hdlr) level = kwargs.get("level") if level is not None: root.setLevel(level) finally: _releaseLock() #--------------------------------------------------------------------------- # Utility functions at module level. # Basically delegate everything to the root logger. #--------------------------------------------------------------------------- def getLogger(name=None): """ Return a logger with the specified name, creating it if necessary. If no name is specified, return the root logger. """ if name: return Logger.manager.getLogger(name) else: return root def critical(msg, *args, **kwargs): """ Log a message with severity 'CRITICAL' on the root logger. If the logger has no handlers, call basicConfig() to add a console handler with a pre-defined format. """ if len(root.handlers) == 0: basicConfig() root.critical(msg, *args, **kwargs) fatal = critical def error(msg, *args, **kwargs): """ Log a message with severity 'ERROR' on the root logger. If the logger has no handlers, call basicConfig() to add a console handler with a pre-defined format. """ if len(root.handlers) == 0: basicConfig() root.error(msg, *args, **kwargs) def exception(msg, *args, **kwargs): """ Log a message with severity 'ERROR' on the root logger, with exception information. If the logger has no handlers, basicConfig() is called to add a console handler with a pre-defined format. 
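    For example (an illustrative sketch, assuming the module has been imported
    as ``logging``):

        try:
            1 / 0
        except ZeroDivisionError:
            logging.exception('division failed')

    logs an ERROR record for the message together with the current traceback.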
""" kwargs['exc_info'] = True error(msg, *args, **kwargs) def warning(msg, *args, **kwargs): """ Log a message with severity 'WARNING' on the root logger. If the logger has no handlers, call basicConfig() to add a console handler with a pre-defined format. """ if len(root.handlers) == 0: basicConfig() root.warning(msg, *args, **kwargs) warn = warning def info(msg, *args, **kwargs): """ Log a message with severity 'INFO' on the root logger. If the logger has no handlers, call basicConfig() to add a console handler with a pre-defined format. """ if len(root.handlers) == 0: basicConfig() root.info(msg, *args, **kwargs) def debug(msg, *args, **kwargs): """ Log a message with severity 'DEBUG' on the root logger. If the logger has no handlers, call basicConfig() to add a console handler with a pre-defined format. """ if len(root.handlers) == 0: basicConfig() root.debug(msg, *args, **kwargs) def log(level, msg, *args, **kwargs): """ Log 'msg % args' with the integer severity 'level' on the root logger. If the logger has no handlers, call basicConfig() to add a console handler with a pre-defined format. """ if len(root.handlers) == 0: basicConfig() root.log(level, msg, *args, **kwargs) def disable(level): """ Disable all logging calls of severity 'level' and below. """ root.manager.disable = level def shutdown(handlerList=_handlerList): """ Perform any cleanup actions in the logging system (e.g. flushing buffers). Should be called at application exit. """ for wr in reversed(handlerList[:]): #errors might occur, for example, if files are locked #we just ignore them if raiseExceptions is not set try: h = wr() if h: try: h.acquire() h.flush() h.close() except (IOError, ValueError): # Ignore errors which might be caused # because handlers have been closed but # references to them are still around at # application exit. pass finally: h.release() except: if raiseExceptions: raise #else, swallow #Let's try and shutdown automatically on application exit... import atexit atexit.register(shutdown) # Null handler class NullHandler(Handler): """ This handler does nothing. It's intended to be used to avoid the "No handlers could be found for logger XXX" one-off warning. This is important for library code, which may contain code to log events. If a user of the library does not configure logging, the one-off warning might be produced; to avoid this, the library developer simply needs to instantiate a NullHandler and add it to the top-level logger of the library module or package. """ def handle(self, record): pass def emit(self, record): pass def createLock(self): self.lock = None # Warnings integration any _warnings_showwarning = None def _showwarning(message, category, filename, lineno, file=None, line=None): """ Implementation of showwarnings which redirects to logging, which will first check to see if the file parameter is None. If a file is specified, it will delegate to the original warnings implementation of showwarning. Otherwise, it will call warnings.formatwarning and will log the resulting string to a warnings logger named "py.warnings" with level logging.WARNING. """ if file is not None: if _warnings_showwarning is not None: _warnings_showwarning(message, category, filename, lineno, file, line) else: s = warnings.formatwarning(message, category, filename, lineno, line) logger = getLogger("py.warnings") if not logger.handlers: logger.addHandler(NullHandler()) logger.warning("%s", s) def captureWarnings(capture): """ If capture is true, redirect all warnings to the logging package. 
If capture is False, ensure that warnings are not redirected to logging but to their original destinations. """ global _warnings_showwarning if capture: if _warnings_showwarning is None: _warnings_showwarning = warnings.showwarning warnings.showwarning = _showwarning else: if _warnings_showwarning is not None: warnings.showwarning = _warnings_showwarning _warnings_showwarning = None mypy-0.560/test-data/stdlib-samples/3.2/incomplete/urllib/0000755€tŠÔÚ€2›s®0000000000013215007243027434 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/stdlib-samples/3.2/incomplete/urllib/__init__.py0000644€tŠÔÚ€2›s®0000000000013215007205031531 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/stdlib-samples/3.2/incomplete/urllib/parse.py0000644€tŠÔÚ€2›s®0000010510013215007205031113 0ustar jukkaDROPBOX\Domain Users00000000000000"""Parse (absolute and relative) URLs. urlparse module is based upon the following RFC specifications. RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding and L. Masinter, January 2005. RFC 2732 : "Format for Literal IPv6 Addresses in URLs" by R.Hinden, B.Carpenter and L.Masinter, December 1999. RFC 2396: "Uniform Resource Identifiers (URI)": Generic Syntax by T. Berners-Lee, R. Fielding, and L. Masinter, August 1998. RFC 2368: "The mailto URL scheme", by P.Hoffman , L Masinter, J. Zawinski, July 1998. RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June 1995. RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M. McCahill, December 1994 RFC 3986 is considered the current standard and any future changes to urlparse module should conform with it. The urlparse module is currently not entirely compliant with this RFC due to defacto scenarios for parsing, and for backward compatibility purposes, some parsing quirks from older RFCs are retained. The testcases in test_urlparse.py provides a good indicator of parsing behavior. 
""" import sys import collections __all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag", "urlsplit", "urlunsplit", "urlencode", "parse_qs", "parse_qsl", "quote", "quote_plus", "quote_from_bytes", "unquote", "unquote_plus", "unquote_to_bytes"] # A classification of schemes ('' means apply by default) uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap', 'wais', 'file', 'https', 'shttp', 'mms', 'prospero', 'rtsp', 'rtspu', '', 'sftp', 'svn', 'svn+ssh'] uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet', 'imap', 'wais', 'file', 'mms', 'https', 'shttp', 'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '', 'svn', 'svn+ssh', 'sftp', 'nfs', 'git', 'git+ssh'] non_hierarchical = ['gopher', 'hdl', 'mailto', 'news', 'telnet', 'wais', 'imap', 'snews', 'sip', 'sips'] uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap', 'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips', 'mms', '', 'sftp'] uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms', 'gopher', 'rtsp', 'rtspu', 'sip', 'sips', ''] uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news', 'nntp', 'wais', 'https', 'shttp', 'snews', 'file', 'prospero', ''] # Characters valid in scheme names scheme_chars = ('abcdefghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' '0123456789' '+-.') # XXX: Consider replacing with functools.lru_cache MAX_CACHE_SIZE = 20 _parse_cache = {} def clear_cache(): """Clear the parse cache and the quoters cache.""" _parse_cache.clear() _safe_quoters.clear() # Helpers for bytes handling # For 3.2, we deliberately require applications that # handle improperly quoted URLs to do their own # decoding and encoding. If valid use cases are # presented, we may relax this by using latin-1 # decoding internally for 3.3 _implicit_encoding = 'ascii' _implicit_errors = 'strict' def _noop(obj): return obj def _encode_result(obj, encoding=_implicit_encoding, errors=_implicit_errors): return obj.encode(encoding, errors) def _decode_args(args, encoding=_implicit_encoding, errors=_implicit_errors): return tuple(x.decode(encoding, errors) if x else '' for x in args) def _coerce_args(*args): # Invokes decode if necessary to create str args # and returns the coerced inputs along with # an appropriate result coercion function # - noop for str inputs # - encoding function otherwise str_input = isinstance(args[0], str) for arg in args[1:]: # We special-case the empty string to support the # "scheme=''" default argument to some functions if arg and isinstance(arg, str) != str_input: raise TypeError("Cannot mix str and non-str arguments") if str_input: return args + (_noop,) return _decode_args(args) + (_encode_result,) # Result objects are more helpful than simple tuples class _ResultMixinStr(object): """Standard approach to encoding parsed results from str to bytes""" __slots__ = () def encode(self, encoding='ascii', errors='strict'): return self._encoded_counterpart(*(x.encode(encoding, errors) for x in self)) class _ResultMixinBytes(object): """Standard approach to decoding parsed results from bytes to str""" __slots__ = () def decode(self, encoding='ascii', errors='strict'): return self._decoded_counterpart(*(x.decode(encoding, errors) for x in self)) class _NetlocResultMixinBase(object): """Shared methods for the parsed result objects containing a netloc element""" __slots__ = () @property def username(self): return self._userinfo[0] @property def password(self): return self._userinfo[1] @property def hostname(self): hostname = self._hostinfo[0] if not hostname: hostname = None elif hostname is not None: hostname = 
hostname.lower() return hostname @property def port(self): port = self._hostinfo[1] if port is not None: port = int(port, 10) return port class _NetlocResultMixinStr(_NetlocResultMixinBase, _ResultMixinStr): __slots__ = () @property def _userinfo(self): netloc = self.netloc userinfo, have_info, hostinfo = netloc.rpartition('@') if have_info: username, have_password, password = userinfo.partition(':') if not have_password: password = None else: username = password = None return username, password @property def _hostinfo(self): netloc = self.netloc _, _, hostinfo = netloc.rpartition('@') _, have_open_br, bracketed = hostinfo.partition('[') if have_open_br: hostname, _, port = bracketed.partition(']') _, have_port, port = port.partition(':') else: hostname, have_port, port = hostinfo.partition(':') if not have_port: port = None return hostname, port class _NetlocResultMixinBytes(_NetlocResultMixinBase, _ResultMixinBytes): __slots__ = () @property def _userinfo(self): netloc = self.netloc userinfo, have_info, hostinfo = netloc.rpartition(b'@') if have_info: username, have_password, password = userinfo.partition(b':') if not have_password: password = None else: username = password = None return username, password @property def _hostinfo(self): netloc = self.netloc _, _, hostinfo = netloc.rpartition(b'@') _, have_open_br, bracketed = hostinfo.partition(b'[') if have_open_br: hostname, _, port = bracketed.partition(b']') _, have_port, port = port.partition(b':') else: hostname, have_port, port = hostinfo.partition(b':') if not have_port: port = None return hostname, port from collections import namedtuple _DefragResultBase = namedtuple('DefragResult', 'url fragment') _SplitResultBase = namedtuple('SplitResult', 'scheme netloc path query fragment') _ParseResultBase = namedtuple('ParseResult', 'scheme netloc path params query fragment') # For backwards compatibility, alias _NetlocResultMixinStr # ResultBase is no longer part of the documented API, but it is # retained since deprecating it isn't worth the hassle ResultBase = _NetlocResultMixinStr # Structured result objects for string data class DefragResult(_DefragResultBase, _ResultMixinStr): __slots__ = () def geturl(self): if self.fragment: return self.url + '#' + self.fragment else: return self.url class SplitResult(_SplitResultBase, _NetlocResultMixinStr): __slots__ = () def geturl(self): return urlunsplit(self) class ParseResult(_ParseResultBase, _NetlocResultMixinStr): __slots__ = () def geturl(self): return urlunparse(self) # Structured result objects for bytes data class DefragResultBytes(_DefragResultBase, _ResultMixinBytes): __slots__ = () def geturl(self): if self.fragment: return self.url + b'#' + self.fragment else: return self.url class SplitResultBytes(_SplitResultBase, _NetlocResultMixinBytes): __slots__ = () def geturl(self): return urlunsplit(self) class ParseResultBytes(_ParseResultBase, _NetlocResultMixinBytes): __slots__ = () def geturl(self): return urlunparse(self) # Set up the encode/decode result pairs def _fix_result_transcoding(): _result_pairs = ( (DefragResult, DefragResultBytes), (SplitResult, SplitResultBytes), (ParseResult, ParseResultBytes), ) for _decoded, _encoded in _result_pairs: _decoded._encoded_counterpart = _encoded _encoded._decoded_counterpart = _decoded _fix_result_transcoding() del _fix_result_transcoding def urlparse(url, scheme='', allow_fragments=True): """Parse a URL into 6 components: :///;?# Return a 6-tuple: (scheme, netloc, path, params, query, fragment). 
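    For example:

        urlparse('http://netloc/path;params?query=arg#frag')

    returns ParseResult(scheme='http', netloc='netloc', path='/path',
    params='params', query='query=arg', fragment='frag').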
Note that we don't break the components up in smaller bits (e.g. netloc is a single string) and we don't expand % escapes.""" url, scheme, _coerce_result = _coerce_args(url, scheme) tuple = urlsplit(url, scheme, allow_fragments) scheme, netloc, url, query, fragment = tuple if scheme in uses_params and ';' in url: url, params = _splitparams(url) else: params = '' result = ParseResult(scheme, netloc, url, params, query, fragment) return _coerce_result(result) def _splitparams(url): if '/' in url: i = url.find(';', url.rfind('/')) if i < 0: return url, '' else: i = url.find(';') return url[:i], url[i+1:] def _splitnetloc(url, start=0): delim = len(url) # position of end of domain part of url, default is end for c in '/?#': # look for delimiters; the order is NOT important wdelim = url.find(c, start) # find first of this delim if wdelim >= 0: # if found delim = min(delim, wdelim) # use earliest delim position return url[start:delim], url[delim:] # return (domain, rest) def urlsplit(url, scheme='', allow_fragments=True): """Parse a URL into 5 components: :///?# Return a 5-tuple: (scheme, netloc, path, query, fragment). Note that we don't break the components up in smaller bits (e.g. netloc is a single string) and we don't expand % escapes.""" url, scheme, _coerce_result = _coerce_args(url, scheme) allow_fragments = bool(allow_fragments) key = url, scheme, allow_fragments, type(url), type(scheme) cached = _parse_cache.get(key, None) if cached: return _coerce_result(cached) if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth clear_cache() netloc = query = fragment = '' i = url.find(':') if i > 0: if url[:i] == 'http': # optimize the common case scheme = url[:i].lower() url = url[i+1:] if url[:2] == '//': netloc, url = _splitnetloc(url, 2) if (('[' in netloc and ']' not in netloc) or (']' in netloc and '[' not in netloc)): raise ValueError("Invalid IPv6 URL") if allow_fragments and '#' in url: url, fragment = url.split('#', 1) if '?' in url: url, query = url.split('?', 1) v = SplitResult(scheme, netloc, url, query, fragment) _parse_cache[key] = v return _coerce_result(v) for c in url[:i]: if c not in scheme_chars: break else: try: # make sure "url" is not actually a port number (in which case # "scheme" is really part of the path _testportnum = int(url[i+1:]) except ValueError: scheme, url = url[:i].lower(), url[i+1:] if url[:2] == '//': netloc, url = _splitnetloc(url, 2) if (('[' in netloc and ']' not in netloc) or (']' in netloc and '[' not in netloc)): raise ValueError("Invalid IPv6 URL") if allow_fragments and scheme in uses_fragment and '#' in url: url, fragment = url.split('#', 1) if scheme in uses_query and '?' in url: url, query = url.split('?', 1) v = SplitResult(scheme, netloc, url, query, fragment) _parse_cache[key] = v return _coerce_result(v) def urlunparse(components): """Put a parsed URL back together again. This may result in a slightly different, but equivalent URL, if the URL that was parsed originally had redundant delimiters, e.g. a ? with an empty query (the draft states that these are equivalent).""" scheme, netloc, url, params, query, fragment, _coerce_result = ( _coerce_args(*components)) if params: url = "%s;%s" % (url, params) return _coerce_result(urlunsplit((scheme, netloc, url, query, fragment))) def urlunsplit(components): """Combine the elements of a tuple as returned by urlsplit() into a complete URL as a string. The data argument can be any five-item iterable. 
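    For example:

        urlunsplit(('http', 'example.com', '/path', 'q=1', 'frag'))

    returns 'http://example.com/path?q=1#frag'.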
This may result in a slightly different, but equivalent URL, if the URL that was parsed originally had unnecessary delimiters (for example, a ? with an empty query; the RFC states that these are equivalent).""" scheme, netloc, url, query, fragment, _coerce_result = ( _coerce_args(*components)) if netloc or (scheme and scheme in uses_netloc and url[:2] != '//'): if url and url[:1] != '/': url = '/' + url url = '//' + (netloc or '') + url if scheme: url = scheme + ':' + url if query: url = url + '?' + query if fragment: url = url + '#' + fragment return _coerce_result(url) def urljoin(base, url, allow_fragments=True): """Join a base URL and a possibly relative URL to form an absolute interpretation of the latter.""" if not base: return url if not url: return base base, url, _coerce_result = _coerce_args(base, url) bscheme, bnetloc, bpath, bparams, bquery, bfragment = \ urlparse(base, '', allow_fragments) scheme, netloc, path, params, query, fragment = \ urlparse(url, bscheme, allow_fragments) if scheme != bscheme or scheme not in uses_relative: return _coerce_result(url) if scheme in uses_netloc: if netloc: return _coerce_result(urlunparse((scheme, netloc, path, params, query, fragment))) netloc = bnetloc if path[:1] == '/': return _coerce_result(urlunparse((scheme, netloc, path, params, query, fragment))) if not path and not params: path = bpath params = bparams if not query: query = bquery return _coerce_result(urlunparse((scheme, netloc, path, params, query, fragment))) segments = bpath.split('/')[:-1] + path.split('/') # XXX The stuff below is bogus in various ways... if segments[-1] == '.': segments[-1] = '' while '.' in segments: segments.remove('.') while 1: i = 1 n = len(segments) - 1 while i < n: if (segments[i] == '..' and segments[i-1] not in ('', '..')): del segments[i-1:i+1] break i = i+1 else: break if segments == ['', '..']: segments[-1] = '' elif len(segments) >= 2 and segments[-1] == '..': segments[-2:] = [''] return _coerce_result(urlunparse((scheme, netloc, '/'.join(segments), params, query, fragment))) def urldefrag(url): """Removes any existing fragment from URL. Returns a tuple of the defragmented URL and the fragment. If the URL contained no fragments, the second element is the empty string. """ url, _coerce_result = _coerce_args(url) if '#' in url: s, n, p, a, q, frag = urlparse(url) defrag = urlunparse((s, n, p, a, q, '')) else: frag = '' defrag = url return _coerce_result(DefragResult(defrag, frag)) def unquote_to_bytes(string): """unquote_to_bytes('abc%20def') -> b'abc def'.""" # Note: strings are encoded as UTF-8. This is only an issue if it contains # unescaped non-ASCII characters, which URIs should not. if not string: # Is it a string-like object? string.split return b'' if isinstance(string, str): string = string.encode('utf-8') res = string.split(b'%') if len(res) == 1: return string string = res[0] for item in res[1:]: try: string += bytes([int(item[:2], 16)]) + item[2:] except ValueError: string += b'%' + item return string def unquote(string, encoding='utf-8', errors='replace'): """Replace %xx escapes by their single-character equivalent. The optional encoding and errors parameters specify how to decode percent-encoded sequences into Unicode characters, as accepted by the bytes.decode() method. By default, percent-encoded sequences are decoded with UTF-8, and invalid sequences are replaced by a placeholder character. unquote('abc%20def') -> 'abc def'. 
""" if string == '': return string res = string.split('%') if len(res) == 1: return string if encoding is None: encoding = 'utf-8' if errors is None: errors = 'replace' # pct_sequence: contiguous sequence of percent-encoded bytes, decoded pct_sequence = b'' string = res[0] for item in res[1:]: try: if not item: raise ValueError pct_sequence += bytes.fromhex(item[:2]) rest = item[2:] if not rest: # This segment was just a single percent-encoded character. # May be part of a sequence of code units, so delay decoding. # (Stored in pct_sequence). continue except ValueError: rest = '%' + item # Encountered non-percent-encoded characters. Flush the current # pct_sequence. string += pct_sequence.decode(encoding, errors) + rest pct_sequence = b'' if pct_sequence: # Flush the final pct_sequence string += pct_sequence.decode(encoding, errors) return string def parse_qs(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace'): """Parse a query given as a string argument. Arguments: qs: percent-encoded query string to be parsed keep_blank_values: flag indicating whether blank values in percent-encoded queries should be treated as blank strings. A true value indicates that blanks should be retained as blank strings. The default false value indicates that blank values are to be ignored and treated as if they were not included. strict_parsing: flag indicating what to do with parsing errors. If false (the default), errors are silently ignored. If true, errors raise a ValueError exception. encoding and errors: specify how to decode percent-encoded sequences into Unicode characters, as accepted by the bytes.decode() method. """ dict = {} pairs = parse_qsl(qs, keep_blank_values, strict_parsing, encoding=encoding, errors=errors) for name, value in pairs: if name in dict: dict[name].append(value) else: dict[name] = [value] return dict def parse_qsl(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace'): """Parse a query given as a string argument. Arguments: qs: percent-encoded query string to be parsed keep_blank_values: flag indicating whether blank values in percent-encoded queries should be treated as blank strings. A true value indicates that blanks should be retained as blank strings. The default false value indicates that blank values are to be ignored and treated as if they were not included. strict_parsing: flag indicating what to do with parsing errors. If false (the default), errors are silently ignored. If true, errors raise a ValueError exception. encoding and errors: specify how to decode percent-encoded sequences into Unicode characters, as accepted by the bytes.decode() method. Returns a list, as G-d intended. 
""" qs, _coerce_result = _coerce_args(qs) pairs = [] for s1 in qs.split('&'): for s2 in s1.split(';'): pairs.append(s2) r = [] for name_value in pairs: if not name_value and not strict_parsing: continue nv = name_value.split('=', 1) if len(nv) != 2: if strict_parsing: raise ValueError("bad query field: %r" % (name_value,)) # Handle case of a control-name with no equal sign if keep_blank_values: nv.append('') else: continue if len(nv[1]) or keep_blank_values: name = nv[0].replace('+', ' ') name = unquote(name, encoding=encoding, errors=errors) name = _coerce_result(name) value = nv[1].replace('+', ' ') value = unquote(value, encoding=encoding, errors=errors) value = _coerce_result(value) r.append((name, value)) return r def unquote_plus(string, encoding='utf-8', errors='replace'): """Like unquote(), but also replace plus signs by spaces, as required for unquoting HTML form values. unquote_plus('%7e/abc+def') -> '~/abc def' """ string = string.replace('+', ' ') return unquote(string, encoding, errors) _ALWAYS_SAFE = frozenset(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ' b'abcdefghijklmnopqrstuvwxyz' b'0123456789' b'_.-') _ALWAYS_SAFE_BYTES = bytes(_ALWAYS_SAFE) _safe_quoters = {} class Quoter(collections.defaultdict): """A mapping from bytes (in range(0,256)) to strings. String values are percent-encoded byte values, unless the key < 128, and in the "safe" set (either the specified safe set, or default set). """ # Keeps a cache internally, using defaultdict, for efficiency (lookups # of cached keys don't call Python code at all). def __init__(self, safe): """safe: bytes object.""" self.safe = _ALWAYS_SAFE.union(safe) def __repr__(self): # Without this, will just display as a defaultdict return "" % dict(self) def __missing__(self, b): # Handle a cache miss. Store quoted string in cache and return. res = chr(b) if b in self.safe else '%{:02X}'.format(b) self[b] = res return res def quote(string, safe='/', encoding=None, errors=None): """quote('abc def') -> 'abc%20def' Each part of a URL, e.g. the path info, the query, etc., has a different set of reserved characters that must be quoted. RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists the following reserved characters. reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | "$" | "," Each of these characters is reserved in some component of a URL, but not necessarily in all of them. By default, the quote function is intended for quoting the path section of a URL. Thus, it will not encode '/'. This character is reserved, but in typical usage the quote function is being called on a path where the existing slash characters are used as reserved characters. string and safe may be either str or bytes objects. encoding must not be specified if string is a str. The optional encoding and errors parameters specify how to deal with non-ASCII characters, as accepted by the str.encode method. By default, encoding='utf-8' (characters are encoded with UTF-8), and errors='strict' (unsupported characters raise a UnicodeEncodeError). 
""" if isinstance(string, str): if not string: return string if encoding is None: encoding = 'utf-8' if errors is None: errors = 'strict' string = string.encode(encoding, errors) else: if encoding is not None: raise TypeError("quote() doesn't support 'encoding' for bytes") if errors is not None: raise TypeError("quote() doesn't support 'errors' for bytes") return quote_from_bytes(string, safe) def quote_plus(string, safe='', encoding=None, errors=None): """Like quote(), but also replace ' ' with '+', as required for quoting HTML form values. Plus signs in the original string are escaped unless they are included in safe. It also does not have safe default to '/'. """ # Check if ' ' in string, where string may either be a str or bytes. If # there are no spaces, the regular quote will produce the right answer. if ((isinstance(string, str) and ' ' not in string) or (isinstance(string, bytes) and b' ' not in string)): return quote(string, safe, encoding, errors) if isinstance(safe, str): space = ' ' else: space = b' ' string = quote(string, safe + space, encoding, errors) return string.replace(' ', '+') def quote_from_bytes(bs, safe='/'): """Like quote(), but accepts a bytes object rather than a str, and does not perform string-to-bytes encoding. It always returns an ASCII string. quote_from_bytes(b'abc def\xab') -> 'abc%20def%AB' """ if not isinstance(bs, (bytes, bytearray)): raise TypeError("quote_from_bytes() expected bytes") if not bs: return '' if isinstance(safe, str): # Normalize 'safe' by converting to bytes and removing non-ASCII chars safe = safe.encode('ascii', 'ignore') else: safe = bytes([c for c in safe if c < 128]) if not bs.rstrip(_ALWAYS_SAFE_BYTES + safe): return bs.decode() try: quoter = _safe_quoters[safe] except KeyError: _safe_quoters[safe] = quoter = Quoter(safe).__getitem__ return ''.join([quoter(char) for char in bs]) def urlencode(query, doseq=False, safe='', encoding=None, errors=None): """Encode a sequence of two-element tuples or dictionary into a URL query string. If any values in the query arg are sequences and doseq is true, each sequence element is converted to a separate parameter. If the query arg is a sequence of two-element tuples, the order of the parameters in the output will match the order of parameters in the input. The query arg may be either a string or a bytes type. When query arg is a string, the safe, encoding and error parameters are sent the quote_plus for encoding. """ if hasattr(query, "items"): query = query.items() else: # It's a bother at times that strings and string-like objects are # sequences. try: # non-sequence items should not work with len() # non-empty strings will fail this if len(query) and not isinstance(query[0], tuple): raise TypeError # Zero-length sequences of all types will get here and succeed, # but that's a minor nit. 
Since the original implementation # allowed empty dicts that type of behavior probably should be # preserved for consistency except TypeError: ty, va, tb = sys.exc_info() raise TypeError("not a valid non-string sequence " "or mapping object").with_traceback(tb) l = [] if not doseq: for k, v in query: if isinstance(k, bytes): k = quote_plus(k, safe) else: k = quote_plus(str(k), safe, encoding, errors) if isinstance(v, bytes): v = quote_plus(v, safe) else: v = quote_plus(str(v), safe, encoding, errors) l.append(k + '=' + v) else: for k, v in query: if isinstance(k, bytes): k = quote_plus(k, safe) else: k = quote_plus(str(k), safe, encoding, errors) if isinstance(v, bytes): v = quote_plus(v, safe) l.append(k + '=' + v) elif isinstance(v, str): v = quote_plus(v, safe, encoding, errors) l.append(k + '=' + v) else: try: # Is this a sufficient test for sequence-ness? x = len(v) except TypeError: # not a sequence v = quote_plus(str(v), safe, encoding, errors) l.append(k + '=' + v) else: # loop over the sequence for elt in v: if isinstance(elt, bytes): elt = quote_plus(elt, safe) else: elt = quote_plus(str(elt), safe, encoding, errors) l.append(k + '=' + elt) return '&'.join(l) # Utilities to parse URLs (most of these return None for missing parts): # unwrap('') --> 'type://host/path' # splittype('type:opaquestring') --> 'type', 'opaquestring' # splithost('//host[:port]/path') --> 'host[:port]', '/path' # splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]' # splitpasswd('user:passwd') -> 'user', 'passwd' # splitport('host:port') --> 'host', 'port' # splitquery('/path?query') --> '/path', 'query' # splittag('/path#tag') --> '/path', 'tag' # splitattr('/path;attr1=value1;attr2=value2;...') -> # '/path', ['attr1=value1', 'attr2=value2', ...] # splitvalue('attr=value') --> 'attr', 'value' # urllib.parse.unquote('abc%20def') -> 'abc def' # quote('abc def') -> 'abc%20def') def to_bytes(url): """to_bytes(u"URL") --> 'URL'.""" # Most URL schemes require ASCII. If that changes, the conversion # can be relaxed. 
# XXX get rid of to_bytes() if isinstance(url, str): try: url = url.encode("ASCII").decode() except UnicodeError: raise UnicodeError("URL " + repr(url) + " contains non-ASCII characters") return url def unwrap(url): """unwrap('') --> 'type://host/path'.""" url = str(url).strip() if url[:1] == '<' and url[-1:] == '>': url = url[1:-1].strip() if url[:4] == 'URL:': url = url[4:].strip() return url _typeprog = None def splittype(url): """splittype('type:opaquestring') --> 'type', 'opaquestring'.""" global _typeprog if _typeprog is None: import re _typeprog = re.compile('^([^/:]+):') match = _typeprog.match(url) if match: scheme = match.group(1) return scheme.lower(), url[len(scheme) + 1:] return None, url _hostprog = None def splithost(url): """splithost('//host[:port]/path') --> 'host[:port]', '/path'.""" global _hostprog if _hostprog is None: import re _hostprog = re.compile('^//([^/?]*)(.*)$') match = _hostprog.match(url) if match: host_port = match.group(1) path = match.group(2) if path and not path.startswith('/'): path = '/' + path return host_port, path return None, url _userprog = None def splituser(host): """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" global _userprog if _userprog is None: import re _userprog = re.compile('^(.*)@(.*)$') match = _userprog.match(host) if match: return match.group(1, 2) return None, host _passwdprog = None def splitpasswd(user): """splitpasswd('user:passwd') -> 'user', 'passwd'.""" global _passwdprog if _passwdprog is None: import re _passwdprog = re.compile('^([^:]*):(.*)$',re.S) match = _passwdprog.match(user) if match: return match.group(1, 2) return user, None # splittag('/path#tag') --> '/path', 'tag' _portprog = None def splitport(host): """splitport('host:port') --> 'host', 'port'.""" global _portprog if _portprog is None: import re _portprog = re.compile('^(.*):([0-9]+)$') match = _portprog.match(host) if match: return match.group(1, 2) return host, None _nportprog = None def splitnport(host, defport=-1): """Split host and port, returning numeric port. Return given default port if no ':' found; defaults to -1. Return numerical port if a valid number are found after ':'. 
Return None if ':' but not a valid number.""" global _nportprog if _nportprog is None: import re _nportprog = re.compile('^(.*):(.*)$') match = _nportprog.match(host) if match: host, port = match.group(1, 2) try: if not port: raise ValueError("no digits") nport = int(port) except ValueError: nport = None return host, nport return host, defport _queryprog = None def splitquery(url): """splitquery('/path?query') --> '/path', 'query'.""" global _queryprog if _queryprog is None: import re _queryprog = re.compile('^(.*)\?([^?]*)$') match = _queryprog.match(url) if match: return match.group(1, 2) return url, None _tagprog = None def splittag(url): """splittag('/path#tag') --> '/path', 'tag'.""" global _tagprog if _tagprog is None: import re _tagprog = re.compile('^(.*)#([^#]*)$') match = _tagprog.match(url) if match: return match.group(1, 2) return url, None def splitattr(url): """splitattr('/path;attr1=value1;attr2=value2;...') -> '/path', ['attr1=value1', 'attr2=value2', ...].""" words = url.split(';') return words[0], words[1:] _valueprog = None def splitvalue(attr): """splitvalue('attr=value') --> 'attr', 'value'.""" global _valueprog if _valueprog is None: import re _valueprog = re.compile('^([^=]*)=(.*)$') match = _valueprog.match(attr) if match: return match.group(1, 2) return attr, None mypy-0.560/test-data/stdlib-samples/3.2/posixpath.py0000644€tŠÔÚ€2›s®0000003413613215007205026402 0ustar jukkaDROPBOX\Domain Users00000000000000"""Common operations on Posix pathnames. Instead of importing this module directly, import os and refer to this module as os.path. The "os.path" name is an alias for this module on Posix systems; on other systems (e.g. Mac, Windows), os.path provides the same operations in a manner specific to that platform, and is an alias to another module (e.g. macpath, ntpath). Some of this can actually be useful on non-Posix systems too, e.g. for manipulation of the pathname component of URLs. """ import os import sys import stat import genericpath from genericpath import * from typing import ( Tuple, BinaryIO, TextIO, Pattern, AnyStr, List, Set, Any, Union, cast ) __all__ = ["normcase","isabs","join","splitdrive","split","splitext", "basename","dirname","commonprefix","getsize","getmtime", "getatime","getctime","islink","exists","lexists","isdir","isfile", "ismount", "expanduser","expandvars","normpath","abspath", "samefile","sameopenfile","samestat", "curdir","pardir","sep","pathsep","defpath","altsep","extsep", "devnull","realpath","supports_unicode_filenames","relpath"] # Strings representing various path-related bits and pieces. # These are primarily for export; internally, they are hardcoded. curdir = '.' pardir = '..' extsep = '.' sep = '/' pathsep = ':' defpath = ':/bin:/usr/bin' altsep = None # type: str devnull = '/dev/null' def _get_sep(path: AnyStr) -> AnyStr: if isinstance(path, bytes): return b'/' else: return '/' # Normalize the case of a pathname. Trivial in Posix, string.lower on Mac. # On MS-DOS this may also turn slashes into backslashes; however, other # normalizations (such as optimizing '../' away) are not allowed # (another function should be defined to do that). def normcase(s: AnyStr) -> AnyStr: """Normalize case of pathname. Has no effect under Posix""" # TODO: on Mac OS X, this should really return s.lower(). if not isinstance(s, (bytes, str)): raise TypeError("normcase() argument must be str or bytes, " "not '{}'".format(s.__class__.__name__)) return cast(AnyStr, s) # Return whether a path is absolute. 
# Trivial in Posix, harder on the Mac or MS-DOS. def isabs(s: AnyStr) -> bool: """Test whether a path is absolute""" sep = _get_sep(s) return s.startswith(sep) # Join pathnames. # Ignore the previous parts if a part is absolute. # Insert a '/' unless the first part is empty or already ends in '/'. def join(a: AnyStr, *p: AnyStr) -> AnyStr: """Join two or more pathname components, inserting '/' as needed. If any component is an absolute path, all previous path components will be discarded.""" sep = _get_sep(a) path = a for b in p: if b.startswith(sep): path = b elif not path or path.endswith(sep): path += b else: path += sep + b return path # Split a path in head (everything up to the last '/') and tail (the # rest). If the path ends in '/', tail will be empty. If there is no # '/' in the path, head will be empty. # Trailing '/'es are stripped from head unless it is the root. def split(p: AnyStr) -> Tuple[AnyStr, AnyStr]: """Split a pathname. Returns tuple "(head, tail)" where "tail" is everything after the final slash. Either part may be empty.""" sep = _get_sep(p) i = p.rfind(sep) + 1 head, tail = p[:i], p[i:] if head and head != sep*len(head): head = head.rstrip(sep) return head, tail # Split a path in root and extension. # The extension is everything starting at the last dot in the last # pathname component; the root is everything before that. # It is always true that root + ext == p. def splitext(p: AnyStr) -> Tuple[AnyStr, AnyStr]: if isinstance(p, bytes): sep = b'/' extsep = b'.' else: sep = '/' extsep = '.' return genericpath._splitext(p, sep, None, extsep) splitext.__doc__ = genericpath._splitext.__doc__ # Split a pathname into a drive specification and the rest of the # path. Useful on DOS/Windows/NT; on Unix, the drive is always empty. def splitdrive(p: AnyStr) -> Tuple[AnyStr, AnyStr]: """Split a pathname into drive and path. On Posix, drive is always empty.""" return p[:0], p # Return the tail (basename) part of a path, same as split(path)[1]. def basename(p: AnyStr) -> AnyStr: """Returns the final component of a pathname""" sep = _get_sep(p) i = p.rfind(sep) + 1 return p[i:] # Return the head (dirname) part of a path, same as split(path)[0]. def dirname(p: AnyStr) -> AnyStr: """Returns the directory component of a pathname""" sep = _get_sep(p) i = p.rfind(sep) + 1 head = p[:i] if head and head != sep*len(head): head = head.rstrip(sep) return head # Is a path a symbolic link? # This will always return false on systems where os.lstat doesn't exist. def islink(path: AnyStr) -> bool: """Test whether a path is a symbolic link""" try: st = os.lstat(path) except (os.error, AttributeError): return False return stat.S_ISLNK(st.st_mode) # Being true for dangling symbolic links is also useful. def lexists(path: AnyStr) -> bool: """Test whether a path exists. Returns True for broken symbolic links""" try: os.lstat(path) except os.error: return False return True # Are two filenames really pointing to the same file? def samefile(f1: AnyStr, f2: AnyStr) -> bool: """Test whether two pathnames reference the same actual file""" s1 = os.stat(f1) s2 = os.stat(f2) return samestat(s1, s2) # Are two open files really referencing the same file? # (Not necessarily the same file descriptor!) def sameopenfile(fp1: int, fp2: int) -> bool: """Test whether two open file objects reference the same file""" s1 = os.fstat(fp1) s2 = os.fstat(fp2) return samestat(s1, s2) # Are two stat buffers (obtained from stat, fstat or lstat) # describing the same file? 
def samestat(s1: os.stat_result, s2: os.stat_result) -> bool: """Test whether two stat buffers reference the same file""" return s1.st_ino == s2.st_ino and \ s1.st_dev == s2.st_dev # Is a path a mount point? # (Does this work for all UNIXes? Is it even guaranteed to work by Posix?) def ismount(path: AnyStr) -> bool: """Test whether a path is a mount point""" if islink(path): # A symlink can never be a mount point return False try: s1 = os.lstat(path) if isinstance(path, bytes): parent = join(path, b'..') else: parent = join(path, '..') s2 = os.lstat(parent) except os.error: return False # It doesn't exist -- so not a mount point :-) dev1 = s1.st_dev dev2 = s2.st_dev if dev1 != dev2: return True # path/.. on a different device as path ino1 = s1.st_ino ino2 = s2.st_ino if ino1 == ino2: return True # path/.. is the same i-node as path return False # Expand paths beginning with '~' or '~user'. # '~' means $HOME; '~user' means that user's home directory. # If the path doesn't begin with '~', or if the user or $HOME is unknown, # the path is returned unchanged (leaving error reporting to whatever # function is called with the expanded path as argument). # See also module 'glob' for expansion of *, ? and [...] in pathnames. # (A function should also be defined to do full *sh-style environment # variable expansion.) def expanduser(path: AnyStr) -> AnyStr: """Expand ~ and ~user constructions. If user or $HOME is unknown, do nothing.""" if isinstance(path, bytes): tilde = b'~' else: tilde = '~' if not path.startswith(tilde): return path sep = _get_sep(path) i = path.find(sep, 1) if i < 0: i = len(path) if i == 1: userhome = None # type: Union[str, bytes] if 'HOME' not in os.environ: import pwd userhome = pwd.getpwuid(os.getuid()).pw_dir else: userhome = os.environ['HOME'] else: import pwd name = path[1:i] # type: Union[str, bytes] if isinstance(name, bytes): name = str(name, 'ASCII') try: pwent = pwd.getpwnam(name) except KeyError: return path userhome = pwent.pw_dir if isinstance(path, bytes): userhome = os.fsencode(userhome) root = b'/' else: root = '/' userhome = userhome.rstrip(root) return (userhome + path[i:]) or root # Expand paths containing shell variable substitutions. # This expands the forms $variable and ${variable} only. # Non-existent variables are left unchanged. _varprog = None # type: Pattern[str] _varprogb = None # type: Pattern[bytes] def expandvars(path: AnyStr) -> AnyStr: """Expand shell variables of form $var and ${var}. Unknown variables are left unchanged.""" global _varprog, _varprogb if isinstance(path, bytes): if b'$' not in path: return path if not _varprogb: import re _varprogb = re.compile(br'\$(\w+|\{[^}]*\})', re.ASCII) search = _varprogb.search start = b'{' end = b'}' else: if '$' not in path: return path if not _varprog: import re _varprog = re.compile(r'\$(\w+|\{[^}]*\})', re.ASCII) search = _varprog.search start = '{' end = '}' i = 0 while True: m = search(path, i) if not m: break i, j = m.span(0) name = None # type: Union[str, bytes] name = m.group(1) if name.startswith(start) and name.endswith(end): name = name[1:-1] if isinstance(name, bytes): name = str(name, 'ASCII') if name in os.environ: tail = path[j:] value = None # type: Union[str, bytes] value = os.environ[name] if isinstance(path, bytes): value = value.encode('ASCII') path = path[:i] + value i = len(path) path += tail else: i = j return path # Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B. 
# It should be understood that this may change the meaning of the path # if it contains symbolic links! def normpath(path: AnyStr) -> AnyStr: """Normalize path, eliminating double slashes, etc.""" if isinstance(path, bytes): sep = b'/' empty = b'' dot = b'.' dotdot = b'..' else: sep = '/' empty = '' dot = '.' dotdot = '..' if path == empty: return dot initial_slashes = path.startswith(sep) # type: int # POSIX allows one or two initial slashes, but treats three or more # as single slash. if (initial_slashes and path.startswith(sep*2) and not path.startswith(sep*3)): initial_slashes = 2 comps = path.split(sep) new_comps = [] # type: List[AnyStr] for comp in comps: if comp in (empty, dot): continue if (comp != dotdot or (not initial_slashes and not new_comps) or (new_comps and new_comps[-1] == dotdot)): new_comps.append(comp) elif new_comps: new_comps.pop() comps = new_comps path = sep.join(comps) if initial_slashes: path = sep*initial_slashes + path return path or dot def abspath(path: AnyStr) -> AnyStr: """Return an absolute path.""" if not isabs(path): if isinstance(path, bytes): cwd = os.getcwdb() else: cwd = os.getcwd() path = join(cwd, path) return normpath(path) # Return a canonical path (i.e. the absolute location of a file on the # filesystem). def realpath(filename: AnyStr) -> AnyStr: """Return the canonical path of the specified filename, eliminating any symbolic links encountered in the path.""" if isinstance(filename, bytes): sep = b'/' empty = b'' else: sep = '/' empty = '' if isabs(filename): bits = [sep] + filename.split(sep)[1:] else: bits = [empty] + filename.split(sep) for i in range(2, len(bits)+1): component = join(*bits[0:i]) # Resolve symbolic links. if islink(component): resolved = _resolve_link(component) if resolved is None: # Infinite loop -- return original component + rest of the path return abspath(join(*([component] + bits[i:]))) else: newpath = join(*([resolved] + bits[i:])) return realpath(newpath) return abspath(filename) def _resolve_link(path: AnyStr) -> AnyStr: """Internal helper function. Takes a path and follows symlinks until we either arrive at something that isn't a symlink, or encounter a path we've seen before (meaning that there's a loop). """ paths_seen = set() # type: Set[AnyStr] while islink(path): if path in paths_seen: # Already seen this path, so we must have a symlink loop return None paths_seen.add(path) # Resolve where the link points to resolved = os.readlink(path) if not isabs(resolved): dir = dirname(path) path = normpath(join(dir, resolved)) else: path = normpath(resolved) return path supports_unicode_filenames = (sys.platform == 'darwin') def relpath(path: AnyStr, start: AnyStr = None) -> AnyStr: """Return a relative version of a path""" if not path: raise ValueError("no path specified") if isinstance(path, bytes): curdir = b'.' sep = b'/' pardir = b'..' else: curdir = '.' sep = '/' pardir = '..' if start is None: start = curdir start_list = [x for x in abspath(start).split(sep) if x] path_list = [x for x in abspath(path).split(sep) if x] # Work out how much of the filepath is shared by start and path. i = len(commonprefix([start_list, path_list])) rel_list = [pardir] * (len(start_list)-i) + path_list[i:] if not rel_list: return curdir return join(*rel_list) mypy-0.560/test-data/stdlib-samples/3.2/pprint.py0000644€tŠÔÚ€2›s®0000003252513215007205025677 0ustar jukkaDROPBOX\Domain Users00000000000000# Author: Fred L. Drake, Jr. # fdrake@acm.org # # This is a simple little module I wrote to make life easier. 
I didn't # see anything quite like it in the library, though I may have overlooked # something. I wrote this when I was trying to read some heavily nested # tuples with fairly non-descriptive content. This is modeled very much # after Lisp/Scheme - style pretty-printing of lists. If you find it # useful, thank small children who sleep at night. """Support to pretty-print lists, tuples, & dictionaries recursively. Very simple, but useful, especially in debugging data structures. Classes ------- PrettyPrinter() Handle pretty-printing operations onto a stream using a configured set of formatting parameters. Functions --------- pformat() Format a Python object into a pretty-printed representation. pprint() Pretty-print a Python object to a stream [default is sys.stdout]. saferepr() Generate a 'standard' repr()-like value, but protect against recursive data structures. """ import sys as _sys from collections import OrderedDict as _OrderedDict from io import StringIO as _StringIO from typing import Any, Tuple, Dict, TextIO, cast, List __all__ = ["pprint","pformat","isreadable","isrecursive","saferepr", "PrettyPrinter"] # cache these for faster access: _commajoin = ", ".join _id = id _len = len _type = type def pprint(object: object, stream: TextIO = None, indent: int = 1, width: int = 80, depth: int = None) -> None: """Pretty-print a Python object to a stream [default is sys.stdout].""" printer = PrettyPrinter( stream=stream, indent=indent, width=width, depth=depth) printer.pprint(object) def pformat(object: object, indent: int = 1, width: int = 80, depth: int = None) -> str: """Format a Python object into a pretty-printed representation.""" return PrettyPrinter(indent=indent, width=width, depth=depth).pformat(object) def saferepr(object: object) -> str: """Version of repr() which can handle recursive data structures.""" return _safe_repr(object, {}, None, 0)[0] def isreadable(object: object) -> bool: """Determine if saferepr(object) is readable by eval().""" return _safe_repr(object, {}, None, 0)[1] def isrecursive(object: object) -> bool: """Determine if object requires a recursive representation.""" return _safe_repr(object, {}, None, 0)[2] class _safe_key: """Helper function for key functions when sorting unorderable objects. The wrapped-object will fallback to an Py2.x style comparison for unorderable types (sorting first comparing the type name and then by the obj ids). Does not work recursively, so dict.items() must have _safe_key applied to both the key and the value. """ __slots__ = ['obj'] def __init__(self, obj: Any) -> None: self.obj = obj def __lt__(self, other: Any) -> Any: rv = self.obj.__lt__(other.obj) # type: Any if rv is NotImplemented: rv = (str(type(self.obj)), id(self.obj)) < \ (str(type(other.obj)), id(other.obj)) return rv def _safe_tuple(t: Tuple[Any, Any]) -> Tuple[_safe_key, _safe_key]: "Helper function for comparing 2-tuples" return _safe_key(t[0]), _safe_key(t[1]) class PrettyPrinter: def __init__(self, indent: int = 1, width: int = 80, depth: int = None, stream: TextIO = None) -> None: """Handle pretty printing operations onto a stream using a set of configured parameters. indent Number of spaces to indent for each level of nesting. width Attempted maximum number of columns in the output. depth The maximum depth to print out nested structures. stream The desired output stream. If omitted (or false), the standard output stream available at construction will be used. 
""" indent = int(indent) width = int(width) assert indent >= 0, "indent must be >= 0" assert depth is None or depth > 0, "depth must be > 0" assert width, "width must be != 0" self._depth = depth self._indent_per_level = indent self._width = width if stream is not None: self._stream = stream else: self._stream = _sys.stdout def pprint(self, object: object) -> None: self._format(object, self._stream, 0, 0, {}, 0) self._stream.write("\n") def pformat(self, object: object) -> str: sio = _StringIO() self._format(object, sio, 0, 0, {}, 0) return sio.getvalue() def isrecursive(self, object: object) -> int: return self.format(object, {}, 0, 0)[2] def isreadable(self, object: object) -> int: s, readable, recursive = self.format(object, {}, 0, 0) return readable and not recursive def _format(self, object: object, stream: TextIO, indent: int, allowance: int, context: Dict[int, int], level: int) -> None: level = level + 1 objid = _id(object) if objid in context: stream.write(_recursion(object)) self._recursive = True self._readable = False return rep = self._repr(object, context, level - 1) typ = _type(object) sepLines = _len(rep) > (self._width - 1 - indent - allowance) write = stream.write if self._depth and level > self._depth: write(rep) return if sepLines: r = getattr(typ, "__repr__", None) if isinstance(object, dict): write('{') if self._indent_per_level > 1: write((self._indent_per_level - 1) * ' ') length = _len(object) if length: context[objid] = 1 indent = indent + self._indent_per_level if issubclass(typ, _OrderedDict): items = list(object.items()) else: items = sorted(object.items(), key=_safe_tuple) key, ent = items[0] rep = self._repr(key, context, level) write(rep) write(': ') self._format(ent, stream, indent + _len(rep) + 2, allowance + 1, context, level) if length > 1: for key, ent in items[1:]: rep = self._repr(key, context, level) write(',\n%s%s: ' % (' '*indent, rep)) self._format(ent, stream, indent + _len(rep) + 2, allowance + 1, context, level) indent = indent - self._indent_per_level del context[objid] write('}') return if ((issubclass(typ, list) and r is list.__repr__) or (issubclass(typ, tuple) and r is tuple.__repr__) or (issubclass(typ, set) and r is set.__repr__) or (issubclass(typ, frozenset) and r is frozenset.__repr__) ): anyobj = cast(Any, object) # TODO Collection? 
length = _len(anyobj) if issubclass(typ, list): write('[') endchar = ']' lst = anyobj elif issubclass(typ, set): if not length: write('set()') return write('{') endchar = '}' lst = sorted(anyobj, key=_safe_key) elif issubclass(typ, frozenset): if not length: write('frozenset()') return write('frozenset({') endchar = '})' lst = sorted(anyobj, key=_safe_key) indent += 10 else: write('(') endchar = ')' lst = list(anyobj) if self._indent_per_level > 1: write((self._indent_per_level - 1) * ' ') if length: context[objid] = 1 indent = indent + self._indent_per_level self._format(lst[0], stream, indent, allowance + 1, context, level) if length > 1: for ent in lst[1:]: write(',\n' + ' '*indent) self._format(ent, stream, indent, allowance + 1, context, level) indent = indent - self._indent_per_level del context[objid] if issubclass(typ, tuple) and length == 1: write(',') write(endchar) return write(rep) def _repr(self, object: object, context: Dict[int, int], level: int) -> str: repr, readable, recursive = self.format(object, context.copy(), self._depth, level) if not readable: self._readable = False if recursive: self._recursive = True return repr def format(self, object: object, context: Dict[int, int], maxlevels: int, level: int) -> Tuple[str, int, int]: """Format object for a specific context, returning a string and flags indicating whether the representation is 'readable' and whether the object represents a recursive construct. """ return _safe_repr(object, context, maxlevels, level) # Return triple (repr_string, isreadable, isrecursive). def _safe_repr(object: object, context: Dict[int, int], maxlevels: int, level: int) -> Tuple[str, bool, bool]: typ = _type(object) if typ is str: s = cast(str, object) if 'locale' not in _sys.modules: return repr(object), True, False if "'" in s and '"' not in s: closure = '"' quotes = {'"': '\\"'} else: closure = "'" quotes = {"'": "\\'"} qget = quotes.get sio = _StringIO() write = sio.write for char in s: if char.isalpha(): write(char) else: write(qget(char, repr(char)[1:-1])) return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False r = getattr(typ, "__repr__", None) if issubclass(typ, dict) and r is dict.__repr__: if not object: return "{}", True, False objid = _id(object) if maxlevels and level >= maxlevels: return "{...}", False, objid in context if objid in context: return _recursion(object), False, True context[objid] = 1 readable = True recursive = False components = [] # type: List[str] append = components.append level += 1 saferepr = _safe_repr items = sorted((cast(dict, object)).items(), key=_safe_tuple) for k, v in items: krepr, kreadable, krecur = saferepr(k, context, maxlevels, level) vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level) append("%s: %s" % (krepr, vrepr)) readable = readable and kreadable and vreadable if krecur or vrecur: recursive = True del context[objid] return "{%s}" % _commajoin(components), readable, recursive if (issubclass(typ, list) and r is list.__repr__) or \ (issubclass(typ, tuple) and r is tuple.__repr__): anyobj = cast(Any, object) # TODO Sequence? 
if issubclass(typ, list): if not object: return "[]", True, False format = "[%s]" elif _len(anyobj) == 1: format = "(%s,)" else: if not object: return "()", True, False format = "(%s)" objid = _id(object) if maxlevels and level >= maxlevels: return format % "...", False, objid in context if objid in context: return _recursion(object), False, True context[objid] = 1 readable = True recursive = False components = [] append = components.append level += 1 for o in anyobj: orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level) append(orepr) if not oreadable: readable = False if orecur: recursive = True del context[objid] return format % _commajoin(components), readable, recursive rep = repr(object) return rep, bool(rep and not rep.startswith('<')), False def _recursion(object: object) -> str: return ("<Recursion on %s with id=%s>" % (_type(object).__name__, _id(object))) def _perfcheck(object: object = None) -> None: import time if object is None: object = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100000 p = PrettyPrinter() t1 = time.time() _safe_repr(object, {}, None, 0) t2 = time.time() p.pformat(object) t3 = time.time() print("_safe_repr:", t2 - t1) print("pformat:", t3 - t2) if __name__ == "__main__": _perfcheck() mypy-0.560/test-data/stdlib-samples/3.2/random.py0000644€tŠÔÚ€2›s®0000006406413215007205025646 0ustar jukkaDROPBOX\Domain Users00000000000000"""Random variable generators. integers -------- uniform within range sequences --------- pick random element pick random sample generate random permutation distributions on the real line: ------------------------------ uniform triangular normal (Gaussian) lognormal negative exponential gamma beta pareto Weibull distributions on the circle (angles 0 to 2pi) --------------------------------------------- circular uniform von Mises General notes on the underlying Mersenne Twister core generator: * The period is 2**19937-1. * It is one of the most extensively tested generators in existence. * The random() method is implemented in C, executes in a single Python step, and is, therefore, threadsafe. """ from warnings import warn as _warn from types import MethodType as _MethodType, BuiltinMethodType as _BuiltinMethodType from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin from os import urandom as _urandom from collections import Set as _Set, Sequence as _Sequence from hashlib import sha512 as _sha512 from typing import ( Any, TypeVar, Iterable, Sequence, List, Callable, Set, cast, SupportsInt, Union ) __all__ = ["Random","seed","random","uniform","randint","choice","sample", "randrange","shuffle","normalvariate","lognormvariate", "expovariate","vonmisesvariate","gammavariate","triangular", "gauss","betavariate","paretovariate","weibullvariate", "getstate","setstate", "getrandbits", "SystemRandom"] NV_MAGICCONST = 4 * _exp(-0.5)/_sqrt(2.0) TWOPI = 2.0*_pi LOG4 = _log(4.0) SG_MAGICCONST = 1.0 + _log(4.5) BPF = 53 # Number of bits in a float RECIP_BPF = 2**-BPF # type: float # Translated by Guido van Rossum from C source provided by # Adrian Baddeley. Adapted by Raymond Hettinger for use with # the Mersenne Twister and os.urandom() core generators. import _random T = TypeVar('T') class Random(_random.Random): """Random number generator base class used by bound module functions. Used to instantiate instances of Random to get generators that don't share state. 
Class Random can also be subclassed if you want to use a different basic generator of your own devising: in that case, override the following methods: random(), seed(), getstate(), and setstate(). Optionally, implement a getrandbits() method so that randrange() can cover arbitrarily large ranges. """ VERSION = 3 # used by getstate/setstate gauss_next = 0.0 def __init__(self, x: object = None) -> None: """Initialize an instance. Optional argument x controls seeding, as for Random.seed(). """ self.seed(x) self.gauss_next = None def seed(self, a: Any = None, version: int = 2) -> None: """Initialize internal state from hashable object. None or no argument seeds from current time or from an operating system specific randomness source if available. For version 2 (the default), all of the bits are used if *a *is a str, bytes, or bytearray. For version 1, the hash() of *a* is used instead. If *a* is an int, all bits are used. """ if a is None: try: a = int.from_bytes(_urandom(32), 'big') except NotImplementedError: import time a = int(time.time() * 256) # use fractional seconds if version == 2: if isinstance(a, (str, bytes, bytearray)): if isinstance(a, str): a = a.encode() a += _sha512(a).digest() a = int.from_bytes(a, 'big') super().seed(a) self.gauss_next = None def getstate(self) -> tuple: """Return internal state; can be passed to setstate() later.""" return self.VERSION, super().getstate(), self.gauss_next def setstate(self, state: tuple) -> None: """Restore internal state from object returned by getstate().""" version = state[0] if version == 3: version, internalstate, self.gauss_next = state super().setstate(internalstate) elif version == 2: version, internalstate, self.gauss_next = state # In version 2, the state was saved as signed ints, which causes # inconsistencies between 32/64-bit systems. The state is # really unsigned 32-bit ints, so we convert negative ints from # version 2 to positive longs for version 3. try: internalstate = tuple(x % (2**32) for x in internalstate) except ValueError as e: raise TypeError() super().setstate(internalstate) else: raise ValueError("state with version %s passed to " "Random.setstate() of version %s" % (version, self.VERSION)) ## ---- Methods below this point do not need to be overridden when ## ---- subclassing for the purpose of using a different core generator. ## -------------------- pickle support ------------------- def __getstate__(self) -> object: # for pickle return self.getstate() def __setstate__(self, state: Any) -> None: # for pickle self.setstate(state) def __reduce__(self) -> tuple: return self.__class__, (), self.getstate() ## -------------------- integer methods ------------------- def randrange(self, start: SupportsInt, stop: SupportsInt = None, step: int = 1, int: Callable[[SupportsInt], int] = int) -> int: """Choose a random item from range(start, stop[, step]). This fixes the problem with randint() which includes the endpoint; in Python this is usually not what you want. Do not supply the 'int' argument. """ # This code is a bit messy to make it fast for the # common case while still doing adequate error checking. istart = int(start) if istart != start: raise ValueError("non-integer arg 1 for randrange()") if stop is None: if istart > 0: return self._randbelow(istart) raise ValueError("empty range for randrange()") # stop argument supplied. 
istop = int(stop) if istop != stop: raise ValueError("non-integer stop for randrange()") width = istop - istart if step == 1 and width > 0: return istart + self._randbelow(width) if step == 1: raise ValueError("empty range for randrange() (%d,%d, %d)" % (istart, istop, width)) # Non-unit step argument supplied. istep = int(step) if istep != step: raise ValueError("non-integer step for randrange()") if istep > 0: n = (width + istep - 1) // istep elif istep < 0: n = (width + istep + 1) // istep else: raise ValueError("zero step for randrange()") if n <= 0: raise ValueError("empty range for randrange()") return istart + istep*self._randbelow(n) def randint(self, a: int, b: int) -> int: """Return random integer in range [a, b], including both end points. """ return self.randrange(a, b+1) def _randbelow(self, n: int, int: Callable[[float], int] = int, maxsize: int = 1<<BPF, type: Any = type, Method: Any = _MethodType, BuiltinMethod: Any = _BuiltinMethodType) -> int: "Return a random int in the range [0,n). Raises ValueError if n==0." getrandbits = self.getrandbits # Only call self.getrandbits if the original random() builtin method # has not been overridden or if a new getrandbits() was supplied. if type(self.random) is BuiltinMethod or type(getrandbits) is Method: k = n.bit_length() # don't use (n-1) here because n can be 1 r = getrandbits(k) # 0 <= r < 2**k while r >= n: r = getrandbits(k) return r # There's an overridden random() method but no new getrandbits() method, # so we can only use random() from here. random = self.random if n >= maxsize: _warn("Underlying random() generator does not supply \n" "enough bits to choose from a population range this large.\n" "To remove the range limitation, add a getrandbits() method.") return int(random() * n) rem = maxsize % n limit = (maxsize - rem) / maxsize # int(limit * maxsize) % n == 0 s = random() while s >= limit: s = random() return int(s*maxsize) % n ## -------------------- sequence methods ------------------- def choice(self, seq: Sequence[T]) -> T: """Choose a random element from a non-empty sequence.""" try: i = self._randbelow(len(seq)) except ValueError: raise IndexError('Cannot choose from an empty sequence') return seq[i] def shuffle(self, x: List[T], random: Callable[[], float] = None, int: Callable[[float], int] = int) -> None: """x, random=random.random -> shuffle list x in place; return None. Optional arg random is a 0-argument function returning a random float in [0.0, 1.0); by default, the standard random.random. """ randbelow = self._randbelow for i in reversed(range(1, len(x))): # pick an element in x[:i+1] with which to exchange x[i] j = randbelow(i+1) if random is None else int(random() * (i+1)) x[i], x[j] = x[j], x[i] def sample(self, population: Union[_Set[T], _Sequence[T]], k: int) -> List[T]: """Chooses k unique random elements from a population sequence or set. Returns a new list containing elements from the population while leaving the original population unchanged. The resulting list is in selection order so that all sub-slices will also be valid random samples. This allows raffle winners (the sample) to be partitioned into grand prize and second place winners (the subslices). Members of the population need not be hashable or unique. If the population contains repeats, then each occurrence is a possible selection in the sample. To choose a sample in a range of integers, use range as an argument. 
This is especially fast and space efficient for sampling from a large population: sample(range(10000000), 60) """ # Sampling without replacement entails tracking either potential # selections (the pool) in a list or previous selections in a set. # When the number of selections is small compared to the # population, then tracking selections is efficient, requiring # only a small set and an occasional reselection. For # a larger number of selections, the pool tracking method is # preferred since the list takes less space than the # set and it doesn't suffer from frequent reselections. if isinstance(population, _Set): population = list(population) if not isinstance(population, _Sequence): raise TypeError("Population must be a sequence or set. For dicts, use list(d).") randbelow = self._randbelow n = len(population) if not (0 <= k and k <= n): raise ValueError("Sample larger than population") result = [cast(T, None)] * k setsize = 21 # size of a small set minus size of an empty list if k > 5: setsize += 4 ** _ceil(_log(k * 3, 4)) # table size for big sets if n <= setsize: # An n-length list is smaller than a k-length set pool = list(population) for i in range(k): # invariant: non-selected at [0,n-i) j = randbelow(n-i) result[i] = pool[j] pool[j] = pool[n-i-1] # move non-selected item into vacancy else: selected = set() # type: Set[int] selected_add = selected.add for i in range(k): j = randbelow(n) while j in selected: j = randbelow(n) selected_add(j) result[i] = population[j] return result ## -------------------- real-valued distributions ------------------- ## -------------------- uniform distribution ------------------- def uniform(self, a: float, b: float) -> float: "Get a random number in the range [a, b) or [a, b] depending on rounding." return a + (b-a) * self.random() ## -------------------- triangular -------------------- def triangular(self, low: float = 0.0, high: float = 1.0, mode: float = None) -> float: """Triangular distribution. Continuous distribution bounded by given lower and upper limits, and having a given mode value in-between. http://en.wikipedia.org/wiki/Triangular_distribution """ u = self.random() c = 0.5 if mode is None else (mode - low) / (high - low) if u > c: u = 1.0 - u c = 1.0 - c low, high = high, low return low + (high - low) * (u * c) ** 0.5 ## -------------------- normal distribution -------------------- def normalvariate(self, mu: float, sigma: float) -> float: """Normal distribution. mu is the mean, and sigma is the standard deviation. """ # mu = mean, sigma = standard deviation # Uses Kinderman and Monahan method. Reference: Kinderman, # A.J. and Monahan, J.F., "Computer generation of random # variables using the ratio of uniform deviates", ACM Trans # Math Software, 3, (1977), pp257-260. random = self.random while 1: u1 = random() u2 = 1.0 - random() z = NV_MAGICCONST*(u1-0.5)/u2 zz = z*z/4.0 if zz <= -_log(u2): break return mu + z*sigma ## -------------------- lognormal distribution -------------------- def lognormvariate(self, mu: float, sigma: float) -> float: """Log normal distribution. If you take the natural logarithm of this distribution, you'll get a normal distribution with mean mu and standard deviation sigma. mu can have any value, and sigma must be greater than zero. """ return _exp(self.normalvariate(mu, sigma)) ## -------------------- exponential distribution -------------------- def expovariate(self, lambd: float) -> float: """Exponential distribution. lambd is 1.0 divided by the desired mean. It should be nonzero. 
(The parameter would be called "lambda", but that is a reserved word in Python.) Returned values range from 0 to positive infinity if lambd is positive, and from negative infinity to 0 if lambd is negative. """ # lambd: rate lambd = 1/mean # ('lambda' is a Python reserved word) # we use 1-random() instead of random() to preclude the # possibility of taking the log of zero. return -_log(1.0 - self.random())/lambd ## -------------------- von Mises distribution -------------------- def vonmisesvariate(self, mu: float, kappa: float) -> float: """Circular data distribution. mu is the mean angle, expressed in radians between 0 and 2*pi, and kappa is the concentration parameter, which must be greater than or equal to zero. If kappa is equal to zero, this distribution reduces to a uniform random angle over the range 0 to 2*pi. """ # mu: mean angle (in radians between 0 and 2*pi) # kappa: concentration parameter kappa (>= 0) # if kappa = 0 generate uniform random angle # Based upon an algorithm published in: Fisher, N.I., # "Statistical Analysis of Circular Data", Cambridge # University Press, 1993. # Thanks to Magnus Kessler for a correction to the # implementation of step 4. random = self.random if kappa <= 1e-6: return TWOPI * random() a = 1.0 + _sqrt(1.0 + 4.0 * kappa * kappa) b = (a - _sqrt(2.0 * a))/(2.0 * kappa) r = (1.0 + b * b)/(2.0 * b) while 1: u1 = random() z = _cos(_pi * u1) f = (1.0 + r * z)/(r + z) c = kappa * (r - f) u2 = random() if u2 < c * (2.0 - c) or u2 <= c * _exp(1.0 - c): break u3 = random() if u3 > 0.5: theta = (mu % TWOPI) + _acos(f) else: theta = (mu % TWOPI) - _acos(f) return theta ## -------------------- gamma distribution -------------------- def gammavariate(self, alpha: float, beta: float) -> float: """Gamma distribution. Not the gamma function! Conditions on the parameters are alpha > 0 and beta > 0. The probability distribution function is: x ** (alpha - 1) * math.exp(-x / beta) pdf(x) = -------------------------------------- math.gamma(alpha) * beta ** alpha """ # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2 # Warning: a few older sources define the gamma distribution in terms # of alpha > -1.0 if alpha <= 0.0 or beta <= 0.0: raise ValueError('gammavariate: alpha and beta must be > 0.0') random = self.random if alpha > 1.0: # Uses R.C.H. Cheng, "The generation of Gamma # variables with non-integral shape parameters", # Applied Statistics, (1977), 26, No. 1, p71-74 ainv = _sqrt(2.0 * alpha - 1.0) bbb = alpha - LOG4 ccc = alpha + ainv while 1: u1 = random() if not (1e-7 < u1 and u1 < .9999999): continue u2 = 1.0 - random() v = _log(u1/(1.0-u1))/ainv x = alpha*_exp(v) z = u1*u1*u2 r = bbb+ccc*v-x if r + SG_MAGICCONST - 4.5*z >= 0.0 or r >= _log(z): return x * beta elif alpha == 1.0: # expovariate(1) u = random() while u <= 1e-7: u = random() return -_log(u) * beta else: # alpha is between 0 and 1 (exclusive) # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle while 1: u = random() b = (_e + alpha)/_e p = b*u if p <= 1.0: x = p ** (1.0/alpha) else: x = -_log((b-p)/alpha) u1 = random() if p > 1.0: if u1 <= x ** (alpha - 1.0): break elif u1 <= _exp(-x): break return x * beta ## -------------------- Gauss (faster alternative) -------------------- def gauss(self, mu: float, sigma: float) -> float: """Gaussian distribution. mu is the mean, and sigma is the standard deviation. This is slightly faster than the normalvariate() function. Not thread-safe without a lock around calls. 
""" # When x and y are two variables from [0, 1), uniformly # distributed, then # # cos(2*pi*x)*sqrt(-2*log(1-y)) # sin(2*pi*x)*sqrt(-2*log(1-y)) # # are two *independent* variables with normal distribution # (mu = 0, sigma = 1). # (Lambert Meertens) # (corrected version; bug discovered by Mike Miller, fixed by LM) # Multithreading note: When two threads call this function # simultaneously, it is possible that they will receive the # same return value. The window is very small though. To # avoid this, you have to use a lock around all calls. (I # didn't want to slow this down in the serial case by using a # lock here.) random = self.random z = self.gauss_next self.gauss_next = None if z is None: x2pi = random() * TWOPI g2rad = _sqrt(-2.0 * _log(1.0 - random())) z = _cos(x2pi) * g2rad self.gauss_next = _sin(x2pi) * g2rad return mu + z*sigma ## -------------------- beta -------------------- ## See ## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html ## for Ivan Frohne's insightful analysis of why the original implementation: ## ## def betavariate(self, alpha, beta): ## # Discrete Event Simulation in C, pp 87-88. ## ## y = self.expovariate(alpha) ## z = self.expovariate(1.0/beta) ## return z/(y+z) ## ## was dead wrong, and how it probably got that way. def betavariate(self, alpha: float, beta: float) -> 'float': """Beta distribution. Conditions on the parameters are alpha > 0 and beta > 0. Returned values range between 0 and 1. """ # This version due to Janne Sinkkonen, and matches all the std # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution"). y = self.gammavariate(alpha, 1.) if y == 0: return 0.0 else: return y / (y + self.gammavariate(beta, 1.)) ## -------------------- Pareto -------------------- def paretovariate(self, alpha: float) -> float: """Pareto distribution. alpha is the shape parameter.""" # Jain, pg. 495 u = 1.0 - self.random() return 1.0 / u ** (1.0/alpha) ## -------------------- Weibull -------------------- def weibullvariate(self, alpha: float, beta: float) -> float: """Weibull distribution. alpha is the scale parameter and beta is the shape parameter. """ # Jain, pg. 499; bug fix courtesy Bill Arms u = 1.0 - self.random() return alpha * (-_log(u)) ** (1.0/beta) ## --------------- Operating System Random Source ------------------ class SystemRandom(Random): """Alternate random number generator using sources provided by the operating system (such as /dev/urandom on Unix or CryptGenRandom on Windows). Not available on all systems (see os.urandom() for details). """ def random(self) -> float: """Get the next random number in the range [0.0, 1.0).""" return (int.from_bytes(_urandom(7), 'big') >> 3) * RECIP_BPF def getrandbits(self, k: int) -> int: """getrandbits(k) -> x. Generates a long int with k random bits.""" if k <= 0: raise ValueError('number of bits must be greater than zero') if k != int(k): raise TypeError('number of bits should be an integer') numbytes = (k + 7) // 8 # bits / 8 and rounded up x = int.from_bytes(_urandom(numbytes), 'big') return x >> (numbytes * 8 - k) # trim excess bits def seed(self, a: object = None, version: int = None) -> None: "Stub method. Not used for a system random number generator." return def _notimplemented(self, *args: Any, **kwds: Any) -> Any: "Method should not be called for a system random number generator." 
raise NotImplementedError('System entropy source does not have state.') getstate = setstate = _notimplemented # Create one instance, seeded from current time, and export its methods # as module-level functions. The functions share state across all uses #(both in the user's code and in the Python libraries), but that's fine # for most programs and is easier for the casual user than making them # instantiate their own Random() instance. _inst = Random() seed = _inst.seed random = _inst.random uniform = _inst.uniform triangular = _inst.triangular randint = _inst.randint choice = _inst.choice randrange = _inst.randrange sample = _inst.sample shuffle = _inst.shuffle normalvariate = _inst.normalvariate lognormvariate = _inst.lognormvariate expovariate = _inst.expovariate vonmisesvariate = _inst.vonmisesvariate gammavariate = _inst.gammavariate gauss = _inst.gauss betavariate = _inst.betavariate paretovariate = _inst.paretovariate weibullvariate = _inst.weibullvariate getstate = _inst.getstate setstate = _inst.setstate getrandbits = _inst.getrandbits ## -------------------- test program -------------------- def _test_generator(n: int, func: Any, args: tuple) -> None: import time print(n, 'times', func.__name__) total = 0.0 sqsum = 0.0 smallest = 1e10 largest = -1e10 t0 = time.time() for i in range(n): x = func(*args) # type: float total += x sqsum = sqsum + x*x smallest = min(x, smallest) largest = max(x, largest) t1 = time.time() print(round(t1-t0, 3), 'sec,', end=' ') avg = total/n stddev = _sqrt(sqsum/n - avg*avg) print('avg %g, stddev %g, min %g, max %g' % \ (avg, stddev, smallest, largest)) def _test(N: int = 2000) -> None: _test_generator(N, random, ()) _test_generator(N, normalvariate, (0.0, 1.0)) _test_generator(N, lognormvariate, (0.0, 1.0)) _test_generator(N, vonmisesvariate, (0.0, 1.0)) _test_generator(N, gammavariate, (0.01, 1.0)) _test_generator(N, gammavariate, (0.1, 1.0)) _test_generator(N, gammavariate, (0.1, 2.0)) _test_generator(N, gammavariate, (0.5, 1.0)) _test_generator(N, gammavariate, (0.9, 1.0)) _test_generator(N, gammavariate, (1.0, 1.0)) _test_generator(N, gammavariate, (2.0, 1.0)) _test_generator(N, gammavariate, (20.0, 1.0)) _test_generator(N, gammavariate, (200.0, 1.0)) _test_generator(N, gauss, (0.0, 1.0)) _test_generator(N, betavariate, (3.0, 3.0)) _test_generator(N, triangular, (0.0, 1.0, 1.0/3.0)) if __name__ == '__main__': _test() mypy-0.560/test-data/stdlib-samples/3.2/shutil.py0000644€tŠÔÚ€2›s®0000006606313215007205025677 0ustar jukkaDROPBOX\Domain Users00000000000000"""Utility functions for copying and archiving files and directory trees. XXX The functions here don't copy the resource fork or other metadata on Mac. 
""" import os import sys import stat from os.path import abspath import fnmatch import collections import errno import tarfile import builtins from typing import ( Any, AnyStr, IO, List, Iterable, Callable, Tuple, Dict, Sequence, cast ) from types import TracebackType try: import bz2 _BZ2_SUPPORTED = True except ImportError: _BZ2_SUPPORTED = False try: from pwd import getpwnam as _getpwnam getpwnam = _getpwnam except ImportError: getpwnam = None try: from grp import getgrnam as _getgrnam getgrnam = _getgrnam except ImportError: getgrnam = None __all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", "copytree", "move", "rmtree", "Error", "SpecialFileError", "ExecError", "make_archive", "get_archive_formats", "register_archive_format", "unregister_archive_format", "get_unpack_formats", "register_unpack_format", "unregister_unpack_format", "unpack_archive", "ignore_patterns"] class Error(EnvironmentError): pass class SpecialFileError(EnvironmentError): """Raised when trying to do a kind of operation (e.g. copying) which is not supported on a special file (e.g. a named pipe)""" class ExecError(EnvironmentError): """Raised when a command could not be executed""" class ReadError(EnvironmentError): """Raised when an archive cannot be read""" class RegistryError(Exception): """Raised when a registery operation with the archiving and unpacking registeries fails""" try: _WindowsError = WindowsError # type: type except NameError: _WindowsError = None # Function aliases to be patched in test cases rename = os.rename open = builtins.open def copyfileobj(fsrc: IO[AnyStr], fdst: IO[AnyStr], length: int = 16*1024) -> None: """copy data from file-like object fsrc to file-like object fdst""" while 1: buf = fsrc.read(length) if not buf: break fdst.write(buf) def _samefile(src: str, dst: str) -> bool: # Macintosh, Unix. if hasattr(os.path, 'samefile'): try: return os.path.samefile(src, dst) except OSError: return False # All other platforms: check for same pathname. return (os.path.normcase(os.path.abspath(src)) == os.path.normcase(os.path.abspath(dst))) def copyfile(src: str, dst: str) -> None: """Copy data from src to dst""" if _samefile(src, dst): raise Error("`%s` and `%s` are the same file" % (src, dst)) for fn in [src, dst]: try: st = os.stat(fn) except OSError: # File most likely does not exist pass else: # XXX What about other special files? (sockets, devices...) if stat.S_ISFIFO(st.st_mode): raise SpecialFileError("`%s` is a named pipe" % fn) with open(src, 'rb') as fsrc: with open(dst, 'wb') as fdst: copyfileobj(fsrc, fdst) def copymode(src: str, dst: str) -> None: """Copy mode bits from src to dst""" if hasattr(os, 'chmod'): st = os.stat(src) mode = stat.S_IMODE(st.st_mode) os.chmod(dst, mode) def copystat(src: str, dst: str) -> None: """Copy all stat info (mode bits, atime, mtime, flags) from src to dst""" st = os.stat(src) mode = stat.S_IMODE(st.st_mode) if hasattr(os, 'utime'): os.utime(dst, (st.st_atime, st.st_mtime)) if hasattr(os, 'chmod'): os.chmod(dst, mode) if hasattr(os, 'chflags') and hasattr(st, 'st_flags'): try: os.chflags(dst, st.st_flags) except OSError as why: if (not hasattr(errno, 'EOPNOTSUPP') or why.errno != errno.EOPNOTSUPP): raise def copy(src: str, dst: str) -> None: """Copy data and mode bits ("cp src dst"). The destination may be a directory. """ if os.path.isdir(dst): dst = os.path.join(dst, os.path.basename(src)) copyfile(src, dst) copymode(src, dst) def copy2(src: str, dst: str) -> None: """Copy data and all stat info ("cp -p src dst"). 
The destination may be a directory. """ if os.path.isdir(dst): dst = os.path.join(dst, os.path.basename(src)) copyfile(src, dst) copystat(src, dst) def ignore_patterns(*patterns: str) -> Callable[[str, List[str]], Iterable[str]]: """Function that can be used as copytree() ignore parameter. Patterns is a sequence of glob-style patterns that are used to exclude files""" def _ignore_patterns(path: str, names: List[str]) -> Iterable[str]: ignored_names = [] # type: List[str] for pattern in patterns: ignored_names.extend(fnmatch.filter(names, pattern)) return set(ignored_names) return _ignore_patterns def copytree(src: str, dst: str, symlinks: bool = False, ignore: Callable[[str, List[str]], Iterable[str]] = None, copy_function: Callable[[str, str], None] = copy2, ignore_dangling_symlinks: bool = False) -> None: """Recursively copy a directory tree. The destination directory must not already exist. If exception(s) occur, an Error is raised with a list of reasons. If the optional symlinks flag is true, symbolic links in the source tree result in symbolic links in the destination tree; if it is false, the contents of the files pointed to by symbolic links are copied. If the file pointed by the symlink doesn't exist, an exception will be added in the list of errors raised in an Error exception at the end of the copy process. You can set the optional ignore_dangling_symlinks flag to true if you want to silence this exception. Notice that this has no effect on platforms that don't support os.symlink. The optional ignore argument is a callable. If given, it is called with the `src` parameter, which is the directory being visited by copytree(), and `names` which is the list of `src` contents, as returned by os.listdir(): callable(src, names) -> ignored_names Since copytree() is called recursively, the callable will be called once for each directory that is copied. It returns a list of names relative to the `src` directory that should not be copied. The optional copy_function argument is a callable that will be used to copy each file. It will be called with the source path and the destination path as arguments. By default, copy2() is used, but any function that supports the same signature (like copy()) can be used. """ names = os.listdir(src) if ignore is not None: ignored_names = ignore(src, names) else: ignored_names = set() os.makedirs(dst) errors = [] # type: List[Tuple[str, str, str]] for name in names: if name in ignored_names: continue srcname = os.path.join(src, name) dstname = os.path.join(dst, name) try: if os.path.islink(srcname): linkto = os.readlink(srcname) if symlinks: os.symlink(linkto, dstname) else: # ignore dangling symlink if the flag is on if not os.path.exists(linkto) and ignore_dangling_symlinks: continue # otherwise let the copy occurs. 
copy2 will raise an error copy_function(srcname, dstname) elif os.path.isdir(srcname): copytree(srcname, dstname, symlinks, ignore, copy_function) else: # Will raise a SpecialFileError for unsupported file types copy_function(srcname, dstname) # catch the Error from the recursive copytree so that we can # continue with other files except Error as err: errors.extend(err.args[0]) except EnvironmentError as why: errors.append((srcname, dstname, str(why))) try: copystat(src, dst) except OSError as why: if _WindowsError is not None and isinstance(why, _WindowsError): # Copying file access times may fail on Windows pass else: errors.append((src, dst, str(why))) if errors: raise Error(errors) def rmtree(path: str, ignore_errors: bool = False, onerror: Callable[[Any, str, Tuple[type, BaseException, TracebackType]], None] = None) -> None: """Recursively delete a directory tree. If ignore_errors is set, errors are ignored; otherwise, if onerror is set, it is called to handle the error with arguments (func, path, exc_info) where func is os.listdir, os.remove, or os.rmdir; path is the argument to that function that caused it to fail; and exc_info is a tuple returned by sys.exc_info(). If ignore_errors is false and onerror is None, an exception is raised. """ if ignore_errors: def _onerror(x: Any, y: str, z: Tuple[type, BaseException, TracebackType]) -> None: pass onerror = _onerror elif onerror is None: def __onerror(x: Any, y: str, z: Tuple[type, BaseException, TracebackType]) -> None: raise onerror = __onerror try: if os.path.islink(path): # symlinks to directories are forbidden, see bug #1669 raise OSError("Cannot call rmtree on a symbolic link") except OSError: onerror(os.path.islink, path, sys.exc_info()) # can't continue even if onerror hook returns return names = [] # type: List[str] try: names = os.listdir(path) except os.error as err: onerror(os.listdir, path, sys.exc_info()) for name in names: fullname = os.path.join(path, name) try: mode = os.lstat(fullname).st_mode except os.error: mode = 0 if stat.S_ISDIR(mode): rmtree(fullname, ignore_errors, onerror) else: try: os.remove(fullname) except os.error as err: onerror(os.remove, fullname, sys.exc_info()) try: os.rmdir(path) except os.error: onerror(os.rmdir, path, sys.exc_info()) def _basename(path: str) -> str: # A basename() variant which first strips the trailing slash, if present. # Thus we always get the last component of the path, even for directories. return os.path.basename(path.rstrip(os.path.sep)) def move(src: str, dst: str) -> None: """Recursively move a file or directory to another location. This is similar to the Unix "mv" command. If the destination is a directory or a symlink to a directory, the source is moved inside the directory. The destination path must not already exist. If the destination already exists but is not a directory, it may be overwritten depending on os.rename() semantics. If the destination is on our current filesystem, then rename() is used. Otherwise, src is copied to the destination and then removed. A lot more could be done here... A look at a mv.c shows a lot of the issues this implementation glosses over. """ real_dst = dst if os.path.isdir(dst): if _samefile(src, dst): # We might be on a case insensitive filesystem, # perform the rename anyway. 
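# Illustrative case for the branch above: on a case-insensitive
# filesystem, move('Spam', 'spam') finds that the destination directory is
# "the same file" as the source, but the caller still expects the on-disk
# name to change, so os.rename() is performed anyway instead of treating
# this as a move of the directory into itself.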
os.rename(src, dst) return real_dst = os.path.join(dst, _basename(src)) if os.path.exists(real_dst): raise Error("Destination path '%s' already exists" % real_dst) try: os.rename(src, real_dst) except OSError as exc: if os.path.isdir(src): if _destinsrc(src, dst): raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst)) copytree(src, real_dst, symlinks=True) rmtree(src) else: copy2(src, real_dst) os.unlink(src) def _destinsrc(src: str, dst: str) -> bool: src = abspath(src) dst = abspath(dst) if not src.endswith(os.path.sep): src += os.path.sep if not dst.endswith(os.path.sep): dst += os.path.sep return dst.startswith(src) def _get_gid(name: str) -> int: """Returns a gid, given a group name.""" if getgrnam is None or name is None: return None try: result = getgrnam(name) except KeyError: result = None if result is not None: return result.gr_gid return None def _get_uid(name: str) -> int: """Returns an uid, given a user name.""" if getpwnam is None or name is None: return None try: result = getpwnam(name) except KeyError: result = None if result is not None: return result.pw_uid return None def _make_tarball(base_name: str, base_dir: str, compress: str = "gzip", verbose: bool = False, dry_run: bool = False, owner: str = None, group: str = None, logger: Any = None) -> str: """Create a (possibly compressed) tar file from all the files under 'base_dir'. 'compress' must be "gzip" (the default), "bzip2", or None. 'owner' and 'group' can be used to define an owner and a group for the archive that is being built. If not provided, the current owner and group will be used. The output tar file will be named 'base_name' + ".tar", possibly plus the appropriate compression extension (".gz", or ".bz2"). Returns the output filename. """ tar_compression = {'gzip': 'gz', None: ''} compress_ext = {'gzip': '.gz'} if _BZ2_SUPPORTED: tar_compression['bzip2'] = 'bz2' compress_ext['bzip2'] = '.bz2' # flags for compression program, each element of list will be an argument if compress is not None and compress not in compress_ext.keys(): raise ValueError("bad value for 'compress', or compression format not " "supported : {0}".format(compress)) archive_name = base_name + '.tar' + compress_ext.get(compress, '') archive_dir = os.path.dirname(archive_name) if not os.path.exists(archive_dir): if logger is not None: logger.info("creating %s", archive_dir) if not dry_run: os.makedirs(archive_dir) # creating the tarball if logger is not None: logger.info('Creating tar archive') uid = _get_uid(owner) gid = _get_gid(group) def _set_uid_gid(tarinfo): if gid is not None: tarinfo.gid = gid tarinfo.gname = group if uid is not None: tarinfo.uid = uid tarinfo.uname = owner return tarinfo if not dry_run: tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) try: tar.add(base_dir, filter=_set_uid_gid) finally: tar.close() return archive_name def _call_external_zip(base_dir: str, zip_filename: str, verbose: bool = False, dry_run: bool = False) -> None: # XXX see if we want to keep an external call here if verbose: zipoptions = "-r" else: zipoptions = "-rq" from distutils.errors import DistutilsExecError from distutils.spawn import spawn try: spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) except DistutilsExecError: # XXX really should distinguish between "couldn't find # external 'zip' command" and "zip failed". 
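# Illustrative note: the fallback above runs roughly
#   zip -rq <zip_filename> <base_dir>    (or "zip -r" when verbose)
# through distutils.spawn.spawn(), and DistutilsExecError is raised both
# when no "zip" binary can be found and when it exits with an error, so
# the message below cannot tell the two situations apart.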
raise ExecError(("unable to create zip file '%s': " "could neither import the 'zipfile' module nor " "find a standalone zip utility") % zip_filename) def _make_zipfile(base_name: str, base_dir: str, verbose: bool = False, dry_run: bool = False, logger: Any = None) -> str: """Create a zip file from all the files under 'base_dir'. The output zip file will be named 'base_name' + ".zip". Uses either the "zipfile" Python module (if available) or the InfoZIP "zip" utility (if installed and found on the default search path). If neither tool is available, raises ExecError. Returns the name of the output zip file. """ zip_filename = base_name + ".zip" archive_dir = os.path.dirname(base_name) if not os.path.exists(archive_dir): if logger is not None: logger.info("creating %s", archive_dir) if not dry_run: os.makedirs(archive_dir) # If zipfile module is not available, try spawning an external 'zip' # command. try: import zipfile except ImportError: zipfile = None if zipfile is None: _call_external_zip(base_dir, zip_filename, verbose, dry_run) else: if logger is not None: logger.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) if not dry_run: zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_DEFLATED) for dirpath, dirnames, filenames in os.walk(base_dir): for name in filenames: path = os.path.normpath(os.path.join(dirpath, name)) if os.path.isfile(path): zip.write(path, path) if logger is not None: logger.info("adding '%s'", path) zip.close() return zip_filename _ARCHIVE_FORMATS = { 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"), 'zip': (_make_zipfile, [],"ZIP file") } # type: Dict[str, Tuple[Any, Sequence[Tuple[str, str]], str]] if _BZ2_SUPPORTED: _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file") def get_archive_formats() -> List[Tuple[str, str]]: """Returns a list of supported formats for archiving and unarchiving. Each element of the returned sequence is a tuple (name, description) """ formats = [(name, registry[2]) for name, registry in _ARCHIVE_FORMATS.items()] formats.sort() return formats def register_archive_format(name: str, function: Any, extra_args: Sequence[Tuple[str, Any]] = None, description: str = '') -> None: """Registers an archive format. name is the name of the format. function is the callable that will be used to create archives. If provided, extra_args is a sequence of (name, value) tuples that will be passed as arguments to the callable. description can be provided to describe the format, and will be returned by the get_archive_formats() function. """ if extra_args is None: extra_args = [] if not callable(function): raise TypeError('The %s object is not callable' % function) if not isinstance(extra_args, (tuple, list)): raise TypeError('extra_args needs to be a sequence') for element in extra_args: if not isinstance(element, (tuple, list)) or len(cast(tuple, element)) !=2 : raise TypeError('extra_args elements are : (arg_name, value)') _ARCHIVE_FORMATS[name] = (function, extra_args, description) def unregister_archive_format(name: str) -> None: del _ARCHIVE_FORMATS[name] def make_archive(base_name: str, format: str, root_dir: str = None, base_dir: str = None, verbose: bool = False, dry_run: bool = False, owner: str = None, group: str = None, logger: Any = None) -> str: """Create an archive file (eg. zip or tar). 
'base_name' is the name of the file to create, minus any format-specific extension; 'format' is the archive format: one of "zip", "tar", "bztar" or "gztar". 'root_dir' is a directory that will be the root directory of the archive; ie. we typically chdir into 'root_dir' before creating the archive. 'base_dir' is the directory where we start archiving from; ie. 'base_dir' will be the common prefix of all files and directories in the archive. 'root_dir' and 'base_dir' both default to the current directory. Returns the name of the archive file. 'owner' and 'group' are used when creating a tar archive. By default, uses the current owner and group. """ save_cwd = os.getcwd() if root_dir is not None: if logger is not None: logger.debug("changing into '%s'", root_dir) base_name = os.path.abspath(base_name) if not dry_run: os.chdir(root_dir) if base_dir is None: base_dir = os.curdir kwargs = {'dry_run': dry_run, 'logger': logger} try: format_info = _ARCHIVE_FORMATS[format] except KeyError: raise ValueError("unknown archive format '%s'" % format) func = format_info[0] for arg, val in format_info[1]: kwargs[arg] = val if format != 'zip': kwargs['owner'] = owner kwargs['group'] = group try: filename = func(base_name, base_dir, **kwargs) finally: if root_dir is not None: if logger is not None: logger.debug("changing back to '%s'", save_cwd) os.chdir(save_cwd) return filename def get_unpack_formats() -> List[Tuple[str, List[str], str]]: """Returns a list of supported formats for unpacking. Each element of the returned sequence is a tuple (name, extensions, description) """ formats = [(name, info[0], info[3]) for name, info in _UNPACK_FORMATS.items()] formats.sort() return formats def _check_unpack_options(extensions: List[str], function: Any, extra_args: Sequence[Tuple[str, Any]]) -> None: """Checks what gets registered as an unpacker.""" # first make sure no other unpacker is registered for this extension existing_extensions = {} # type: Dict[str, str] for name, info in _UNPACK_FORMATS.items(): for ext in info[0]: existing_extensions[ext] = name for extension in extensions: if extension in existing_extensions: msg = '%s is already registered for "%s"' raise RegistryError(msg % (extension, existing_extensions[extension])) if not callable(function): raise TypeError('The registered function must be a callable') def register_unpack_format(name: str, extensions: List[str], function: Any, extra_args: Sequence[Tuple[str, Any]] = None, description: str = '') -> None: """Registers an unpack format. `name` is the name of the format. `extensions` is a list of extensions corresponding to the format. `function` is the callable that will be used to unpack archives. The callable will receive archives to unpack. If it's unable to handle an archive, it needs to raise a ReadError exception. If provided, `extra_args` is a sequence of (name, value) tuples that will be passed as arguments to the callable. description can be provided to describe the format, and will be returned by the get_unpack_formats() function. 
""" if extra_args is None: extra_args = [] _check_unpack_options(extensions, function, extra_args) _UNPACK_FORMATS[name] = extensions, function, extra_args, description def unregister_unpack_format(name: str) -> None: """Removes the pack format from the registery.""" del _UNPACK_FORMATS[name] def _ensure_directory(path: str) -> None: """Ensure that the parent directory of `path` exists""" dirname = os.path.dirname(path) if not os.path.isdir(dirname): os.makedirs(dirname) def _unpack_zipfile(filename: str, extract_dir: str) -> None: """Unpack zip `filename` to `extract_dir` """ try: import zipfile except ImportError: raise ReadError('zlib not supported, cannot unpack this archive.') if not zipfile.is_zipfile(filename): raise ReadError("%s is not a zip file" % filename) zip = zipfile.ZipFile(filename) try: for info in zip.infolist(): name = info.filename # don't extract absolute paths or ones with .. in them if name.startswith('/') or '..' in name: continue target = os.path.join(extract_dir, *name.split('/')) if not target: continue _ensure_directory(target) if not name.endswith('/'): # file data = zip.read(info.filename) f = open(target,'wb') try: f.write(data) finally: f.close() del data finally: zip.close() def _unpack_tarfile(filename: str, extract_dir: str) -> None: """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` """ try: tarobj = tarfile.open(filename) except tarfile.TarError: raise ReadError( "%s is not a compressed or uncompressed tar file" % filename) try: tarobj.extractall(extract_dir) finally: tarobj.close() _UNPACK_FORMATS = { 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"), 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"), 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file") } # type: Dict[str, Tuple[List[str], Any, Sequence[Tuple[str, Any]], str]] if _BZ2_SUPPORTED: _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [], "bzip2'ed tar-file") def _find_unpack_format(filename: str) -> str: for name, info in _UNPACK_FORMATS.items(): for extension in info[0]: if filename.endswith(extension): return name return None def unpack_archive(filename: str, extract_dir: str = None, format: str = None) -> None: """Unpack an archive. `filename` is the name of the archive. `extract_dir` is the name of the target directory, where the archive is unpacked. If not provided, the current working directory is used. `format` is the archive format: one of "zip", "tar", or "gztar". Or any other registered format. If not provided, unpack_archive will use the filename extension and see if an unpacker was registered for that extension. In case none is found, a ValueError is raised. """ if extract_dir is None: extract_dir = os.getcwd() if format is not None: try: format_info = _UNPACK_FORMATS[format] except KeyError: raise ValueError("Unknown unpack format '{0}'".format(format)) func = format_info[1] func(filename, extract_dir, **dict(format_info[2])) else: # we need to look at the registered unpackers supported extensions format = _find_unpack_format(filename) if format is None: raise ReadError("Unknown archive format '{0}'".format(filename)) func = _UNPACK_FORMATS[format][1] kwargs = dict(_UNPACK_FORMATS[format][2]) func(filename, extract_dir, **kwargs) mypy-0.560/test-data/stdlib-samples/3.2/subprocess.py0000644€tŠÔÚ€2›s®0000020102413215007205026543 0ustar jukkaDROPBOX\Domain Users00000000000000# subprocess - Subprocesses with accessible I/O streams # # For more information about this module, see PEP 324. 
# # Copyright (c) 2003-2005 by Peter Astrand # # Licensed to PSF under a Contributor Agreement. # See http://www.python.org/2.4/license for licensing details. r"""subprocess - Subprocesses with accessible I/O streams This module allows you to spawn processes, connect to their input/output/error pipes, and obtain their return codes. This module intends to replace several other, older modules and functions, like: os.system os.spawn* Information about how the subprocess module can be used to replace these modules and functions can be found below. Using the subprocess module =========================== This module defines one class called Popen: class Popen(args, bufsize=0, executable=None, stdin=None, stdout=None, stderr=None, preexec_fn=None, close_fds=True, shell=False, cwd=None, env=None, universal_newlines=False, startupinfo=None, creationflags=0, restore_signals=True, start_new_session=False, pass_fds=()): Arguments are: args should be a string, or a sequence of program arguments. The program to execute is normally the first item in the args sequence or string, but can be explicitly set by using the executable argument. On POSIX, with shell=False (default): In this case, the Popen class uses os.execvp() to execute the child program. args should normally be a sequence. A string will be treated as a sequence with the string as the only item (the program to execute). On POSIX, with shell=True: If args is a string, it specifies the command string to execute through the shell. If args is a sequence, the first item specifies the command string, and any additional items will be treated as additional shell arguments. On Windows: the Popen class uses CreateProcess() to execute the child program, which operates on strings. If args is a sequence, it will be converted to a string using the list2cmdline method. Please note that not all MS Windows applications interpret the command line the same way: The list2cmdline is designed for applications using the same rules as the MS C runtime. bufsize, if given, has the same meaning as the corresponding argument to the built-in open() function: 0 means unbuffered, 1 means line buffered, any other positive value means use a buffer of (approximately) that size. A negative bufsize means to use the system default, which usually means fully buffered. The default value for bufsize is 0 (unbuffered). stdin, stdout and stderr specify the executed programs' standard input, standard output and standard error file handles, respectively. Valid values are PIPE, an existing file descriptor (a positive integer), an existing file object, and None. PIPE indicates that a new pipe to the child should be created. With None, no redirection will occur; the child's file handles will be inherited from the parent. Additionally, stderr can be STDOUT, which indicates that the stderr data from the applications should be captured into the same file handle as for stdout. On POSIX, if preexec_fn is set to a callable object, this object will be called in the child process just before the child is executed. The use of preexec_fn is not thread safe, using it in the presence of threads could lead to a deadlock in the child process before the new executable is executed. If close_fds is true, all file descriptors except 0, 1 and 2 will be closed before the child process is executed. The default for close_fds varies by platform: Always true on POSIX. True when stdin/stdout/stderr are None on Windows, false otherwise. 
pass_fds is an optional sequence of file descriptors to keep open between the parent and child. Providing any pass_fds implicitly sets close_fds to true. if shell is true, the specified command will be executed through the shell. If cwd is not None, the current directory will be changed to cwd before the child is executed. On POSIX, if restore_signals is True all signals that Python sets to SIG_IGN are restored to SIG_DFL in the child process before the exec. Currently this includes the SIGPIPE, SIGXFZ and SIGXFSZ signals. This parameter does nothing on Windows. On POSIX, if start_new_session is True, the setsid() system call will be made in the child process prior to executing the command. If env is not None, it defines the environment variables for the new process. If universal_newlines is true, the file objects stdout and stderr are opened as a text files, but lines may be terminated by any of '\n', the Unix end-of-line convention, '\r', the old Macintosh convention or '\r\n', the Windows convention. All of these external representations are seen as '\n' by the Python program. Note: This feature is only available if Python is built with universal newline support (the default). Also, the newlines attribute of the file objects stdout, stdin and stderr are not updated by the communicate() method. The startupinfo and creationflags, if given, will be passed to the underlying CreateProcess() function. They can specify things such as appearance of the main window and priority for the new process. (Windows only) This module also defines some shortcut functions: call(*popenargs, **kwargs): Run command with arguments. Wait for command to complete, then return the returncode attribute. The arguments are the same as for the Popen constructor. Example: >>> retcode = subprocess.call(["ls", "-l"]) check_call(*popenargs, **kwargs): Run command with arguments. Wait for command to complete. If the exit code was zero then return, otherwise raise CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute. The arguments are the same as for the Popen constructor. Example: >>> subprocess.check_call(["ls", "-l"]) 0 getstatusoutput(cmd): Return (status, output) of executing cmd in a shell. Execute the string 'cmd' in a shell with os.popen() and return a 2-tuple (status, output). cmd is actually run as '{ cmd ; } 2>&1', so that the returned output will contain output or error messages. A trailing newline is stripped from the output. The exit status for the command can be interpreted according to the rules for the C function wait(). Example: >>> subprocess.getstatusoutput('ls /bin/ls') (0, '/bin/ls') >>> subprocess.getstatusoutput('cat /bin/junk') (256, 'cat: /bin/junk: No such file or directory') >>> subprocess.getstatusoutput('/bin/junk') (256, 'sh: /bin/junk: not found') getoutput(cmd): Return output (stdout or stderr) of executing cmd in a shell. Like getstatusoutput(), except the exit status is ignored and the return value is a string containing the command's output. Example: >>> subprocess.getoutput('ls /bin/ls') '/bin/ls' check_output(*popenargs, **kwargs): Run command with arguments and return its output as a byte string. If the exit code was non-zero it raises a CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute and output in the output attribute. The arguments are the same as for the Popen constructor. 
Example: >>> output = subprocess.check_output(["ls", "-l", "/dev/null"]) Exceptions ---------- Exceptions raised in the child process, before the new program has started to execute, will be re-raised in the parent. Additionally, the exception object will have one extra attribute called 'child_traceback', which is a string containing traceback information from the childs point of view. The most common exception raised is OSError. This occurs, for example, when trying to execute a non-existent file. Applications should prepare for OSErrors. A ValueError will be raised if Popen is called with invalid arguments. check_call() and check_output() will raise CalledProcessError, if the called process returns a non-zero return code. Security -------- Unlike some other popen functions, this implementation will never call /bin/sh implicitly. This means that all characters, including shell metacharacters, can safely be passed to child processes. Popen objects ============= Instances of the Popen class have the following methods: poll() Check if child process has terminated. Returns returncode attribute. wait() Wait for child process to terminate. Returns returncode attribute. communicate(input=None) Interact with process: Send data to stdin. Read data from stdout and stderr, until end-of-file is reached. Wait for process to terminate. The optional input argument should be a string to be sent to the child process, or None, if no data should be sent to the child. communicate() returns a tuple (stdout, stderr). Note: The data read is buffered in memory, so do not use this method if the data size is large or unlimited. The following attributes are also available: stdin If the stdin argument is PIPE, this attribute is a file object that provides input to the child process. Otherwise, it is None. stdout If the stdout argument is PIPE, this attribute is a file object that provides output from the child process. Otherwise, it is None. stderr If the stderr argument is PIPE, this attribute is file object that provides error output from the child process. Otherwise, it is None. pid The process ID of the child process. returncode The child return code. A None value indicates that the process hasn't terminated yet. A negative value -N indicates that the child was terminated by signal N (POSIX only). Replacing older functions with the subprocess module ==================================================== In this section, "a ==> b" means that b can be used as a replacement for a. Note: All functions in this section fail (more or less) silently if the executed program cannot be found; this module raises an OSError exception. In the following examples, we assume that the subprocess module is imported with "from subprocess import *". Replacing /bin/sh shell backquote --------------------------------- output=`mycmd myarg` ==> output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0] Replacing shell pipe line ------------------------- output=`dmesg | grep hda` ==> p1 = Popen(["dmesg"], stdout=PIPE) p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) output = p2.communicate()[0] Replacing os.system() --------------------- sts = os.system("mycmd" + " myarg") ==> p = Popen("mycmd" + " myarg", shell=True) pid, sts = os.waitpid(p.pid, 0) Note: * Calling the program through the shell is usually not required. * It's easier to look at the returncode attribute than the exitstatus. 
A more real-world example would look like this: try: retcode = call("mycmd" + " myarg", shell=True) if retcode < 0: print("Child was terminated by signal", -retcode, file=sys.stderr) else: print("Child returned", retcode, file=sys.stderr) except OSError as e: print("Execution failed:", e, file=sys.stderr) Replacing os.spawn* ------------------- P_NOWAIT example: pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg") ==> pid = Popen(["/bin/mycmd", "myarg"]).pid P_WAIT example: retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg") ==> retcode = call(["/bin/mycmd", "myarg"]) Vector example: os.spawnvp(os.P_NOWAIT, path, args) ==> Popen([path] + args[1:]) Environment example: os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env) ==> Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"}) """ import sys mswindows = (sys.platform == "win32") import io import os import traceback import gc import signal import builtins import warnings import errno from typing import ( Any, Tuple, List, Sequence, Callable, Mapping, cast, Set, Dict, IO, TextIO, AnyStr ) from types import TracebackType # Exception classes used by this module. class CalledProcessError(Exception): """This exception is raised when a process run by check_call() or check_output() returns a non-zero exit status. The exit status will be stored in the returncode attribute; check_output() will also store the output in the output attribute. """ def __init__(self, returncode: int, cmd: str, output: Any = None) -> None: self.returncode = returncode self.cmd = cmd self.output = output def __str__(self) -> str: return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode) if mswindows: import threading import msvcrt import _subprocess class STARTUPINFO: dwFlags = 0 hStdInput = cast(Any, None) hStdOutput = cast(Any, None) hStdError = cast(Any, None) wShowWindow = 0 class pywintypes: error = IOError else: import select _has_poll = hasattr(select, 'poll') import fcntl import pickle try: import _posixsubprocess have_posixsubprocess = True except ImportError: have_posixsubprocess = False warnings.warn("The _posixsubprocess module is not being used. " "Child process reliability may suffer if your " "program uses threads.", RuntimeWarning) # When select or poll has indicated that the file is writable, # we can write up to _PIPE_BUF bytes without risk of blocking. # POSIX defines PIPE_BUF as >= 512. 
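# Illustrative values: on Linux select.PIPE_BUF is typically 4096, so
# _PIPE_BUF resolves to 4096 there; when the attribute is missing we fall
# back to 512, the smallest value POSIX guarantees for an atomic pipe
# write.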
_PIPE_BUF = getattr(select, 'PIPE_BUF', 512) # type: int _FD_CLOEXEC = getattr(fcntl, 'FD_CLOEXEC', 1) # type: int def _set_cloexec(fd: int, cloexec: bool) -> None: old = fcntl.fcntl(fd, fcntl.F_GETFD) if cloexec: fcntl.fcntl(fd, fcntl.F_SETFD, old | _FD_CLOEXEC) else: fcntl.fcntl(fd, fcntl.F_SETFD, old & ~_FD_CLOEXEC) if have_posixsubprocess: _create_pipe = _posixsubprocess.cloexec_pipe else: def __create_pipe() -> Tuple[int, int]: fds = os.pipe() _set_cloexec(fds[0], True) _set_cloexec(fds[1], True) return fds _create_pipe = __create_pipe __all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput", "getoutput", "check_output", "CalledProcessError"] if mswindows: from _subprocess import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP, STD_INPUT_HANDLE, STD_OUTPUT_HANDLE, STD_ERROR_HANDLE, SW_HIDE, STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW) __all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP", "STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE", "STD_ERROR_HANDLE", "SW_HIDE", "STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW"]) try: MAXFD = os.sysconf("SC_OPEN_MAX") except: MAXFD = 256 # This lists holds Popen instances for which the underlying process had not # exited at the time its __del__ method got called: those processes are wait()ed # for synchronously from _cleanup() when a new Popen object is created, to avoid # zombie processes. _active = [] # type: List[Popen] def _cleanup() -> None: for inst in _active[:]: res = inst._internal_poll(_deadstate=sys.maxsize) if res is not None: try: _active.remove(inst) except ValueError: # This can happen if two threads create a new Popen instance. # It's harmless that it was already removed, so ignore. pass PIPE = -1 STDOUT = -2 def _eintr_retry_call(func: Any, *args: Any) -> Any: while True: try: return func(*args) except (OSError, IOError) as e: if e.errno == errno.EINTR: continue raise def call(*popenargs: Any, **kwargs: Any) -> int: """Run command with arguments. Wait for command to complete, then return the returncode attribute. The arguments are the same as for the Popen constructor. Example: retcode = call(["ls", "-l"]) """ return Popen(*popenargs, **kwargs).wait() def check_call(*popenargs: Any, **kwargs: Any) -> int: """Run command with arguments. Wait for command to complete. If the exit code was zero then return, otherwise raise CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute. The arguments are the same as for the Popen constructor. Example: check_call(["ls", "-l"]) """ retcode = call(*popenargs, **kwargs) if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] raise CalledProcessError(retcode, cmd) return 0 def check_output(*popenargs: Any, **kwargs: Any) -> bytes: r"""Run command with arguments and return its output as a byte string. If the exit code was non-zero it raises a CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute and output in the output attribute. The arguments are the same as for the Popen constructor. Example: >>> check_output(["ls", "-l", "/dev/null"]) b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n' The stdout argument is not allowed as it is used internally. To capture standard error in the result, use stderr=STDOUT. >>> check_output(["/bin/sh", "-c", ... "ls -l non_existent_file ; exit 0"], ... 
stderr=STDOUT) b'ls: non_existent_file: No such file or directory\n' """ if 'stdout' in kwargs: raise ValueError('stdout argument not allowed, it will be overridden.') kwargs['stdout'] = PIPE process = Popen(*popenargs, **kwargs) output, unused_err = process.communicate() retcode = process.poll() if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] raise CalledProcessError(retcode, cmd, output=output) return output def list2cmdline(seq: Sequence[str]) -> str: """ Translate a sequence of arguments into a command line string, using the same rules as the MS C runtime: 1) Arguments are delimited by white space, which is either a space or a tab. 2) A string surrounded by double quotation marks is interpreted as a single argument, regardless of white space contained within. A quoted string can be embedded in an argument. 3) A double quotation mark preceded by a backslash is interpreted as a literal double quotation mark. 4) Backslashes are interpreted literally, unless they immediately precede a double quotation mark. 5) If backslashes immediately precede a double quotation mark, every pair of backslashes is interpreted as a literal backslash. If the number of backslashes is odd, the last backslash escapes the next double quotation mark as described in rule 3. """ # See # http://msdn.microsoft.com/en-us/library/17w5ykft.aspx # or search http://msdn.microsoft.com for # "Parsing C++ Command-Line Arguments" result = [] # type: List[str] needquote = False for arg in seq: bs_buf = [] # type: List[str] # Add a space to separate this argument from the others if result: result.append(' ') needquote = (" " in arg) or ("\t" in arg) or not arg if needquote: result.append('"') for c in arg: if c == '\\': # Don't know if we need to double yet. bs_buf.append(c) elif c == '"': # Double backslashes. result.append('\\' * len(bs_buf)*2) bs_buf = [] result.append('\\"') else: # Normal char if bs_buf: result.extend(bs_buf) bs_buf = [] result.append(c) # Add remaining backslashes, if any. if bs_buf: result.extend(bs_buf) if needquote: result.extend(bs_buf) result.append('"') return ''.join(result) # Various tools for executing commands and looking at their output and status. # # NB This only works (and is only relevant) for POSIX. def getstatusoutput(cmd: str) -> Tuple[int, str]: """Return (status, output) of executing cmd in a shell. Execute the string 'cmd' in a shell with os.popen() and return a 2-tuple (status, output). cmd is actually run as '{ cmd ; } 2>&1', so that the returned output will contain output or error messages. A trailing newline is stripped from the output. The exit status for the command can be interpreted according to the rules for the C function wait(). Example: >>> import subprocess >>> subprocess.getstatusoutput('ls /bin/ls') (0, '/bin/ls') >>> subprocess.getstatusoutput('cat /bin/junk') (256, 'cat: /bin/junk: No such file or directory') >>> subprocess.getstatusoutput('/bin/junk') (256, 'sh: /bin/junk: not found') """ pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r') text = pipe.read() sts = pipe.close() if sts is None: sts = 0 if text[-1:] == '\n': text = text[:-1] return sts, text def getoutput(cmd: str) -> str: """Return output (stdout or stderr) of executing cmd in a shell. Like getstatusoutput(), except the exit status is ignored and the return value is a string containing the command's output. 
Example: >>> import subprocess >>> subprocess.getoutput('ls /bin/ls') '/bin/ls' """ return getstatusoutput(cmd)[1] _PLATFORM_DEFAULT_CLOSE_FDS = object() class Popen(object): def __init__(self, args: Sequence[Any], bufsize: int = 0, executable: str = None, stdin: Any = None, stdout: Any = None, stderr: Any = None, preexec_fn: Callable[[], Any] = None, close_fds: Any = _PLATFORM_DEFAULT_CLOSE_FDS, shell: int = False, cwd: str = None, env: Mapping[str, str] = None, universal_newlines: int = False, startupinfo: 'STARTUPINFO' = None, creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, pass_fds: Any = ()) -> None: """Create new Popen instance.""" _cleanup() self._child_created = False if bufsize is None: bufsize = 0 # Restore default if not isinstance(bufsize, int): raise TypeError("bufsize must be an integer") if mswindows: if preexec_fn is not None: raise ValueError("preexec_fn is not supported on Windows " "platforms") any_stdio_set = (stdin is not None or stdout is not None or stderr is not None) if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS: if any_stdio_set: close_fds = False else: close_fds = True elif close_fds and any_stdio_set: raise ValueError( "close_fds is not supported on Windows platforms" " if you redirect stdin/stdout/stderr") else: # POSIX if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS: close_fds = True if pass_fds and not close_fds: warnings.warn("pass_fds overriding close_fds.", RuntimeWarning) close_fds = True if startupinfo is not None: raise ValueError("startupinfo is only supported on Windows " "platforms") if creationflags != 0: raise ValueError("creationflags is only supported on Windows " "platforms") self.stdin = None # type: IO[Any] self.stdout = None # type: IO[Any] self.stderr = None # type: IO[Any] self.pid = None # type: int self.returncode = None # type: int self.universal_newlines = universal_newlines # Input and output objects. The general principle is like # this: # # Parent Child # ------ ----- # p2cwrite ---stdin---> p2cread # c2pread <--stdout--- c2pwrite # errread <--stderr--- errwrite # # On POSIX, the child objects are file descriptors. On # Windows, these are Windows file handles. The parent objects # are file descriptors on both platforms. The parent objects # are -1 when not using PIPEs. The child objects are -1 # when not redirecting. (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) = self._get_handles(stdin, stdout, stderr) # We wrap OS handles *before* launching the child, otherwise a # quickly terminating child could make our fds unwrappable # (see #8458). 
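# Illustrative example: for Popen(['cat'], stdin=PIPE, stdout=PIPE) the
# call above returns p2cwrite and c2pread as the parent's ends of the two
# pipes (the rest are -1); those are the descriptors wrapped as
# self.stdin and self.stdout below.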
if mswindows: if p2cwrite != -1: p2cwrite = msvcrt.open_osfhandle(p2cwrite.Detach(), 0) if c2pread != -1: c2pread = msvcrt.open_osfhandle(c2pread.Detach(), 0) if errread != -1: errread = msvcrt.open_osfhandle(errread.Detach(), 0) if p2cwrite != -1: self.stdin = io.open(p2cwrite, 'wb', bufsize) if self.universal_newlines: self.stdin = io.TextIOWrapper(self.stdin, write_through=True) if c2pread != -1: self.stdout = io.open(c2pread, 'rb', bufsize) if universal_newlines: self.stdout = io.TextIOWrapper(self.stdout) if errread != -1: self.stderr = io.open(errread, 'rb', bufsize) if universal_newlines: self.stderr = io.TextIOWrapper(self.stderr) try: self._execute_child(args, executable, preexec_fn, close_fds, pass_fds, cwd, env, universal_newlines, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, start_new_session) except: # Cleanup if the child failed starting for f in filter(None, [self.stdin, self.stdout, self.stderr]): try: f.close() except EnvironmentError: # Ignore EBADF or other errors pass raise def _translate_newlines(self, data: bytes, encoding: str) -> str: data = data.replace(b"\r\n", b"\n").replace(b"\r", b"\n") return data.decode(encoding) def __enter__(self) -> 'Popen': return self def __exit__(self, type: type, value: BaseException, traceback: TracebackType) -> bool: if self.stdout: self.stdout.close() if self.stderr: self.stderr.close() if self.stdin: self.stdin.close() # Wait for the process to terminate, to avoid zombies. self.wait() return False def __del__(self, _maxsize: int = sys.maxsize, _active: List['Popen'] = _active) -> None: # If __init__ hasn't had a chance to execute (e.g. if it # was passed an undeclared keyword argument), we don't # have a _child_created attribute at all. if not getattr(self, '_child_created', False): # We didn't get to successfully create a child process. return # In case the child hasn't been waited on, check if it's done. self._internal_poll(_deadstate=_maxsize) if self.returncode is None and _active is not None: # Child is still running, keep us alive until we can wait on it. _active.append(self) def communicate(self, input: Any = None) -> Tuple[Any, Any]: """Interact with process: Send data to stdin. Read data from stdout and stderr, until end-of-file is reached. Wait for process to terminate. The optional input argument should be a string to be sent to the child process, or None, if no data should be sent to the child. communicate() returns a tuple (stdout, stderr).""" # Optimization: If we are only using one pipe, or no pipe at # all, using select() or threads is unnecessary. 
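# Illustrative example: Popen(['ls'], stdout=PIPE).communicate() leaves
# self.stdin and self.stderr as None, so the count below is 2 and the
# method just reads self.stdout to end-of-file and wait()s, without
# select() or reader threads.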
if [self.stdin, self.stdout, self.stderr].count(None) >= 2: stdout = None # type: IO[Any] stderr = None # type: IO[Any] if self.stdin: if input: try: self.stdin.write(input) except IOError as e: if e.errno != errno.EPIPE and e.errno != errno.EINVAL: raise self.stdin.close() elif self.stdout: stdout = _eintr_retry_call(self.stdout.read) self.stdout.close() elif self.stderr: stderr = _eintr_retry_call(self.stderr.read) self.stderr.close() self.wait() return (stdout, stderr) return self._communicate(input) def poll(self) -> int: return self._internal_poll() if mswindows: # # Windows methods # def _get_handles(self, stdin: Any, stdout: Any, stderr: Any) -> Tuple[Any, Any, Any, Any, Any, Any]: """Construct and return tuple with IO objects: p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite """ if stdin is None and stdout is None and stderr is None: return (-1, -1, -1, -1, -1, -1) p2cread, p2cwrite = -1, -1 # type: (Any, Any) c2pread, c2pwrite = -1, -1 # type: (Any, Any) errread, errwrite = -1, -1 # type: (Any, Any) if stdin is None: p2cread = _subprocess.GetStdHandle(_subprocess.STD_INPUT_HANDLE) if p2cread is None: p2cread, _ = _subprocess.CreatePipe(None, 0) elif stdin == PIPE: p2cread, p2cwrite = _subprocess.CreatePipe(None, 0) elif isinstance(stdin, int): p2cread = msvcrt.get_osfhandle(stdin) else: # Assuming file-like object p2cread = msvcrt.get_osfhandle(stdin.fileno()) p2cread = self._make_inheritable(p2cread) if stdout is None: c2pwrite = _subprocess.GetStdHandle(_subprocess.STD_OUTPUT_HANDLE) if c2pwrite is None: _, c2pwrite = _subprocess.CreatePipe(None, 0) elif stdout == PIPE: c2pread, c2pwrite = _subprocess.CreatePipe(None, 0) elif isinstance(stdout, int): c2pwrite = msvcrt.get_osfhandle(stdout) else: # Assuming file-like object c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) c2pwrite = self._make_inheritable(c2pwrite) if stderr is None: errwrite = _subprocess.GetStdHandle(_subprocess.STD_ERROR_HANDLE) if errwrite is None: _, errwrite = _subprocess.CreatePipe(None, 0) elif stderr == PIPE: errread, errwrite = _subprocess.CreatePipe(None, 0) elif stderr == STDOUT: errwrite = c2pwrite elif isinstance(stderr, int): errwrite = msvcrt.get_osfhandle(stderr) else: # Assuming file-like object errwrite = msvcrt.get_osfhandle(stderr.fileno()) errwrite = self._make_inheritable(errwrite) return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) def _make_inheritable(self, handle: _subprocess.Handle) -> int: """Return a duplicate of handle, which is inheritable""" return _subprocess.DuplicateHandle(_subprocess.GetCurrentProcess(), handle, _subprocess.GetCurrentProcess(), 0, 1, _subprocess.DUPLICATE_SAME_ACCESS) def _find_w9xpopen(self) -> str: """Find and return absolut path to w9xpopen.exe""" w9xpopen = os.path.join( os.path.dirname(_subprocess.GetModuleFileName(0)), "w9xpopen.exe") if not os.path.exists(w9xpopen): # Eeek - file-not-found - possibly an embedding # situation - see if we can locate it in sys.exec_prefix w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), "w9xpopen.exe") if not os.path.exists(w9xpopen): raise RuntimeError("Cannot locate w9xpopen.exe, which is " "needed for Popen to work with your " "shell or platform.") return w9xpopen def _execute_child(self, args: Sequence[str], executable: str, preexec_fn: Callable[[], Any], close_fds: Any, pass_fds: Any, cwd: str, env: Mapping[str, str], universal_newlines: int, startupinfo: STARTUPINFO, creationflags: int, shell: int, p2cread: Any, p2cwrite: Any, c2pread: Any, c2pwrite: Any, errread: Any, errwrite: Any, 
restore_signals: bool, start_new_session: bool) -> None: """Execute program (MS Windows version)""" assert not pass_fds, "pass_fds not supported on Windows." if not isinstance(args, str): args = list2cmdline(args) # Process startup details if startupinfo is None: startupinfo = STARTUPINFO() if -1 not in (p2cread, c2pwrite, errwrite): startupinfo.dwFlags |= _subprocess.STARTF_USESTDHANDLES startupinfo.hStdInput = p2cread startupinfo.hStdOutput = c2pwrite startupinfo.hStdError = errwrite if shell: startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW startupinfo.wShowWindow = _subprocess.SW_HIDE comspec = os.environ.get("COMSPEC", "cmd.exe") args = '{} /c "{}"'.format (comspec, args) if (_subprocess.GetVersion() >= 0x80000000 or os.path.basename(comspec).lower() == "command.com"): # Win9x, or using command.com on NT. We need to # use the w9xpopen intermediate program. For more # information, see KB Q150956 # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp) w9xpopen = self._find_w9xpopen() args = '"%s" %s' % (w9xpopen, args) # Not passing CREATE_NEW_CONSOLE has been known to # cause random failures on win9x. Specifically a # dialog: "Your program accessed mem currently in # use at xxx" and a hopeful warning about the # stability of your system. Cost is Ctrl+C won't # kill children. creationflags |= _subprocess.CREATE_NEW_CONSOLE # Start the process try: hp, ht, pid, tid = _subprocess.CreateProcess(executable, cast(str, args), # no special security None, None, int(not close_fds), creationflags, env, cwd, startupinfo) except pywintypes.error as e: # Translate pywintypes.error to WindowsError, which is # a subclass of OSError. FIXME: We should really # translate errno using _sys_errlist (or similar), but # how can this be done from Python? raise WindowsError(*e.args) finally: # Child is launched. Close the parent's copy of those pipe # handles that only the child should have open. You need # to make sure that no handles to the write end of the # output pipe are maintained in this process or else the # pipe will not close when the child process exits and the # ReadFile will hang. if p2cread != -1: p2cread.Close() if c2pwrite != -1: c2pwrite.Close() if errwrite != -1: errwrite.Close() # Retain the process handle, but close the thread handle self._child_created = True self._handle = hp self.pid = pid ht.Close() def _internal_poll(self, _deadstate: int = None) -> int: """Check if child process has terminated. Returns returncode attribute. This method is called by __del__, so it can only refer to objects in its local scope. """ return self._internal_poll_win(_deadstate) from _subprocess import Handle def _internal_poll_win(self, _deadstate: int = None, _WaitForSingleObject: Callable[[Handle, int], int] = _subprocess.WaitForSingleObject, _WAIT_OBJECT_0: int = _subprocess.WAIT_OBJECT_0, _GetExitCodeProcess: Callable[[Handle], int] = _subprocess.GetExitCodeProcess) -> int: if self.returncode is None: if _WaitForSingleObject(self._handle, 0) == _WAIT_OBJECT_0: self.returncode = _GetExitCodeProcess(self._handle) return self.returncode def wait(self) -> int: """Wait for child process to terminate. 
Returns returncode attribute.""" if self.returncode is None: _subprocess.WaitForSingleObject(self._handle, _subprocess.INFINITE) self.returncode = _subprocess.GetExitCodeProcess(self._handle) return self.returncode def _readerthread(self, fh: IO[AnyStr], buffer: List[AnyStr]) -> None: buffer.append(fh.read()) fh.close() def _communicate(self, input: Any) -> Tuple[Any, Any]: stdout = cast(Any, None) # Return stderr = cast(Any, None) # Return if self.stdout: stdout = [] stdout_thread = threading.Thread(target=self._readerthread, args=(self.stdout, stdout)) stdout_thread.daemon = True stdout_thread.start() if self.stderr: stderr = [] stderr_thread = threading.Thread(target=self._readerthread, args=(self.stderr, stderr)) stderr_thread.daemon = True stderr_thread.start() if self.stdin: if input is not None: try: self.stdin.write(input) except IOError as e: if e.errno != errno.EPIPE: raise self.stdin.close() if self.stdout: stdout_thread.join() if self.stderr: stderr_thread.join() # All data exchanged. Translate lists into strings. if stdout is not None: stdout = stdout[0] if stderr is not None: stderr = stderr[0] self.wait() return (stdout, stderr) def send_signal(self, sig: int) -> None: """Send a signal to the process """ if sig == signal.SIGTERM: self.terminate() elif sig == signal.CTRL_C_EVENT: os.kill(self.pid, signal.CTRL_C_EVENT) elif sig == signal.CTRL_BREAK_EVENT: os.kill(self.pid, signal.CTRL_BREAK_EVENT) else: raise ValueError("Unsupported signal: {}".format(sig)) def terminate(self) -> None: """Terminates the process """ _subprocess.TerminateProcess(self._handle, 1) def kill(self) -> None: """Terminates the process """ self.terminate() else: # # POSIX methods # def _get_handles(self, stdin: Any, stdout: Any, stderr: Any) -> Tuple[Any, Any, Any, Any, Any, Any]: """Construct and return tuple with IO objects: p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite """ p2cread, p2cwrite = -1, -1 c2pread, c2pwrite = -1, -1 errread, errwrite = -1, -1 if stdin is None: pass elif stdin == PIPE: p2cread, p2cwrite = _create_pipe() elif isinstance(stdin, int): p2cread = stdin else: # Assuming file-like object p2cread = stdin.fileno() if stdout is None: pass elif stdout == PIPE: c2pread, c2pwrite = _create_pipe() elif isinstance(stdout, int): c2pwrite = stdout else: # Assuming file-like object c2pwrite = stdout.fileno() if stderr is None: pass elif stderr == PIPE: errread, errwrite = _create_pipe() elif stderr == STDOUT: errwrite = c2pwrite elif isinstance(stderr, int): errwrite = stderr else: # Assuming file-like object errwrite = stderr.fileno() return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) def _close_fds(self, fds_to_keep: Set[int]) -> None: start_fd = 3 for fd in sorted(fds_to_keep): if fd >= start_fd: os.closerange(start_fd, fd) start_fd = fd + 1 if start_fd <= MAXFD: os.closerange(start_fd, MAXFD) def _execute_child(self, args: Sequence[str], executable: str, preexec_fn: Callable[[], Any], close_fds: Any, pass_fds: Any, cwd: str, env: Mapping[str, str], universal_newlines: int, startupinfo: 'STARTUPINFO', creationflags: int, shell: int, p2cread: Any, p2cwrite: Any, c2pread: Any, c2pwrite: Any, errread: Any, errwrite: Any, restore_signals: bool, start_new_session: bool) -> None: """Execute program (POSIX version)""" if isinstance(args, str): args = [args] else: args = list(args) if shell: args = ["/bin/sh", "-c"] + args if executable: args[0] = executable if executable is None: executable = args[0] # For transferring possible exec failure from child to parent. 
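# (For example, if the requested executable does not exist, the child's
# exec call fails with OSError/ENOENT; the child writes roughly
# b'OSError:2:...' to errpipe_write and the parent re-raises a matching
# OSError after reading it.)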
# Data format: "exception name:hex errno:description" # Pickle is not used; it is complex and involves memory allocation. errpipe_read, errpipe_write = _create_pipe() try: try: if have_posixsubprocess: # We must avoid complex work that could involve # malloc or free in the child process to avoid # potential deadlocks, thus we do all this here. # and pass it to fork_exec() if env is not None: env_list = [os.fsencode(k) + b'=' + os.fsencode(v) for k, v in env.items()] else: env_list = None # Use execv instead of execve. executable_enc = os.fsencode(executable) if os.path.dirname(executable_enc): executable_list = (executable_enc,) # type: tuple else: # This matches the behavior of os._execvpe(). executable_list = tuple( os.path.join(os.fsencode(dir), executable_enc) for dir in os.get_exec_path(env)) fds_to_keep = set(pass_fds) fds_to_keep.add(errpipe_write) self.pid = _posixsubprocess.fork_exec( args, executable_list, close_fds, sorted(fds_to_keep), cwd, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, start_new_session, preexec_fn) self._child_created = True else: # Pure Python implementation: It is not thread safe. # This implementation may deadlock in the child if your # parent process has any other threads running. gc_was_enabled = gc.isenabled() # Disable gc to avoid bug where gc -> file_dealloc -> # write to stderr -> hang. See issue1336 gc.disable() try: self.pid = os.fork() except: if gc_was_enabled: gc.enable() raise self._child_created = True if self.pid == 0: # Child try: # Close parent's pipe ends if p2cwrite != -1: os.close(p2cwrite) if c2pread != -1: os.close(c2pread) if errread != -1: os.close(errread) os.close(errpipe_read) # When duping fds, if there arises a situation # where one of the fds is either 0, 1 or 2, it # is possible that it is overwritten (#12607). if c2pwrite == 0: c2pwrite = os.dup(c2pwrite) if errwrite == 0 or errwrite == 1: errwrite = os.dup(errwrite) # Dup fds for child def _dup2(a: int, b: int) -> None: # dup2() removes the CLOEXEC flag but # we must do it ourselves if dup2() # would be a no-op (issue #10806). if a == b: _set_cloexec(a, False) elif a != -1: os.dup2(a, b) _dup2(p2cread, 0) _dup2(c2pwrite, 1) _dup2(errwrite, 2) # Close pipe fds. Make sure we don't close the # same fd more than once, or standard fds. closed = set() # type: Set[int] for fd in [p2cread, c2pwrite, errwrite]: if fd > 2 and fd not in closed: os.close(fd) closed.add(fd) # Close all other fds, if asked for if close_fds: fds_to_keep = set(pass_fds) fds_to_keep.add(errpipe_write) self._close_fds(fds_to_keep) if cwd is not None: os.chdir(cwd) # This is a copy of Python/pythonrun.c # _Py_RestoreSignals(). If that were exposed # as a sys._py_restoresignals func it would be # better.. but this pure python implementation # isn't likely to be used much anymore. 
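# Illustrative note (not part of the original module): if the exec call
# further below fails, the child writes a small ASCII payload back over
# errpipe_write in the "exception name:hex errno:description" format
# described at the top of this function.  For a missing executable it
# would look roughly like
#
#     b"OSError:2:[Errno 2] No such file or directory"
#
# and the parent splits it on b':' (at most twice) to rebuild and
# re-raise a matching exception.  The exact message text shown here is
# only an assumption for illustration, not something this file fixes.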
if restore_signals: signals = ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ') for sig in signals: if hasattr(signal, sig): signal.signal(getattr(signal, sig), signal.SIG_DFL) if start_new_session and hasattr(os, 'setsid'): os.setsid() if preexec_fn: preexec_fn() if env is None: os.execvp(executable, args) else: os.execvpe(executable, args, env) except: try: exc_type, exc_value = sys.exc_info()[:2] if isinstance(exc_value, OSError): errno_num = exc_value.errno else: errno_num = 0 message = '%s:%x:%s' % (exc_type.__name__, errno_num, exc_value) messageb = message.encode(errors="surrogatepass") os.write(errpipe_write, messageb) except Exception: # We MUST not allow anything odd happening # above to prevent us from exiting below. pass # This exitcode won't be reported to applications # so it really doesn't matter what we return. os._exit(255) # Parent if gc_was_enabled: gc.enable() finally: # be sure the FD is closed no matter what os.close(errpipe_write) if p2cread != -1 and p2cwrite != -1: os.close(p2cread) if c2pwrite != -1 and c2pread != -1: os.close(c2pwrite) if errwrite != -1 and errread != -1: os.close(errwrite) # Wait for exec to fail or succeed; possibly raising an # exception (limited in size) data = bytearray() while True: part = _eintr_retry_call(os.read, errpipe_read, 50000) data += part if not part or len(data) > 50000: break finally: # be sure the FD is closed no matter what os.close(errpipe_read) if data: try: _eintr_retry_call(os.waitpid, self.pid, 0) except OSError as e: if e.errno != errno.ECHILD: raise try: (exception_name, hex_errno, err_msg_b) = bytes(data).split(b':', 2) except ValueError: print('Bad exception data:', repr(data)) exception_name = b'RuntimeError' hex_errno = b'0' err_msg_b = b'Unknown' child_exception_type = getattr( builtins, exception_name.decode('ascii'), RuntimeError) for fd in (p2cwrite, c2pread, errread): if fd != -1: os.close(fd) err_msg = err_msg_b.decode(errors="surrogatepass") if issubclass(child_exception_type, OSError) and hex_errno: errno_num = int(hex_errno, 16) if errno_num != 0: err_msg = os.strerror(errno_num) if errno_num == errno.ENOENT: err_msg += ': ' + repr(args[0]) raise child_exception_type(errno_num, err_msg) raise child_exception_type(err_msg) def _handle_exitstatus( self, sts: int, _WIFSIGNALED: Callable[[int], bool] = os.WIFSIGNALED, _WTERMSIG: Callable[[int], int] = os.WTERMSIG, _WIFEXITED: Callable[[int], bool] = os.WIFEXITED, _WEXITSTATUS: Callable[[int], int] = os.WEXITSTATUS) -> None: # This method is called (indirectly) by __del__, so it cannot # refer to anything outside of its local scope.""" if _WIFSIGNALED(sts): self.returncode = -_WTERMSIG(sts) elif _WIFEXITED(sts): self.returncode = _WEXITSTATUS(sts) else: # Should never happen raise RuntimeError("Unknown child exit status!") def _internal_poll(self, _deadstate: int = None) -> int: """Check if child process has terminated. Returns returncode attribute. This method is called by __del__, so it cannot reference anything outside of the local scope (nor can any methods it calls). """ return self._internal_poll_posix(_deadstate) def _internal_poll_posix(self, _deadstate: int = None, _waitpid: Callable[[int, int], Tuple[int, int]] = os.waitpid, _WNOHANG: int = os.WNOHANG, _os_error: Any = os.error) -> int: if self.returncode is None: try: pid, sts = _waitpid(self.pid, _WNOHANG) if pid == self.pid: self._handle_exitstatus(sts) except _os_error: if _deadstate is not None: self.returncode = _deadstate return self.returncode def wait(self) -> int: """Wait for child process to terminate. 
Returns returncode attribute.""" if self.returncode is None: try: pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0) except OSError as e: if e.errno != errno.ECHILD: raise # This happens if SIGCLD is set to be ignored or waiting # for child processes has otherwise been disabled for our # process. This child is dead, we can't get the status. sts = 0 self._handle_exitstatus(sts) return self.returncode def _communicate(self, input: Any) -> Tuple[Any, Any]: if self.stdin: # Flush stdio buffer. This might block, if the user has # been writing to .stdin in an uncontrolled fashion. self.stdin.flush() if not input: self.stdin.close() if _has_poll: stdout, stderr = self._communicate_with_poll(input) else: stdout, stderr = self._communicate_with_select(input) # All data exchanged. Translate lists into strings. if stdout is not None: stdout2 = b''.join(stdout) else: stdout2 = None if stderr is not None: stderr2 = b''.join(stderr) else: stderr2 = None # Translate newlines, if requested. # This also turns bytes into strings. stdout3 = cast(Any, stdout2) stderr3 = cast(Any, stderr2) if self.universal_newlines: if stdout is not None: stdout3 = self._translate_newlines( stdout2, cast(TextIO, self.stdout).encoding) if stderr is not None: stderr3 = self._translate_newlines( stderr2, cast(TextIO, self.stderr).encoding) self.wait() return (stdout3, stderr3) def _communicate_with_poll(self, input: Any) -> Tuple[List[bytes], List[bytes]]: stdout = None # type: List[bytes] # Return stderr = None # type: List[bytes] # Return fd2file = {} # type: Dict[int, Any] fd2output = {} # type: Dict[int, List[bytes]] poller = select.poll() def register_and_append(file_obj: IO[Any], eventmask: int) -> None: poller.register(file_obj.fileno(), eventmask) fd2file[file_obj.fileno()] = file_obj def close_unregister_and_remove(fd: int) -> None: poller.unregister(fd) fd2file[fd].close() fd2file.pop(fd) if self.stdin and input: register_and_append(self.stdin, select.POLLOUT) select_POLLIN_POLLPRI = select.POLLIN | select.POLLPRI if self.stdout: register_and_append(self.stdout, select_POLLIN_POLLPRI) fd2output[self.stdout.fileno()] = stdout = [] if self.stderr: register_and_append(self.stderr, select_POLLIN_POLLPRI) fd2output[self.stderr.fileno()] = stderr = [] input_offset = 0 while fd2file: try: ready = poller.poll() except select.error as e: if e.args[0] == errno.EINTR: continue raise # XXX Rewrite these to use non-blocking I/O on the # file objects; they are no longer using C stdio! for fd, mode in ready: if mode & select.POLLOUT: chunk = input[input_offset : input_offset + _PIPE_BUF] try: input_offset += os.write(fd, chunk) except OSError as e2: if e2.errno == errno.EPIPE: close_unregister_and_remove(fd) else: raise else: if input_offset >= len(input): close_unregister_and_remove(fd) elif mode & select_POLLIN_POLLPRI: data = os.read(fd, 4096) if not data: close_unregister_and_remove(fd) fd2output[fd].append(data) else: # Ignore hang up or errors. 
close_unregister_and_remove(fd) return (stdout, stderr) def _communicate_with_select(self, input: Any) -> Tuple[List[bytes], List[bytes]]: read_set = [] # type: List[IO[Any]] write_set = [] # type: List[IO[Any]] stdout = None # type: List[bytes] # Return stderr = None # type: List[bytes] # Return if self.stdin and input: write_set.append(self.stdin) if self.stdout: read_set.append(self.stdout) stdout = [] if self.stderr: read_set.append(self.stderr) stderr = [] input_offset = 0 while read_set or write_set: try: rlist, wlist, xlist = select.select(read_set, write_set, []) except select.error as e: if e.args[0] == errno.EINTR: continue raise # XXX Rewrite these to use non-blocking I/O on the # file objects; they are no longer using C stdio! if self.stdin in wlist: chunk = input[input_offset : input_offset + _PIPE_BUF] try: bytes_written = os.write(self.stdin.fileno(), chunk) except OSError as oe: if oe.errno == errno.EPIPE: self.stdin.close() write_set.remove(self.stdin) else: raise else: input_offset += bytes_written if input_offset >= len(input): self.stdin.close() write_set.remove(self.stdin) if self.stdout in rlist: data = os.read(self.stdout.fileno(), 1024) if not data: self.stdout.close() read_set.remove(self.stdout) stdout.append(data) if self.stderr in rlist: data = os.read(self.stderr.fileno(), 1024) if not data: self.stderr.close() read_set.remove(self.stderr) stderr.append(data) return (stdout, stderr) def send_signal(self, sig: int) -> None: """Send a signal to the process """ os.kill(self.pid, sig) def terminate(self) -> None: """Terminate the process with SIGTERM """ self.send_signal(signal.SIGTERM) def kill(self) -> None: """Kill the process with SIGKILL """ self.send_signal(signal.SIGKILL) def _demo_posix() -> None: # # Example 1: Simple redirection: Get process list # plist = Popen(["ps"], stdout=PIPE).communicate()[0] print("Process list:") print(plist) # # Example 2: Change uid before executing child # if os.getuid() == 0: p = Popen(["id"], preexec_fn=lambda: os.setuid(100)) p.wait() # # Example 3: Connecting several subprocesses # print("Looking for 'hda'...") p1 = Popen(["dmesg"], stdout=PIPE) p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) print(repr(p2.communicate()[0])) # # Example 4: Catch execution error # print() print("Trying a weird file...") try: print(Popen(["/this/path/does/not/exist"]).communicate()) except OSError as e: if e.errno == errno.ENOENT: print("The file didn't exist. I thought so...") else: print("Error", e.errno) else: print("Gosh. No error.", file=sys.stderr) def _demo_windows() -> None: # # Example 1: Connecting several subprocesses # print("Looking for 'PROMPT' in set output...") p1 = Popen("set", stdout=PIPE, shell=True) p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE) print(repr(p2.communicate()[0])) # # Example 2: Simple execution of program # print("Executing calc...") p = Popen("calc") p.wait() if __name__ == "__main__": if mswindows: _demo_windows() else: _demo_posix() mypy-0.560/test-data/stdlib-samples/3.2/tempfile.py0000644€tŠÔÚ€2›s®0000005631513215007205026173 0ustar jukkaDROPBOX\Domain Users00000000000000"""Temporary files. This module provides generic, low- and high-level interfaces for creating temporary files and directories. The interfaces listed as "safe" just below can be used without fear of race conditions. Those listed as "unsafe" cannot, and are provided for backward compatibility only. 
This module also provides some data items to the user: TMP_MAX - maximum number of names that will be tried before giving up. template - the default prefix for all temporary names. You may change this to control the default prefix. tempdir - If this is set to a string before the first use of any routine from this module, it will be considered as another candidate location to store temporary files. """ __all__ = [ "NamedTemporaryFile", "TemporaryFile", # high level safe interfaces "SpooledTemporaryFile", "TemporaryDirectory", "mkstemp", "mkdtemp", # low level safe interfaces "mktemp", # deprecated unsafe interface "TMP_MAX", "gettempprefix", # constants "tempdir", "gettempdir" ] # Imports. import warnings as _warnings import sys as _sys import io as _io import os as _os import errno as _errno from random import Random as _Random from typing import ( Any as _Any, Callable as _Callable, Iterator as _Iterator, List as _List, Tuple as _Tuple, Dict as _Dict, Iterable as _Iterable, IO as _IO, cast as _cast, Optional as _Optional, Type as _Type, ) from types import TracebackType as _TracebackType try: import fcntl as _fcntl except ImportError: def _set_cloexec(fd: int) -> None: pass else: def _set_cloexec(fd: int) -> None: try: flags = _fcntl.fcntl(fd, _fcntl.F_GETFD, 0) except IOError: pass else: # flags read successfully, modify flags |= _fcntl.FD_CLOEXEC _fcntl.fcntl(fd, _fcntl.F_SETFD, flags) try: import _thread _allocate_lock = _thread.allocate_lock # type: _Callable[[], _Any] except ImportError: import _dummy_thread _allocate_lock = _dummy_thread.allocate_lock _text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL if hasattr(_os, 'O_NOINHERIT'): _text_openflags |= _os.O_NOINHERIT if hasattr(_os, 'O_NOFOLLOW'): _text_openflags |= _os.O_NOFOLLOW _bin_openflags = _text_openflags if hasattr(_os, 'O_BINARY'): _bin_openflags |= _os.O_BINARY if hasattr(_os, 'TMP_MAX'): TMP_MAX = _os.TMP_MAX else: TMP_MAX = 10000 template = "tmp" # Internal routines. _once_lock = _allocate_lock() if hasattr(_os, "lstat"): _stat = _os.lstat # type: _Callable[[str], object] elif hasattr(_os, "stat"): _stat = _os.stat else: # Fallback. All we need is something that raises os.error if the # file doesn't exist. def __stat(fn: str) -> object: try: f = open(fn) except IOError: raise _os.error() f.close() return None _stat = __stat def _exists(fn: str) -> bool: try: _stat(fn) except _os.error: return False else: return True class _RandomNameSequence(_Iterator[str]): """An instance of _RandomNameSequence generates an endless sequence of unpredictable strings which can safely be incorporated into file names. Each string is six characters long. Multiple threads can safely use the same instance at the same time. _RandomNameSequence is an iterator.""" characters = "abcdefghijklmnopqrstuvwxyz0123456789_" @property def rng(self) -> _Random: cur_pid = _os.getpid() if cur_pid != getattr(self, '_rng_pid', None): self._rng = _Random() self._rng_pid = cur_pid return self._rng def __iter__(self) -> _Iterator[str]: return self def __next__(self) -> str: c = self.characters choose = self.rng.choice letters = [choose(c) for dummy in "123456"] return ''.join(letters) def _candidate_tempdir_list() -> _List[str]: """Generate a list of candidate temporary directories which _get_default_tempdir will try.""" dirlist = [] # type: _List[str] # First, try the environment. for envname in 'TMPDIR', 'TEMP', 'TMP': dirname = _os.getenv(envname) if dirname: dirlist.append(dirname) # Failing that, try OS-specific locations. 
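# Illustrative sketch (an assumption, not part of the original file):
# with none of TMPDIR/TEMP/TMP set, the list built below typically ends
# up as something like
#
#     ['/tmp', '/var/tmp', '/usr/tmp', '/home/user/project']
#
# on POSIX, or
#
#     [r'c:\temp', r'c:\tmp', r'\temp', r'\tmp', 'C:\\work']
#
# on Windows, with the current working directory appended as a last
# resort by the code that follows.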
if _os.name == 'nt': dirlist.extend([ r'c:\temp', r'c:\tmp', r'\temp', r'\tmp' ]) else: dirlist.extend([ '/tmp', '/var/tmp', '/usr/tmp' ]) # As a last resort, the current directory. try: dirlist.append(_os.getcwd()) except (AttributeError, _os.error): dirlist.append(_os.curdir) return dirlist def _get_default_tempdir() -> str: """Calculate the default directory to use for temporary files. This routine should be called exactly once. We determine whether or not a candidate temp dir is usable by trying to create and write to a file in that directory. If this is successful, the test file is deleted. To prevent denial of service, the name of the test file must be randomized.""" namer = _RandomNameSequence() dirlist = _candidate_tempdir_list() for dir in dirlist: if dir != _os.curdir: dir = _os.path.normcase(_os.path.abspath(dir)) # Try only a few names per directory. for seq in range(100): name = next(namer) filename = _os.path.join(dir, name) try: fd = _os.open(filename, _bin_openflags, 0o600) fp = _io.open(fd, 'wb') fp.write(b'blat') fp.close() _os.unlink(filename) fp = fd = None return dir except (OSError, IOError) as e: if e.args[0] != _errno.EEXIST: break # no point trying more names in this directory pass raise IOError(_errno.ENOENT, "No usable temporary directory found in %s" % dirlist) _name_sequence = None # type: _RandomNameSequence def _get_candidate_names() -> _RandomNameSequence: """Common setup sequence for all user-callable interfaces.""" global _name_sequence if _name_sequence is None: _once_lock.acquire() try: if _name_sequence is None: _name_sequence = _RandomNameSequence() finally: _once_lock.release() return _name_sequence def _mkstemp_inner(dir: str, pre: str, suf: str, flags: int) -> _Tuple[int, str]: """Code common to mkstemp, TemporaryFile, and NamedTemporaryFile.""" names = _get_candidate_names() for seq in range(TMP_MAX): name = next(names) file = _os.path.join(dir, pre + name + suf) try: fd = _os.open(file, flags, 0o600) _set_cloexec(fd) return (fd, _os.path.abspath(file)) except OSError as e: if e.errno == _errno.EEXIST: continue # try again raise raise IOError(_errno.EEXIST, "No usable temporary file name found") # User visible interfaces. def gettempprefix() -> str: """Accessor for tempdir.template.""" return template tempdir = None # type: str def gettempdir() -> str: """Accessor for tempfile.tempdir.""" global tempdir if tempdir is None: _once_lock.acquire() try: if tempdir is None: tempdir = _get_default_tempdir() finally: _once_lock.release() return tempdir def mkstemp(suffix: str = "", prefix: str = template, dir: str = None, text: bool = False) -> _Tuple[int, str]: """User-callable function to create and return a unique temporary file. The return value is a pair (fd, name) where fd is the file descriptor returned by os.open, and name is the filename. If 'suffix' is specified, the file name will end with that suffix, otherwise there will be no suffix. If 'prefix' is specified, the file name will begin with that prefix, otherwise a default prefix is used. If 'dir' is specified, the file will be created in that directory, otherwise a default directory is used. If 'text' is specified and true, the file is opened in text mode. Else (the default) the file is opened in binary mode. On some operating systems, this makes no difference. The file is readable and writable only by the creating user ID. If the operating system uses permission bits to indicate whether a file is executable, the file is executable by no one. 
The file descriptor is not inherited by children of this process. Caller is responsible for deleting the file when done with it. """ if dir is None: dir = gettempdir() if text: flags = _text_openflags else: flags = _bin_openflags return _mkstemp_inner(dir, prefix, suffix, flags) def mkdtemp(suffix: str = "", prefix: str = template, dir: str = None) -> str: """User-callable function to create and return a unique temporary directory. The return value is the pathname of the directory. Arguments are as for mkstemp, except that the 'text' argument is not accepted. The directory is readable, writable, and searchable only by the creating user. Caller is responsible for deleting the directory when done with it. """ if dir is None: dir = gettempdir() names = _get_candidate_names() for seq in range(TMP_MAX): name = next(names) file = _os.path.join(dir, prefix + name + suffix) try: _os.mkdir(file, 0o700) return file except OSError as e: if e.errno == _errno.EEXIST: continue # try again raise raise IOError(_errno.EEXIST, "No usable temporary directory name found") def mktemp(suffix: str = "", prefix: str = template, dir: str = None) -> str: """User-callable function to return a unique temporary file name. The file is not created. Arguments are as for mkstemp, except that the 'text' argument is not accepted. This function is unsafe and should not be used. The file name refers to a file that did not exist at some point, but by the time you get around to creating it, someone else may have beaten you to the punch. """ ## from warnings import warn as _warn ## _warn("mktemp is a potential security risk to your program", ## RuntimeWarning, stacklevel=2) if dir is None: dir = gettempdir() names = _get_candidate_names() for seq in range(TMP_MAX): name = next(names) file = _os.path.join(dir, prefix + name + suffix) if not _exists(file): return file raise IOError(_errno.EEXIST, "No usable temporary filename found") class _TemporaryFileWrapper: """Temporary file wrapper This class provides a wrapper around files opened for temporary use. In particular, it seeks to automatically remove the file when it is no longer needed. """ def __init__(self, file: _IO[_Any], name: str, delete: bool = True) -> None: self.file = file self.name = name self.close_called = False self.delete = delete if _os.name != 'nt': # Cache the unlinker so we don't get spurious errors at # shutdown when the module-level "os" is None'd out. Note # that this must be referenced as self.unlink, because the # name TemporaryFileWrapper may also get None'd out before # __del__ is called. self.unlink = _os.unlink def __getattr__(self, name: str) -> _Any: # Attribute lookups are delegated to the underlying file # and cached for non-numeric results # (i.e. methods are cached, closed and friends are not) file = _cast(_Any, self).__dict__['file'] # type: _IO[_Any] a = getattr(file, name) if not isinstance(a, int): setattr(self, name, a) return a # The underlying __enter__ method returns the wrong object # (self.file) so override it to return the wrapper def __enter__(self) -> '_TemporaryFileWrapper': self.file.__enter__() return self # iter() doesn't use __getattr__ to find the __iter__ method def __iter__(self) -> _Iterator[_Any]: return iter(self.file) # NT provides delete-on-close as a primitive, so we don't need # the wrapper to do anything special. We still use it so that # file.name is useful (i.e. not "(fdopen)") with NamedTemporaryFile. 
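# Usage sketch (illustrative only, not part of the original file): this
# wrapper is normally reached through NamedTemporaryFile() rather than
# constructed directly, e.g.
#
#     with NamedTemporaryFile(suffix='.log') as f:
#         f.write(b'hello')
#         f.flush()
#         print(f.name)    # a real path while the file is open
#     # on close the wrapper unlinks the file (unless delete=False)
#
# The platform split below exists because NT deletes the file for us
# via O_TEMPORARY, while POSIX needs the explicit unlink in close().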
if _os.name != 'nt': def close(self) -> None: if not self.close_called: self.close_called = True self.file.close() if self.delete: self.unlink(self.name) def __del__(self) -> None: self.close() # Need to trap __exit__ as well to ensure the file gets # deleted when used in a with statement def __exit__(self, exc: _Type[BaseException], value: BaseException, tb: _Optional[_TracebackType]) -> bool: result = self.file.__exit__(exc, value, tb) self.close() return result else: def __exit__(self, exc: _Type[BaseException], value: BaseException, tb: _Optional[_TracebackType]) -> bool: self.file.__exit__(exc, value, tb) return False def NamedTemporaryFile(mode: str = 'w+b', buffering: int = -1, encoding: str = None, newline: str = None, suffix: str = "", prefix: str = template, dir: str = None, delete: bool = True) -> _IO[_Any]: """Create and return a temporary file. Arguments: 'prefix', 'suffix', 'dir' -- as for mkstemp. 'mode' -- the mode argument to io.open (default "w+b"). 'buffering' -- the buffer size argument to io.open (default -1). 'encoding' -- the encoding argument to io.open (default None) 'newline' -- the newline argument to io.open (default None) 'delete' -- whether the file is deleted on close (default True). The file is created as mkstemp() would do it. Returns an object with a file-like interface; the name of the file is accessible as file.name. The file will be automatically deleted when it is closed unless the 'delete' argument is set to False. """ if dir is None: dir = gettempdir() flags = _bin_openflags # Setting O_TEMPORARY in the flags causes the OS to delete # the file when it is closed. This is only supported by Windows. if _os.name == 'nt' and delete: flags |= _os.O_TEMPORARY (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags) file = _io.open(fd, mode, buffering=buffering, newline=newline, encoding=encoding) return _cast(_IO[_Any], _TemporaryFileWrapper(file, name, delete)) if _os.name != 'posix' or _sys.platform == 'cygwin': # On non-POSIX and Cygwin systems, assume that we cannot unlink a file # while it is open. TemporaryFile = NamedTemporaryFile else: def _TemporaryFile(mode: str = 'w+b', buffering: int = -1, encoding: str = None, newline: str = None, suffix: str = "", prefix: str = template, dir: str = None, delete: bool = True) -> _IO[_Any]: """Create and return a temporary file. Arguments: 'prefix', 'suffix', 'dir' -- as for mkstemp. 'mode' -- the mode argument to io.open (default "w+b"). 'buffering' -- the buffer size argument to io.open (default -1). 'encoding' -- the encoding argument to io.open (default None) 'newline' -- the newline argument to io.open (default None) The file is created as mkstemp() would do it. Returns an object with a file-like interface. The file has no name, and will cease to exist when it is closed. """ if dir is None: dir = gettempdir() flags = _bin_openflags (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags) try: _os.unlink(name) return _io.open(fd, mode, buffering=buffering, newline=newline, encoding=encoding) except: _os.close(fd) raise TemporaryFile = _TemporaryFile class SpooledTemporaryFile: """Temporary file wrapper, specialized to switch from StringIO to a real file when it exceeds a certain size or when a fileno is needed. 
""" _rolled = False _file = None # type: _Any # BytesIO, StringIO or TemporaryFile def __init__(self, max_size: int = 0, mode: str = 'w+b', buffering: int = -1, encoding: str = None, newline: str = None, suffix: str = "", prefix: str = template, dir: str = None) -> None: if 'b' in mode: self._file = _io.BytesIO() else: # Setting newline="\n" avoids newline translation; # this is important because otherwise on Windows we'd # hget double newline translation upon rollover(). self._file = _io.StringIO(newline="\n") self._max_size = max_size self._rolled = False self._TemporaryFileArgs = { 'mode': mode, 'buffering': buffering, 'suffix': suffix, 'prefix': prefix, 'encoding': encoding, 'newline': newline, 'dir': dir} # type: _Dict[str, _Any] def _check(self, file: _IO[_Any]) -> None: if self._rolled: return max_size = self._max_size if max_size and file.tell() > max_size: self.rollover() def rollover(self) -> None: if self._rolled: return file = self._file newfile = self._file = TemporaryFile(**self._TemporaryFileArgs) self._TemporaryFileArgs = None newfile.write(file.getvalue()) newfile.seek(file.tell(), 0) self._rolled = True # The method caching trick from NamedTemporaryFile # won't work here, because _file may change from a # _StringIO instance to a real file. So we list # all the methods directly. # Context management protocol def __enter__(self) -> 'SpooledTemporaryFile': if self._file.closed: raise ValueError("Cannot enter context with closed file") return self def __exit__(self, exc: type, value: BaseException, tb: _TracebackType) -> bool: self._file.close() return False # file protocol def __iter__(self) -> _Iterable[_Any]: return self._file.__iter__() def close(self) -> None: self._file.close() @property def closed(self) -> bool: return self._file.closed @property def encoding(self) -> str: return self._file.encoding def fileno(self) -> int: self.rollover() return self._file.fileno() def flush(self) -> None: self._file.flush() def isatty(self) -> bool: return self._file.isatty() @property def mode(self) -> str: return self._file.mode @property def name(self) -> str: return self._file.name @property def newlines(self) -> _Any: return self._file.newlines #def next(self): # return self._file.next def read(self, n: int = -1) -> _Any: return self._file.read(n) def readline(self, limit: int = -1) -> _Any: return self._file.readline(limit) def readlines(self, *args) -> _List[_Any]: return self._file.readlines(*args) def seek(self, offset: int, whence: int = 0) -> None: self._file.seek(offset, whence) @property def softspace(self) -> bool: return self._file.softspace def tell(self) -> int: return self._file.tell() def truncate(self) -> None: self._file.truncate() def write(self, s: _Any) -> int: file = self._file # type: _IO[_Any] rv = file.write(s) self._check(file) return rv def writelines(self, iterable: _Iterable[_Any]) -> None: file = self._file # type: _IO[_Any] file.writelines(iterable) self._check(file) #def xreadlines(self, *args) -> _Any: # return self._file.xreadlines(*args) class TemporaryDirectory(object): """Create and return a temporary directory. This has the same behavior as mkdtemp but can be used as a context manager. For example: with TemporaryDirectory() as tmpdir: ... Upon exiting the context, the directory and everthing contained in it are removed. 
""" def __init__(self, suffix: str = "", prefix: str = template, dir: str = None) -> None: self._closed = False self.name = None # type: str # Handle mkdtemp throwing an exception self.name = mkdtemp(suffix, prefix, dir) # XXX (ncoghlan): The following code attempts to make # this class tolerant of the module nulling out process # that happens during CPython interpreter shutdown # Alas, it doesn't actually manage it. See issue #10188 self._listdir = _os.listdir self._path_join = _os.path.join self._isdir = _os.path.isdir self._islink = _os.path.islink self._remove = _os.remove self._rmdir = _os.rmdir self._os_error = _os.error self._warn = _warnings.warn def __repr__(self) -> str: return "<{} {!r}>".format(self.__class__.__name__, self.name) def __enter__(self) -> str: return self.name def cleanup(self, _warn: bool = False) -> None: if self.name and not self._closed: try: self._rmtree(self.name) except (TypeError, AttributeError) as ex: # Issue #10188: Emit a warning on stderr # if the directory could not be cleaned # up due to missing globals if "None" not in str(ex): raise print("ERROR: {!r} while cleaning up {!r}".format(ex, self,), file=_sys.stderr) return self._closed = True if _warn: self._warn("Implicitly cleaning up {!r}".format(self), ResourceWarning) def __exit__(self, exc: type, value: BaseException, tb: _TracebackType) -> bool: self.cleanup() return False def __del__(self) -> None: # Issue a ResourceWarning if implicit cleanup needed self.cleanup(_warn=True) def _rmtree(self, path: str) -> None: # Essentially a stripped down version of shutil.rmtree. We can't # use globals because they may be None'ed out at shutdown. for name in self._listdir(path): fullname = self._path_join(path, name) try: isdir = self._isdir(fullname) and not self._islink(fullname) except self._os_error: isdir = False if isdir: self._rmtree(fullname) else: try: self._remove(fullname) except self._os_error: pass try: self._rmdir(path) except self._os_error: pass mypy-0.560/test-data/stdlib-samples/3.2/test/0000755€tŠÔÚ€2›s®0000000000013215007243024763 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/stdlib-samples/3.2/test/__init__.py0000644€tŠÔÚ€2›s®0000000000013215007205027060 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/stdlib-samples/3.2/test/randv2_32.pck0000644€tŠÔÚ€2›s®0000001653513215007205027172 0ustar jukkaDROPBOX\Domain Users00000000000000crandom Random p0 (tRp1 (I2 (I-2147483648 I-845974985 I-1294090086 I1193659239 I-1849481736 I-946579732 I-34406770 I1749049471 I1997774682 I1432026457 I1288127073 I-943175655 I-1718073964 I339993548 I-1045260575 I582505037 I-1555108250 I-1114765620 I1578648750 I-350384412 I-20845848 I-288255314 I738790953 I1901249641 I1999324672 I-277361068 I-1515885839 I2061761596 I-809068089 I1287981136 I258129492 I-6303745 I-765148337 I1090344911 I1653434703 I-1242923628 I1639171313 I-1870042660 I-1655014050 I345609048 I2093410138 I1963263374 I-2122098342 I1336859961 I-810942729 I945857753 I2103049942 I623922684 I1418349549 I690877342 I754973107 I-1605111847 I1607137813 I-1704917131 I1317536428 I1714882872 I-1665385120 I1823694397 I-1790836866 I-1696724812 I-603979847 I-498599394 I-341265291 I927388804 I1778562135 I1716895781 I1023198122 I1726145967 I941955525 I1240148950 I-1929634545 I-1288147083 I-519318335 I754559777 I-707571958 I374604022 I420424061 I-1095443486 I1621934944 I-1220502522 I-140049608 I-918917122 I304341024 I-1637446057 I-353934485 I1973436235 I433380241 I-686759465 I-2111563154 I-573422032 I804304541 I1513063483 
I1417381689 I-804778729 I211756408 I544537322 I890881641 I150378374 I1765739392 I1011604116 I584889095 I1400520554 I413747808 I-1741992587 I-1882421574 I-1373001903 I-1885348538 I903819480 I1083220038 I-1318105424 I1740421404 I1693089625 I775965557 I1319608037 I-2127475785 I-367562895 I-1416273451 I1693000327 I-1217438421 I834405522 I-128287275 I864057548 I-973917356 I7304111 I1712253182 I1353897741 I672982288 I1778575559 I-403058377 I-38540378 I-1393713496 I13193171 I1127196200 I205176472 I-2104790506 I299985416 I1403541685 I-1018270667 I-1980677490 I-1182625797 I1637015181 I-1795357414 I1514413405 I-924516237 I-1841873650 I-1014591269 I1576616065 I-1319103135 I-120847840 I2062259778 I-9285070 I1160890300 I-575137313 I-1509108275 I46701926 I-287560914 I-256824960 I577558250 I900598310 I944607867 I2121154920 I-1170505192 I-1347170575 I77247778 I-1899015765 I1234103327 I1027053658 I1934632322 I-792031234 I1147322536 I1290655117 I1002059715 I1325898538 I896029793 I-790940694 I-980470721 I-1922648255 I-951672814 I291543943 I1158740218 I-1959023736 I-1977185236 I1527900076 I514104195 I-814154113 I-593157883 I-1023704660 I1285688377 I-2117525386 I768954360 I-38676846 I-799848659 I-1305517259 I-1938213641 I-462146758 I-1663302892 I1899591069 I-22935388 I-275856976 I-443736893 I-739441156 I93862068 I-838105669 I1735629845 I-817484206 I280814555 I1753547179 I1811123479 I1974543632 I-48447465 I-642694345 I-531149613 I518698953 I-221642627 I-686519187 I776644303 I257774400 I-1499134857 I-1055273455 I-237023943 I1981752330 I-917671662 I-372905983 I1588058420 I1171936660 I-1730977121 I1360028989 I1769469287 I1910709542 I-852692959 I1396944667 I-1723999155 I-310975435 I-1965453954 I-1636858570 I2005650794 I680293715 I1355629386 I844514684 I-1909152807 I-808646074 I1936510018 I1134413810 I-143411047 I-1478436304 I1394969244 I-1170110660 I1963112086 I-1518351049 I-1506287443 I-455023090 I-855366028 I-1746785568 I933990882 I-703625141 I-285036872 I188277905 I1471578620 I-981382835 I-586974220 I945619758 I1608778444 I-1708548066 I-1897629320 I-42617810 I-836840790 I539154487 I-235706962 I332074418 I-575700589 I1534608003 I632116560 I-1819760653 I642052958 I-722391771 I-1104719475 I-1196847084 I582413973 I1563394876 I642007944 I108989456 I361625014 I677308625 I-1806529496 I-959050708 I-1858251070 I-216069832 I701624579 I501238033 I12287030 I1895107107 I2089098638 I-874806230 I1236279203 I563718890 I-544352489 I-1879707498 I1767583393 I-1776604656 I-693294301 I-88882831 I169303357 I1299196152 I-1122791089 I-379157172 I1934671851 I1575736961 I-19573174 I-1401511009 I9305167 I-1115174467 I1670735537 I1226436501 I-2004524535 I1767463878 I-1722855079 I-559413926 I1529810851 I1201272087 I-1297130971 I-1188149982 I1396557188 I-370358342 I-1006619702 I1600942463 I906087130 I-76991909 I2069580179 I-1674195181 I-2098404729 I-940972459 I-573399187 I-1930386277 I-721311199 I-647834744 I1452181671 I688681916 I1812793731 I1704380620 I-1389615179 I866287837 I-1435265007 I388400782 I-147986600 I-1613598851 I-1040347408 I782063323 I-239282031 I-575966722 I-1865208174 I-481365146 I579572803 I-1239481494 I335361280 I-429722947 I1881772789 I1908103808 I1653690013 I-1668588344 I1933787953 I-2033480609 I22162797 I-1516527040 I-461232482 I-16201372 I-2043092030 I114990337 I-1524090084 I1456374020 I458606440 I-1928083218 I227773125 I-1129028159 I1678689 I1575896907 I-1792935220 I-151387575 I64084088 I-95737215 I1337335688 I-1963466345 I1243315130 I-1798518411 I-546013212 I-607065396 I1219824160 I1715218469 I-1368163783 I1701552913 
I-381114888 I1068821717 I266062971 I-2066513172 I1767407229 I-780936414 I-705413443 I-1256268847 I1646874149 I1107690353 I839133072 I67001749 I860763503 I884880613 I91977084 I755371933 I420745153 I-578480690 I-1520193551 I1011369331 I-99754575 I-733141064 I-500598588 I1081124271 I-1341266575 I921002612 I-848852487 I-1904467341 I-1294256973 I-94074714 I-1778758498 I-1401188547 I2101830578 I2058864877 I-272875991 I-1375854779 I-1332937870 I619425525 I-1034529639 I-36454393 I-2030499985 I-1637127500 I-1408110287 I-2108625749 I-961007436 I1475654951 I-791946251 I1667792115 I1818978830 I1897980514 I1959546477 I-74478911 I-508643347 I461594399 I538802715 I-2094970071 I-2076660253 I1091358944 I1944029246 I-343957436 I-1915845022 I1237620188 I1144125174 I1522190520 I-670252952 I-19469226 I675626510 I758750096 I909724354 I-1846259652 I544669343 I445182495 I-821519930 I-1124279685 I-1668995122 I1653284793 I-678555151 I-687513207 I1558259445 I-1978866839 I1558835601 I1732138472 I-1904793363 I620020296 I1562597874 I1942617227 I-549632552 I721603795 I417978456 I-1355281522 I-538065208 I-1079523196 I187375699 I449064972 I1018083947 I1632388882 I-493269866 I92769041 I1477146750 I1782708404 I444873376 I1085851104 I-6823272 I-1302251853 I1602050688 I-1042187824 I287161745 I-1972094479 I103271491 I2131619773 I-2064115870 I766815498 I990861458 I-1664407378 I1083746756 I-1018331904 I-677315687 I-951670647 I-952356874 I451460609 I-818615564 I851439508 I656362634 I-1351240485 I823378078 I1985597385 I597757740 I-1512303057 I1590872798 I1108424213 I818850898 I-1368594306 I-201107761 I1793370378 I1247597611 I-1594326264 I-601653890 I427642759 I248322113 I-292545338 I1708985870 I1917042771 I429354503 I-478470329 I793960014 I369939133 I1728189157 I-518963626 I-278523974 I-1877289696 I-2088617658 I-1367940049 I-62295925 I197975119 I-252900777 I803430539 I485759441 I-528283480 I-1287443963 I-478617444 I-861906946 I-649095555 I-893184337 I2050571322 I803433133 I1629574571 I1649720417 I-2050225209 I1208598977 I720314344 I-615166251 I-835077127 I-1405372429 I995698064 I148123240 I-943016676 I-594609622 I-1381596711 I1017195301 I-1268893013 I-1815985179 I-1393570351 I-870027364 I-476064472 I185582645 I569863326 I1098584267 I-1599147006 I-485054391 I-852098365 I1477320135 I222316762 I-1515583064 I-935051367 I393383063 I819617226 I722921837 I-1241806499 I-1358566385 I1666813591 I1333875114 I-1663688317 I-47254623 I-885800726 I307388991 I-1219459496 I1374870300 I2132047877 I-1385624198 I-245139206 I1015139214 I-926198559 I1969798868 I-1950480619 I-559193432 I-1256446518 I-1983476981 I790179655 I1004289659 I1541827617 I1555805575 I501127333 I-1123446797 I-453230915 I2035104883 I1296122398 I-1843698604 I-715464588 I337143971 I-1972119192 I606777909 I726977302 I-1149501872 I-1963733522 I-1797504644 I624 tp2 Ntp3 b.mypy-0.560/test-data/stdlib-samples/3.2/test/randv2_64.pck0000644€tŠÔÚ€2›s®0000001630513215007205027172 0ustar jukkaDROPBOX\Domain Users00000000000000crandom Random p0 (tRp1 (I2 (I2147483648 I1812115682 I2741755497 I1028055730 I809166036 I2773628650 I62321950 I535290043 I349877800 I976167039 I2490696940 I3631326955 I2107991114 I2941205793 I3199611605 I1871971556 I1456108540 I2984591044 I140836801 I4203227310 I3652722980 I4031971234 I555769760 I697301296 I2347638880 I3302335858 I320255162 I2553586608 I1570224361 I2838780912 I2315834918 I2351348158 I3545433015 I2292018579 I1177569331 I758497559 I2913311175 I1014948880 I1793619243 I3982451053 I3850988342 I2393984324 I1583100093 I3144742543 I3655047493 I3507532385 
I3094515442 I350042434 I2455294844 I1038739312 I313809152 I189433072 I1653165452 I4186650593 I19281455 I2589680619 I4145931590 I4283266118 I636283172 I943618337 I3170184633 I2308766231 I634615159 I538152647 I2079576891 I1029442616 I3410689412 I1370292761 I1071718978 I2139496322 I1876699543 I3485866187 I3157490130 I1633105386 I1453253160 I3841322080 I3789608924 I4110770792 I95083673 I931354627 I2065389591 I3448339827 I3348204577 I3263528560 I2411324590 I4003055026 I1869670093 I2737231843 I4150701155 I2689667621 I2993263224 I3239890140 I1191430483 I1214399779 I3623428533 I1817058866 I3052274451 I326030082 I1505129312 I2306812262 I1349150363 I1099127895 I2543465574 I2396380193 I503926466 I1607109730 I3451716817 I58037114 I4290081119 I947517597 I3083440186 I520522630 I2948962496 I4184319574 I2957636335 I668374201 I2325446473 I472785314 I3791932366 I573017189 I2185725379 I1262251492 I3525089379 I2951262653 I1305347305 I940958122 I3343754566 I359371744 I3874044973 I396897232 I147188248 I716683703 I4013880315 I1133359586 I1794612249 I3480815192 I3988787804 I1729355809 I573408542 I1419310934 I1770030447 I3552845567 I1693976502 I1271189893 I2298236738 I2049219027 I3464198070 I1233574082 I1007451781 I1838253750 I687096593 I1131375603 I1223013895 I1490478435 I339265439 I4232792659 I491538536 I2816256769 I1044097522 I2566227049 I748762793 I1511830494 I3593259822 I4121279213 I3735541309 I3609794797 I1939942331 I377570434 I1437957554 I1831285696 I55062811 I2046783110 I1303902283 I1838349877 I420993556 I1256392560 I2795216506 I2783687924 I3322303169 I512794749 I308405826 I517164429 I3320436022 I1328403632 I2269184746 I3729522810 I3304314450 I2238756124 I1690581361 I3813277532 I4119706879 I2659447875 I388818978 I2064580814 I1586227676 I2627522685 I2017792269 I547928109 I859107450 I1062238929 I858886237 I3795783146 I4173914756 I3835915965 I3329504821 I3494579904 I838863205 I3399734724 I4247387481 I3618414834 I2984433798 I2165205561 I4260685684 I3045904244 I3450093836 I3597307595 I3215851166 I3162801328 I2558283799 I950068105 I1829664117 I3108542987 I2378860527 I790023460 I280087750 I1171478018 I2333653728 I3976932140 I896746152 I1802494195 I1232873794 I2749440836 I2032037296 I2012091682 I1296131034 I3892133385 I908161334 I2296791795 I548169794 I696265 I893156828 I426904709 I3565374535 I2655906825 I2792178515 I2406814632 I4038847579 I3123934642 I2197503004 I3535032597 I2266216689 I2117613462 I1787448518 I1875089416 I2037165384 I1140676321 I3606296464 I3229138231 I2458267132 I1874651171 I3331900867 I1000557654 I1432861701 I473636323 I2691783927 I1871437447 I1328016401 I4118690062 I449467602 I681789035 I864889442 I1200888928 I75769445 I4008690037 I2464577667 I4167795823 I3070097648 I2579174882 I1216886568 I3810116343 I2249507485 I3266903480 I3671233480 I100191658 I3087121334 I365063087 I3821275176 I2165052848 I1282465245 I3601570637 I3132413236 I2780570459 I3222142917 I3129794692 I2611590811 I947031677 I2991908938 I750997949 I3632575131 I1632014461 I2846484755 I2347261779 I2903959448 I1397316686 I1904578392 I774649578 I3164598558 I2429587609 I738244516 I1563304975 I1399317414 I1021316297 I3187933234 I2126780757 I4011907847 I4095169219 I3358010054 I2729978247 I3736811646 I3009656410 I2893043637 I4027447385 I1239610110 I1488806900 I2674866844 I442876374 I2853687260 I2785921005 I3151378528 I1180567 I2803146964 I982221759 I2192919417 I3087026181 I2480838002 I738452921 I687986185 I3049371676 I3636492954 I3468311299 I2379621102 I788988633 I1643210601 I2983998168 I2492730801 I2586048705 I604073029 I4121082815 
I1496476928 I2972357110 I2663116968 I2642628592 I2116052039 I487186279 I2577680328 I3974766614 I730776636 I3842528855 I1929093695 I44626622 I3989908833 I1695426222 I3675479382 I3051784964 I1514876613 I1254036595 I2420450649 I3034377361 I2332990590 I1535175126 I185834384 I1107372900 I1707278185 I1286285295 I3332574225 I2785672437 I883170645 I2005666473 I3403131327 I4122021352 I1464032858 I3702576112 I260554598 I1837731650 I2594435345 I75771049 I2012484289 I3058649775 I29979703 I3861335335 I2506495152 I3786448704 I442947790 I2582724774 I4291336243 I2568189843 I1923072690 I1121589611 I837696302 I3284631720 I3865021324 I3576453165 I2559531629 I1459231762 I3506550036 I3754420159 I2622000757 I124228596 I1084328605 I1692830753 I547273558 I674282621 I655259103 I3188629610 I490502174 I2081001293 I3191330704 I4109943593 I1859948504 I3163806460 I508833168 I1256371033 I2709253790 I2068956572 I3092842814 I3913926529 I2039638759 I981982529 I536094190 I368855295 I51993975 I1597480732 I4058175522 I2155896702 I3196251991 I1081913893 I3952353788 I3545548108 I2370669647 I2206572308 I2576392991 I1732303374 I1153136290 I537641955 I1738691747 I3232854186 I2539632206 I2829760278 I3058187853 I1202425792 I3762361970 I2863949342 I2640635867 I376638744 I1857679757 I330798087 I1457400505 I1135610046 I606400715 I1859536026 I509811335 I529772308 I2579273244 I1890382004 I3959908876 I2612335971 I2834052227 I1434475986 I3684202717 I4015011345 I582567852 I3689969571 I3934753460 I3034960691 I208573292 I4004113742 I3992904842 I2587153719 I3529179079 I1565424987 I779130678 I1048582935 I3213591622 I3607793434 I3951254937 I2047811901 I7508850 I248544605 I4210090324 I2331490884 I70057213 I776474945 I1345528889 I3290403612 I1664955269 I1533143116 I545003424 I4141564478 I1257326139 I868843601 I2337603029 I1918131449 I1843439523 I1125519035 I673340118 I421408852 I1520454906 I1804722630 I3621254196 I2329968000 I39464672 I430583134 I294026512 I53978525 I2892276105 I1418863764 I3419054451 I1391595797 I3544981798 I4191780858 I825672357 I2972000844 I1571305069 I4231982845 I3611916419 I3045163168 I2982349733 I278572141 I4215338078 I839860504 I1819151779 I1412347479 I1386770353 I3914589491 I3783104977 I4124296733 I830546258 I89825624 I4110601328 I2545483429 I300600527 I516641158 I3693021034 I2852912854 I3240039868 I4167407959 I1479557946 I3621188804 I1391590944 I3578441128 I1227055556 I406898396 I3064054983 I25835338 I402664165 I4097682779 I2106728012 I203613622 I3045467686 I1381726438 I3798670110 I1342314961 I3552497361 I535913619 I2625787583 I1606574307 I1101269630 I1950513752 I1121355862 I3586816903 I438529984 I2473182121 I1229997203 I405445940 I1695535315 I427014336 I3916768430 I392298359 I1884642868 I1244730821 I741058080 I567479957 I3527621168 I3191971011 I3267069104 I4108668146 I1520795587 I166581006 I473794477 I1562126550 I929843010 I889533294 I1266556608 I874518650 I3520162092 I3013765049 I4220231414 I547246449 I3998093769 I3737193746 I3872944207 I793651876 I2606384318 I875991012 I1394836334 I4102011644 I854380426 I2618666767 I2568302000 I1995512132 I229491093 I2673500286 I3364550739 I3836923416 I243656987 I3944388983 I4064949677 I1416956378 I1703244487 I3990798829 I2023425781 I3926702214 I1229015501 I3174247824 I624 tp2 Ntp3 b.mypy-0.560/test-data/stdlib-samples/3.2/test/randv3.pck0000644€tŠÔÚ€2›s®0000001750413215007205026664 0ustar jukkaDROPBOX\Domain Users00000000000000crandom Random p0 (tRp1 (I3 (L2147483648L L994081831L L2806287265L L2228999830L L3396498069L L2956805457L L3273927761L L920726507L L1862624492L L2921292485L 
L1779526843L L2469105503L L251696293L L1254390717L L779197080L L3165356830L L2007365218L L1870028812L L2896519363L L1855578438L L979518416L L3481710246L L3191861507L L3993006593L L2967971479L L3353342753L L3576782572L L339685558L L2367675732L L116208555L L1220054437L L486597056L L1912115141L L1037044792L L4096904723L L3409146175L L3701651227L L315824610L L4138604583L L1385764892L L191878900L L2320582219L L3420677494L L2776503169L L1148247403L L829555069L L902064012L L2934642741L L2477108577L L2583928217L L1658612579L L2865447913L L129147346L L3691171887L L1569328110L L1372860143L L1054139183L L1617707080L L69020592L L3810271603L L1853953416L L3499803073L L1027545027L L3229043605L L250848720L L3324932626L L3537002962L L2494323345L L3238103962L L4147541579L L3636348186L L3025455083L L2678771977L L584700256L L3461826909L L854511420L L943463552L L3609239025L L3977577989L L253070090L L777394544L L2144086567L L1092947992L L854327284L L2222750082L L360183510L L1312466483L L3227531091L L2235022500L L3013060530L L2541091298L L3480126342L L1839762775L L2632608190L L1108889403L L3045050923L L731513126L L3505436788L L3062762017L L1667392680L L1354126500L L1143573930L L2816645702L L2100356873L L2817679106L L1210746010L L2409915248L L2910119964L L2309001420L L220351824L L3667352871L L3993148590L L2886160232L L4239393701L L1189270581L L3067985541L L147374573L L2355164869L L3696013550L L4227037846L L1905112743L L3312843689L L2930678266L L1828795355L L76933594L L3987100796L L1288361435L L3464529151L L965498079L L1444623093L L1372893415L L1536235597L L1341994850L L963594758L L2115295754L L982098685L L1053433904L L2078469844L L3059765792L L1753606181L L2130171254L L567588194L L529629426L L3621523534L L3027576564L L1176438083L L4096287858L L1168574683L L1425058962L L1429631655L L2902106759L L761900641L L1329183956L L1947050932L L447490289L L3282516276L L200037389L L921868197L L3331403999L L4088760249L L2188326318L L288401961L L1360802675L L314302808L L3314639210L L3749821203L L2286081570L L2768939062L L3200541016L L2133495482L L385029880L L4217232202L L3171617231L L1660846653L L2459987621L L2691776124L L4225030408L L3595396773L L1103680661L L539064057L L1492841101L L166195394L L757973658L L533893054L L2784879594L L1021821883L L2350548162L L176852116L L3503166025L L148079914L L1633466236L L2773090165L L1162846701L L3575737795L L1624178239L L2454894710L L3014691938L L526355679L L1870824081L L3362425857L L3907566665L L3462563184L L2229112004L L4203735748L L1557442481L L924133999L L1906634214L L880459727L L4065895870L L141426254L L1258450159L L3243115027L L1574958840L L313939294L L3055664260L L3459714255L L531778790L L509505506L L1620227491L L2675554942L L2516509560L L3797299887L L237135890L L3203142213L L1087745310L L1897151854L L3936590041L L132765167L L2385908063L L1360600289L L3574567769L L2752788114L L2644228966L L2377705183L L601277909L L4046480498L L324401408L L3279931760L L2227059377L L1538827493L L4220532064L L478044564L L2917117761L L635492832L L2319763261L L795944206L L1820473234L L1673151409L L1404095402L L1661067505L L3217106938L L2406310683L L1931309248L L2458622868L L3323670524L L3266852755L L240083943L L3168387397L L607722198L L1256837690L L3608124913L L4244969357L L1289959293L L519750328L L3229482463L L1105196988L L1832684479L L3761037224L L2363631822L L3297957711L L572766355L L1195822137L L2239207981L L2034241203L L163540514L L288160255L L716403680L L4019439143L L1536281935L L2345100458L L2786059178L L2822232109L L987025395L L3061166559L L490422513L L2551030115L L2638707620L L1344728502L 
L714108911L L2831719700L L2188615369L L373509061L L1351077504L L3136217056L L783521095L L2554949468L L2662499550L L1203826951L L1379632388L L1918858985L L607465976L L1980450237L L3540079211L L3397813410L L2913309266L L2289572621L L4133935327L L4166227663L L3371801704L L3065474909L L3580562343L L3832172378L L2556130719L L310473705L L3734014346L L2490413810L L347233056L L526668037L L1158393656L L544329703L L2150085419L L3914038146L L1060237586L L4159394837L L113205121L L309966775L L4098784465L L3635222960L L2417516569L L2089579233L L1725807541L L2728122526L L2365836523L L2504078522L L1443946869L L2384171411L L997046534L L3249131657L L1699875986L L3618097146L L1716038224L L2629818607L L2929217876L L1367250314L L1726434951L L1388496325L L2107602181L L2822366842L L3052979190L L3796798633L L1543813381L L959000121L L1363845999L L2952528150L L874184932L L1888387194L L2328695295L L3442959855L L841805947L L1087739275L L3230005434L L3045399265L L1161817318L L2898673139L L860011094L L940539782L L1297818080L L4243941623L L1577613033L L4204131887L L3819057225L L1969439558L L3297963932L L241874069L L3517033453L L2295345664L L1098911422L L886955008L L1477397621L L4279347332L L3616558791L L2384411957L L742537731L L764221540L L2871698900L L3530636393L L691256644L L758730966L L1717773090L L2751856377L L3188484000L L3767469670L L1623863053L L3533236793L L4099284176L L723921107L L310594036L L223978745L L2266565776L L201843303L L2969968546L L3351170888L L3465113624L L2712246712L L1521383057L L2384461798L L216357551L L2167301975L L3144653194L L2781220155L L3620747666L L95971265L L4255400243L L59999757L L4174273472L L3974511524L L1007123950L L3112477628L L806461512L L3148074008L L528352882L L2545979588L L2562281969L L3010249477L L1886331611L L3210656433L L1034099976L L2906893579L L1197048779L L1870004401L L3898300490L L2686856402L L3975723478L L613043532L L2565674353L L3760045310L L3468984376L L4126258L L303855424L L3988963552L L276256796L L544071807L L1023872062L L1747461519L L1975571260L L4033766958L L2946555557L L1492957796L L958271685L L46480515L L907760635L L1306626357L L819652378L L1172300279L L1116851319L L495601075L L1157715330L L534220108L L377320028L L1672286106L L2066219284L L1842386355L L2546059464L L1839457336L L3476194446L L3050550028L L594705582L L1905813535L L1813033412L L2700858157L L169067972L L4252889045L L1921944555L L497671474L L210143935L L2688398489L L325158375L L3450846447L L891760597L L712802536L L1132557436L L1417044075L L1639889660L L1746379970L L1478741647L L2817563486L L2573612532L L4266444457L L2911601615L L804745411L L2207254652L L1189140646L L3829725111L L3637367348L L1944731747L L2193440343L L1430195413L L1173515229L L1582618217L L2070767037L L247908936L L1460675439L L556001596L L327629335L L1036133876L L4228129605L L999174048L L3635804039L L1416550481L L1270540269L L4280743815L L39607659L L1552540623L L2762294062L L504137289L L4117044239L L1417130225L L1342970056L L1755716449L L1169447322L L2731401356L L2319976745L L2869221479L L23972655L L2251495389L L1429860878L L3728135992L L4241432973L L3698275076L L216416432L L4040046960L L246077176L L894675685L L3932282259L L3097205100L L2128818650L L1319010656L L1601974009L L2552960957L L3554016055L L4209395641L L2013340102L L3370447801L L2307272002L L1795091354L L202109401L L988345070L L2514870758L L1132726850L L582746224L L3112305421L L1843020683L L3600189223L L1101349165L L4211905855L L2866677581L L2881621130L L4165324109L L4238773191L L3635649550L L2670481044L L2996248219L L1676992480L L3473067050L L4205793699L L4019490897L 
L1579990481L L1899617990L L1136347713L L1802842268L L3591752960L L1197308739L L433629786L L4032142790L L3148041979L L3312138845L L3896860449L L3298182567L L907605170L L1658664067L L2682980313L L2523523173L L1208722103L L3808530363L L1079003946L L4282402864L L2041010073L L2667555071L L688018180L L1405121012L L4167994076L L3504695336L L1923944749L L1143598790L L3936268898L L3606243846L L1017420080L L4026211169L L596529763L L1844259624L L2840216282L L2673807759L L3407202575L L2737971083L L4075423068L L3684057432L L3146627241L L599650513L L69773114L L1257035919L L807485291L L2376230687L L3036593147L L2642411658L L106080044L L2199622729L L291834511L L2697611361L L11689733L L625123952L L3226023062L L3229663265L L753059444L L2843610189L L624L tp2 Ntp3 b.mypy-0.560/test-data/stdlib-samples/3.2/test/subprocessdata/0000755€tŠÔÚ€2›s®0000000000013215007243030005 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py0000644€tŠÔÚ€2›s®0000000106113215007205032347 0ustar jukkaDROPBOX\Domain Users00000000000000"""When called as a script, print a comma-separated list of the open file descriptors on stdout.""" import errno import os try: _MAXFD = os.sysconf("SC_OPEN_MAX") except: _MAXFD = 256 if __name__ == "__main__": fds = [] for fd in range(0, _MAXFD): try: st = os.fstat(fd) except OSError as e: if e.errno == errno.EBADF: continue raise # Ignore Solaris door files if st.st_mode & 0xF000 != 0xd000: fds.append(fd) print(','.join(map(str, fds))) mypy-0.560/test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py0000644€tŠÔÚ€2›s®0000000020213215007205033030 0ustar jukkaDROPBOX\Domain Users00000000000000"""When called as a script, consumes the input""" import sys if __name__ == "__main__": for line in sys.stdin: pass mypy-0.560/test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py0000644€tŠÔÚ€2›s®0000000023713215007205031307 0ustar jukkaDROPBOX\Domain Users00000000000000"""When ran as a script, simulates cat with no arguments.""" import sys if __name__ == "__main__": for line in sys.stdin: sys.stdout.write(line) mypy-0.560/test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py0000644€tŠÔÚ€2›s®0000000037513215007205031500 0ustar jukkaDROPBOX\Domain Users00000000000000"""When called with a single argument, simulated fgrep with a single argument and no options.""" import sys if __name__ == "__main__": pattern = sys.argv[1] for line in sys.stdin: if pattern in line: sys.stdout.write(line) mypy-0.560/test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py0000644€tŠÔÚ€2›s®0000000056413215007205033513 0ustar jukkaDROPBOX\Domain Users00000000000000import signal, subprocess, sys # On Linux this causes os.waitpid to fail with OSError as the OS has already # reaped our child process. The wait() passing the OSError on to the caller # and causing us to exit with an error is what we are testing against. 
signal.signal(signal.SIGCHLD, signal.SIG_IGN) subprocess.Popen([sys.executable, '-c', 'print("albatross")']).wait() mypy-0.560/test-data/stdlib-samples/3.2/test/support.py0000644€tŠÔÚ€2›s®0000015476613215007205027072 0ustar jukkaDROPBOX\Domain Users00000000000000"""Supporting definitions for the Python regression tests.""" if __name__ != 'test.support': raise ImportError('support must be imported from the test package') import contextlib import errno import functools import gc import socket import sys import os import platform import shutil import warnings import unittest import importlib import collections import re import subprocess import imp import time import sysconfig import fnmatch import logging.handlers import _thread, threading from typing import Any, Dict, cast #try: # import multiprocessing.process #except ImportError: # multiprocessing = None __all__ = [ "Error", "TestFailed", "ResourceDenied", "import_module", "verbose", "use_resources", "max_memuse", "record_original_stdout", "get_original_stdout", "unload", "unlink", "rmtree", "forget", "is_resource_enabled", "requires", "requires_mac_ver", "find_unused_port", "bind_port", "fcmp", "is_jython", "TESTFN", "HOST", "FUZZ", "SAVEDCWD", "temp_cwd", "findfile", "sortdict", "check_syntax_error", "open_urlresource", "check_warnings", "CleanImport", "EnvironmentVarGuard", "TransientResource", "captured_output", "captured_stdout", "captured_stdin", "captured_stderr", "time_out", "socket_peer_reset", "ioerror_peer_reset", "run_with_locale", 'temp_umask', "transient_internet", "set_memlimit", "bigmemtest", "bigaddrspacetest", "BasicTestRunner", "run_unittest", "run_doctest", "threading_setup", "threading_cleanup", "reap_children", "cpython_only", "check_impl_detail", "get_attribute", "swap_item", "swap_attr", "requires_IEEE_754", "TestHandler", "Matcher", "can_symlink", "skip_unless_symlink", "import_fresh_module", "failfast", ] class Error(Exception): """Base class for regression test exceptions.""" class TestFailed(Error): """Test failed.""" class ResourceDenied(unittest.SkipTest): """Test skipped because it requested a disallowed resource. This is raised when a test calls requires() for a resource that has not be enabled. It is used to distinguish between expected and unexpected skips. """ @contextlib.contextmanager def _ignore_deprecated_imports(ignore=True): """Context manager to suppress package and module deprecation warnings when importing them. If ignore is False, this context manager has no effect.""" if ignore: with warnings.catch_warnings(): warnings.filterwarnings("ignore", ".+ (module|package)", DeprecationWarning) yield None else: yield None def import_module(name, deprecated=False): """Import and return the module to be tested, raising SkipTest if it is not available. 
If deprecated is True, any module or package deprecation messages will be suppressed.""" with _ignore_deprecated_imports(deprecated): try: return importlib.import_module(name) except ImportError as msg: raise unittest.SkipTest(str(msg)) def _save_and_remove_module(name, orig_modules): """Helper function to save and remove a module from sys.modules Raise ImportError if the module can't be imported.""" # try to import the module and raise an error if it can't be imported if name not in sys.modules: __import__(name) del sys.modules[name] for modname in list(sys.modules): if modname == name or modname.startswith(name + '.'): orig_modules[modname] = sys.modules[modname] del sys.modules[modname] def _save_and_block_module(name, orig_modules): """Helper function to save and block a module in sys.modules Return True if the module was in sys.modules, False otherwise.""" saved = True try: orig_modules[name] = sys.modules[name] except KeyError: saved = False sys.modules[name] = None return saved def import_fresh_module(name, fresh=(), blocked=(), deprecated=False): """Imports and returns a module, deliberately bypassing the sys.modules cache and importing a fresh copy of the module. Once the import is complete, the sys.modules cache is restored to its original state. Modules named in fresh are also imported anew if needed by the import. If one of these modules can't be imported, None is returned. Importing of modules named in blocked is prevented while the fresh import takes place. If deprecated is True, any module or package deprecation messages will be suppressed.""" # NOTE: test_heapq, test_json and test_warnings include extra sanity checks # to make sure that this utility function is working as expected with _ignore_deprecated_imports(deprecated): # Keep track of modules saved for later restoration as well # as those which just need a blocking entry removed orig_modules = {} names_to_remove = [] _save_and_remove_module(name, orig_modules) try: for fresh_name in fresh: _save_and_remove_module(fresh_name, orig_modules) for blocked_name in blocked: if not _save_and_block_module(blocked_name, orig_modules): names_to_remove.append(blocked_name) fresh_module = importlib.import_module(name) except ImportError: fresh_module = None finally: for orig_name, module in orig_modules.items(): sys.modules[orig_name] = module for name_to_remove in names_to_remove: del sys.modules[name_to_remove] return fresh_module def get_attribute(obj, name): """Get an attribute, raising SkipTest if AttributeError is raised.""" try: attribute = getattr(obj, name) except AttributeError: raise unittest.SkipTest("module %s has no attribute %s" % ( obj.__name__, name)) else: return attribute verbose = 1 # Flag set to 0 by regrtest.py use_resources = None # type: Any # Flag set to [] by regrtest.py max_memuse = 0 # Disable bigmem tests (they will still be run with # small sizes, to make sure they work.) real_max_memuse = 0 failfast = False match_tests = None # type: Any # _original_stdout is meant to hold stdout at the time regrtest began. # This may be "the real" stdout, or IDLE's emulation of stdout, or whatever. # The point is to have some flavor of stdout the user can actually see. 
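# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the original module): the
# import_fresh_module() helper documented above re-imports a module while
# bypassing the sys.modules cache and optionally blocking other modules for
# the duration of the import.  A minimal, hypothetical usage sketch -- the
# module names 'json' and '_json' are only examples:
def _example_import_fresh_module():    # hypothetical helper, illustration only
    import json                        # cached, possibly C-accelerated module
    py_json = import_fresh_module('json', blocked=['_json'])
    if py_json is not None:
        # py_json is a brand-new module object; because '_json' was blocked
        # during the import it exercises the pure-Python code paths, while
        # sys.modules still holds the original 'json' module afterwards.
        assert py_json is not json
# ---------------------------------------------------------------------------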
_original_stdout = None # type: 'Any' def record_original_stdout(stdout): global _original_stdout _original_stdout = stdout def get_original_stdout(): return _original_stdout or sys.stdout def unload(name): try: del sys.modules[name] except KeyError: pass def unlink(filename): try: os.unlink(filename) except OSError as error: # The filename need not exist. if error.errno not in (errno.ENOENT, errno.ENOTDIR): raise def rmtree(path): try: shutil.rmtree(path) except OSError as error: # Unix returns ENOENT, Windows returns ESRCH. if error.errno not in (errno.ENOENT, errno.ESRCH): raise def make_legacy_pyc(source): """Move a PEP 3147 pyc/pyo file to its legacy pyc/pyo location. The choice of .pyc or .pyo extension is done based on the __debug__ flag value. :param source: The file system path to the source file. The source file does not need to exist, however the PEP 3147 pyc file must exist. :return: The file system path to the legacy pyc file. """ pyc_file = imp.cache_from_source(source) up_one = os.path.dirname(os.path.abspath(source)) if __debug__: ch = 'c' else: ch = 'o' legacy_pyc = os.path.join(up_one, source + ch) os.rename(pyc_file, legacy_pyc) return legacy_pyc def forget(modname): """'Forget' a module was ever imported. This removes the module from sys.modules and deletes any PEP 3147 or legacy .pyc and .pyo files. """ unload(modname) for dirname in sys.path: source = os.path.join(dirname, modname + '.py') # It doesn't matter if they exist or not, unlink all possible # combinations of PEP 3147 and legacy pyc and pyo files. unlink(source + 'c') unlink(source + 'o') unlink(imp.cache_from_source(source, debug_override=True)) unlink(imp.cache_from_source(source, debug_override=False)) # On some platforms, should not run gui test even if it is allowed # in `use_resources'. #if sys.platform.startswith('win'): #import ctypes #import ctypes.wintypes #def _is_gui_available(): # UOI_FLAGS = 1 # WSF_VISIBLE = 0x0001 # class USEROBJECTFLAGS(ctypes.Structure): # _fields_ = [("fInherit", ctypes.wintypes.BOOL), # ("fReserved", ctypes.wintypes.BOOL), # ("dwFlags", ctypes.wintypes.DWORD)] # dll = ctypes.windll.user32 # h = dll.GetProcessWindowStation() # if not h: # raise ctypes.WinError() # uof = USEROBJECTFLAGS() # needed = ctypes.wintypes.DWORD() # res = dll.GetUserObjectInformationW(h, # UOI_FLAGS, # ctypes.byref(uof), # ctypes.sizeof(uof), # ctypes.byref(needed)) # if not res: # raise ctypes.WinError() # return bool(uof.dwFlags & WSF_VISIBLE) #else: def _is_gui_available(): return True def is_resource_enabled(resource): """Test whether a resource is enabled. Known resources are set by regrtest.py.""" return use_resources is not None and resource in use_resources def requires(resource, msg=None): """Raise ResourceDenied if the specified resource is not available. If the caller's module is __main__ then automatically return True. The possibility of False being returned occurs when regrtest.py is executing. """ if resource == 'gui' and not _is_gui_available(): raise unittest.SkipTest("Cannot use the 'gui' resource") # see if the caller's module is __main__ - if so, treat as if # the resource was set if sys._getframe(1).f_globals.get("__name__") == "__main__": return if not is_resource_enabled(resource): if msg is None: msg = "Use of the `%s' resource not enabled" % resource raise ResourceDenied(msg) def requires_mac_ver(*min_version): """Decorator raising SkipTest if the OS is Mac OS X and the OS X version if less than min_version. 
For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version is lesser than 10.5. """ def decorator(func): @functools.wraps(func) def wrapper(*args, **kw): if sys.platform == 'darwin': version_txt = platform.mac_ver()[0] try: version = tuple(map(int, version_txt.split('.'))) except ValueError: pass else: if version < min_version: min_version_txt = '.'.join(map(str, min_version)) raise unittest.SkipTest( "Mac OS X %s or higher required, not %s" % (min_version_txt, version_txt)) return func(*args, **kw) wrapper.min_version = min_version return wrapper return decorator HOST = 'localhost' def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM): """Returns an unused port that should be suitable for binding. This is achieved by creating a temporary socket with the same family and type as the 'sock' parameter (default is AF_INET, SOCK_STREAM), and binding it to the specified host address (defaults to 0.0.0.0) with the port set to 0, eliciting an unused ephemeral port from the OS. The temporary socket is then closed and deleted, and the ephemeral port is returned. Either this method or bind_port() should be used for any tests where a server socket needs to be bound to a particular port for the duration of the test. Which one to use depends on whether the calling code is creating a python socket, or if an unused port needs to be provided in a constructor or passed to an external program (i.e. the -accept argument to openssl's s_server mode). Always prefer bind_port() over find_unused_port() where possible. Hard coded ports should *NEVER* be used. As soon as a server socket is bound to a hard coded port, the ability to run multiple instances of the test simultaneously on the same host is compromised, which makes the test a ticking time bomb in a buildbot environment. On Unix buildbots, this may simply manifest as a failed test, which can be recovered from without intervention in most cases, but on Windows, the entire python process can completely and utterly wedge, requiring someone to log in to the buildbot and manually kill the affected process. (This is easy to reproduce on Windows, unfortunately, and can be traced to the SO_REUSEADDR socket option having different semantics on Windows versus Unix/Linux. On Unix, you can't have two AF_INET SOCK_STREAM sockets bind, listen and then accept connections on identical host/ports. An EADDRINUSE socket.error will be raised at some point (depending on the platform and the order bind and listen were called on each socket). However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE will ever be raised when attempting to bind two identical host/ports. When accept() is called on each socket, the second caller's process will steal the port from the first caller, leaving them both in an awkwardly wedged state where they'll no longer respond to any signals or graceful kills, and must be forcibly killed via OpenProcess()/TerminateProcess(). The solution on Windows is to use the SO_EXCLUSIVEADDRUSE socket option instead of SO_REUSEADDR, which effectively affords the same semantics as SO_REUSEADDR on Unix. Given the propensity of Unix developers in the Open Source world compared to Windows ones, this is a common mistake. A quick look over OpenSSL's 0.9.8g source shows that they use SO_REUSEADDR when openssl.exe is called with the 's_server' option, for example. See http://bugs.python.org/issue2550 for more info. 
The following site also has a very thorough description about the implications of both REUSEADDR and EXCLUSIVEADDRUSE on Windows: http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx) XXX: although this approach is a vast improvement on previous attempts to elicit unused ports, it rests heavily on the assumption that the ephemeral port returned to us by the OS won't immediately be dished back out to some other process when we close and delete our temporary socket but before our calling code has a chance to bind the returned port. We can deal with this issue if/when we come across it. """ tempsock = socket.socket(family, socktype) port = bind_port(tempsock) tempsock.close() #del tempsock return port def bind_port(sock, host=HOST): """Bind the socket to a free port and return the port number. Relies on ephemeral ports in order to ensure we are using an unbound port. This is important as many tests may be running simultaneously, especially in a buildbot environment. This method raises an exception if the sock.family is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR or SO_REUSEPORT set on it. Tests should *never* set these socket options for TCP/IP sockets. The only case for setting these options is testing multicasting via multiple UDP sockets. Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e. on Windows), it will be set on the socket. This will prevent anyone else from bind()'ing to our host/port for the duration of the test. """ if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM: if hasattr(socket, 'SO_REUSEADDR'): if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1: raise TestFailed("tests should never set the SO_REUSEADDR " \ "socket option on TCP/IP sockets!") if hasattr(socket, 'SO_REUSEPORT'): if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1: raise TestFailed("tests should never set the SO_REUSEPORT " \ "socket option on TCP/IP sockets!") if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'): sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) sock.bind((host, 0)) port = sock.getsockname()[1] return port FUZZ = 1e-6 def fcmp(x, y): # fuzzy comparison function if isinstance(x, float) or isinstance(y, float): try: fuzz = (abs(x) + abs(y)) * FUZZ if abs(x-y) <= fuzz: return 0 except: pass elif type(x) == type(y) and isinstance(x, (tuple, list)): for i in range(min(len(x), len(y))): outcome = fcmp(x[i], y[i]) if outcome != 0: return outcome return (len(x) > len(y)) - (len(x) < len(y)) return (x > y) - (x < y) # decorator for skipping tests on non-IEEE 754 platforms requires_IEEE_754 = unittest.skipUnless( cast(Any, float).__getformat__("double").startswith("IEEE"), "test requires IEEE 754 doubles") is_jython = sys.platform.startswith('java') TESTFN = '' # Filename used for testing if os.name == 'java': # Jython disallows @ in module names TESTFN = '$test' else: TESTFN = '@test' # Disambiguate TESTFN for parallel testing, while letting it remain a valid # module name. TESTFN = "{}_{}_tmp".format(TESTFN, os.getpid()) # TESTFN_UNICODE is a non-ascii filename TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f" if sys.platform == 'darwin': # In Mac OS X's VFS API file names are, by definition, canonically # decomposed Unicode, encoded using UTF-8. 
See QA1173: # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html import unicodedata TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE) TESTFN_ENCODING = sys.getfilesystemencoding() # TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be # encoded by the filesystem encoding (in strict mode). It can be None if we # cannot generate such filename. TESTFN_UNENCODABLE = None # type: Any if os.name in ('nt', 'ce'): # skip win32s (0) or Windows 9x/ME (1) if sys.getwindowsversion().platform >= 2: # Different kinds of characters from various languages to minimize the # probability that the whole name is encodable to MBCS (issue #9819) TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80" try: TESTFN_UNENCODABLE.encode(TESTFN_ENCODING) except UnicodeEncodeError: pass else: print('WARNING: The filename %r CAN be encoded by the filesystem encoding (%s). ' 'Unicode filename tests may not be effective' % (TESTFN_UNENCODABLE, TESTFN_ENCODING)) TESTFN_UNENCODABLE = None # Mac OS X denies unencodable filenames (invalid utf-8) elif sys.platform != 'darwin': try: # ascii and utf-8 cannot encode the byte 0xff b'\xff'.decode(TESTFN_ENCODING) except UnicodeDecodeError: # 0xff will be encoded using the surrogate character u+DCFF TESTFN_UNENCODABLE = TESTFN \ + b'-\xff'.decode(TESTFN_ENCODING, 'surrogateescape') else: # File system encoding (eg. ISO-8859-* encodings) can encode # the byte 0xff. Skip some unicode filename tests. pass # Save the initial cwd SAVEDCWD = os.getcwd() @contextlib.contextmanager def temp_cwd(name='tempcwd', quiet=False, path=None): """ Context manager that temporarily changes the CWD. An existing path may be provided as *path*, in which case this function makes no changes to the file system. Otherwise, the new CWD is created in the current directory and it's named *name*. If *quiet* is False (default) and it's not possible to create or change the CWD, an error is raised. If it's True, only a warning is raised and the original CWD is used. """ saved_dir = os.getcwd() is_temporary = False if path is None: path = name try: os.mkdir(name) is_temporary = True except OSError: if not quiet: raise warnings.warn('tests may fail, unable to create temp CWD ' + name, RuntimeWarning, stacklevel=3) try: os.chdir(path) except OSError: if not quiet: raise warnings.warn('tests may fail, unable to change the CWD to ' + name, RuntimeWarning, stacklevel=3) try: yield os.getcwd() finally: os.chdir(saved_dir) if is_temporary: rmtree(name) @contextlib.contextmanager def temp_umask(umask): """Context manager that temporarily sets the process umask.""" oldmask = os.umask(umask) try: yield None finally: os.umask(oldmask) def findfile(file, here=__file__, subdir=None): """Try to find a file on sys.path and the working directory. If it is not found the argument passed to the function is returned (this does not necessarily signal failure; could still be the legitimate path).""" if os.path.isabs(file): return file if subdir is not None: file = os.path.join(subdir, file) path = sys.path path = [os.path.dirname(here)] + path for dn in path: fn = os.path.join(dn, file) if os.path.exists(fn): return fn return file def sortdict(dict): "Like repr(dict), but in sorted order." items = sorted(dict.items()) reprpairs = ["%r: %r" % pair for pair in items] withcommas = ", ".join(reprpairs) return "{%s}" % withcommas def make_bad_fd(): """ Create an invalid file descriptor by opening and closing a file and return its fd. 
""" file = open(TESTFN, "wb") try: return file.fileno() finally: file.close() unlink(TESTFN) def check_syntax_error(testcase, statement): raise NotImplementedError('no compile built-in') #testcase.assertRaises(SyntaxError, compile, statement, # '', 'exec') def open_urlresource(url, *args, **kw): from urllib import request, parse check = kw.pop('check', None) filename = parse.urlparse(url)[2].split('/')[-1] # '/': it's URL! fn = os.path.join(os.path.dirname(__file__), "data", filename) def check_valid_file(fn): f = open(fn, *args, **kw) if check is None: return f elif check(f): f.seek(0) return f f.close() if os.path.exists(fn): f = check_valid_file(fn) if f is not None: return f unlink(fn) # Verify the requirement before downloading the file requires('urlfetch') print('\tfetching %s ...' % url, file=get_original_stdout()) f = request.urlopen(url, timeout=15) try: with open(fn, "wb") as out: s = f.read() while s: out.write(s) s = f.read() finally: f.close() f = check_valid_file(fn) if f is not None: return f raise TestFailed('invalid resource "%s"' % fn) class WarningsRecorder(object): """Convenience wrapper for the warnings list returned on entry to the warnings.catch_warnings() context manager. """ def __init__(self, warnings_list): self._warnings = warnings_list self._last = 0 def __getattr__(self, attr): if len(self._warnings) > self._last: return getattr(self._warnings[-1], attr) elif attr in warnings.WarningMessage._WARNING_DETAILS: return None raise AttributeError("%r has no attribute %r" % (self, attr)) #@property #def warnings(self): # return self._warnings[self._last:] def reset(self): self._last = len(self._warnings) def _filterwarnings(filters, quiet=False): """Catch the warnings, then check if all the expected warnings have been raised and re-raise unexpected warnings. If 'quiet' is True, only re-raise the unexpected warnings. """ # Clear the warning registry of the calling module # in order to re-raise the warnings. frame = sys._getframe(2) registry = frame.f_globals.get('__warningregistry__') if registry: registry.clear() with warnings.catch_warnings(record=True) as w: # Set filter "always" to record all warnings. Because # test_warnings swap the module, we need to look up in # the sys.modules dictionary. sys.modules['warnings'].simplefilter("always") yield WarningsRecorder(w) # Filter the recorded warnings reraise = list(w) missing = [] for msg, cat in filters: seen = False for w in reraise[:]: warning = w.message # Filter out the matching messages if (re.match(msg, str(warning), re.I) and issubclass(warning.__class__, cat)): seen = True reraise.remove(w) if not seen and not quiet: # This filter caught nothing missing.append((msg, cat.__name__)) if reraise: raise AssertionError("unhandled warning %s" % reraise[0]) if missing: raise AssertionError("filter (%r, %s) did not catch any warning" % missing[0]) @contextlib.contextmanager def check_warnings(*filters, **kwargs): """Context manager to silence warnings. 
Accept 2-tuples as positional arguments: ("message regexp", WarningCategory) Optional argument: - if 'quiet' is True, it does not fail if a filter catches nothing (default True without argument, default False if some filters are defined) Without argument, it defaults to: check_warnings(("", Warning), quiet=True) """ quiet = kwargs.get('quiet') if not filters: filters = (("", Warning),) # Preserve backward compatibility if quiet is None: quiet = True return _filterwarnings(filters, quiet) class CleanImport(object): """Context manager to force import to return a new module reference. This is useful for testing module-level behaviours, such as the emission of a DeprecationWarning on import. Use like this: with CleanImport("foo"): importlib.import_module("foo") # new reference """ def __init__(self, *module_names): self.original_modules = sys.modules.copy() for module_name in module_names: if module_name in sys.modules: module = sys.modules[module_name] # It is possible that module_name is just an alias for # another module (e.g. stub for modules renamed in 3.x). # In that case, we also need delete the real module to clear # the import cache. if module.__name__ != module_name: del sys.modules[module.__name__] del sys.modules[module_name] def __enter__(self): return self def __exit__(self, *ignore_exc): sys.modules.update(self.original_modules) class EnvironmentVarGuard(dict): """Class to help protect the environment variable properly. Can be used as a context manager.""" def __init__(self): self._environ = os.environ self._changed = {} def __getitem__(self, envvar): return self._environ[envvar] def __setitem__(self, envvar, value): # Remember the initial value on the first access if envvar not in self._changed: self._changed[envvar] = self._environ.get(envvar) self._environ[envvar] = value def __delitem__(self, envvar): # Remember the initial value on the first access if envvar not in self._changed: self._changed[envvar] = self._environ.get(envvar) if envvar in self._environ: del self._environ[envvar] def keys(self): return self._environ.keys() def __iter__(self): return iter(self._environ) def __len__(self): return len(self._environ) def set(self, envvar, value): self[envvar] = value def unset(self, envvar): del self[envvar] def __enter__(self): return self def __exit__(self, *ignore_exc): for k, v in self._changed.items(): if v is None: if k in self._environ: del self._environ[k] else: self._environ[k] = v os.environ = self._environ class DirsOnSysPath(object): """Context manager to temporarily add directories to sys.path. This makes a copy of sys.path, appends any directories given as positional arguments, then reverts sys.path to the copied settings when the context ends. Note that *all* sys.path modifications in the body of the context manager, including replacement of the object, will be reverted at the end of the block. 
""" def __init__(self, *paths): self.original_value = sys.path[:] self.original_object = sys.path sys.path.extend(paths) def __enter__(self): return self def __exit__(self, *ignore_exc): sys.path = self.original_object sys.path[:] = self.original_value class TransientResource(object): """Raise ResourceDenied if an exception is raised while the context manager is in effect that matches the specified exception and attributes.""" def __init__(self, exc, **kwargs): self.exc = exc self.attrs = kwargs def __enter__(self): return self def __exit__(self, type_=None, value=None, traceback=None): """If type_ is a subclass of self.exc and value has attributes matching self.attrs, raise ResourceDenied. Otherwise let the exception propagate (if any).""" if type_ is not None and issubclass(self.exc, type_): for attr, attr_value in self.attrs.items(): if not hasattr(value, attr): break if getattr(value, attr) != attr_value: break else: raise ResourceDenied("an optional resource is not available") # Context managers that raise ResourceDenied when various issues # with the Internet connection manifest themselves as exceptions. # XXX deprecate these and use transient_internet() instead time_out = TransientResource(IOError, errno=errno.ETIMEDOUT) socket_peer_reset = TransientResource(socket.error, errno=errno.ECONNRESET) ioerror_peer_reset = TransientResource(IOError, errno=errno.ECONNRESET) @contextlib.contextmanager def transient_internet(resource_name, *, timeout=30.0, errnos=()): """Return a context manager that raises ResourceDenied when various issues with the Internet connection manifest themselves as exceptions.""" default_errnos = [ ('ECONNREFUSED', 111), ('ECONNRESET', 104), ('EHOSTUNREACH', 113), ('ENETUNREACH', 101), ('ETIMEDOUT', 110), ] default_gai_errnos = [ ('EAI_AGAIN', -3), ('EAI_FAIL', -4), ('EAI_NONAME', -2), ('EAI_NODATA', -5), # Encountered when trying to resolve IPv6-only hostnames ('WSANO_DATA', 11004), ] denied = ResourceDenied("Resource '%s' is not available" % resource_name) captured_errnos = errnos gai_errnos = [] if not captured_errnos: captured_errnos = [getattr(errno, name, num) for name, num in default_errnos] gai_errnos = [getattr(socket, name, num) for name, num in default_gai_errnos] def filter_error(err): n = getattr(err, 'errno', None) if (isinstance(err, socket.timeout) or (isinstance(err, socket.gaierror) and n in gai_errnos) or n in captured_errnos): if not verbose: sys.stderr.write(denied.args[0] + "\n") raise denied from err old_timeout = socket.getdefaulttimeout() try: if timeout is not None: socket.setdefaulttimeout(timeout) yield None except IOError as err: # urllib can wrap original socket errors multiple times (!), we must # unwrap to get at the original error. while True: a = err.args if len(a) >= 1 and isinstance(a[0], IOError): err = a[0] # The error can also be wrapped as args[1]: # except socket.error as msg: # raise IOError('socket error', msg).with_traceback(sys.exc_info()[2]) elif len(a) >= 2 and isinstance(a[1], IOError): err = a[1] else: break filter_error(err) raise # XXX should we catch generic exceptions and look for their # __cause__ or __context__? 
finally: socket.setdefaulttimeout(old_timeout) @contextlib.contextmanager def captured_output(stream_name): """Return a context manager used by captured_stdout/stdin/stderr that temporarily replaces the sys stream *stream_name* with a StringIO.""" import io orig_stdout = getattr(sys, stream_name) setattr(sys, stream_name, io.StringIO()) try: yield getattr(sys, stream_name) finally: setattr(sys, stream_name, orig_stdout) def captured_stdout(): """Capture the output of sys.stdout: with captured_stdout() as s: print("hello") self.assertEqual(s.getvalue(), "hello") """ return captured_output("stdout") def captured_stderr(): return captured_output("stderr") def captured_stdin(): return captured_output("stdin") def gc_collect(): """Force as many objects as possible to be collected. In non-CPython implementations of Python, this is needed because timely deallocation is not guaranteed by the garbage collector. (Even in CPython this can be the case in case of reference cycles.) This means that __del__ methods may be called later than expected and weakrefs may remain alive for longer than expected. This function tries its best to force all garbage objects to disappear. """ gc.collect() if is_jython: time.sleep(0.1) gc.collect() gc.collect() def python_is_optimized(): """Find if Python was built with optimizations.""" cflags = sysconfig.get_config_var('PY_CFLAGS') or '' final_opt = "" for opt in cflags.split(): if opt.startswith('-O'): final_opt = opt return final_opt and final_opt != '-O0' #======================================================================= # Decorator for running a function in a different locale, correctly resetting # it afterwards. def run_with_locale(catstr, *locales): def decorator(func): def inner(*args, **kwds): try: import locale category = getattr(locale, catstr) orig_locale = locale.setlocale(category) except AttributeError: # if the test author gives us an invalid category string raise except: # cannot retrieve original locale, so do nothing locale = orig_locale = None else: for loc in locales: try: locale.setlocale(category, loc) break except: pass # now run the function, resetting the locale on exceptions try: return func(*args, **kwds) finally: if locale and orig_locale: locale.setlocale(category, orig_locale) inner.__name__ = func.__name__ inner.__doc__ = func.__doc__ return inner return decorator #======================================================================= # Big-memory-test support. Separate from 'resources' because memory use # should be configurable. # Some handy shorthands. Note that these are used for byte-limits as well # as size-limits, in the various bigmem tests _1M = 1024*1024 _1G = 1024 * _1M _2G = 2 * _1G _4G = 4 * _1G MAX_Py_ssize_t = sys.maxsize def set_memlimit(limit): global max_memuse global real_max_memuse sizes = { 'k': 1024, 'm': _1M, 'g': _1G, 't': 1024*_1G, } m = re.match(r'(\d+(\.\d+)?) (K|M|G|T)b?$', limit, re.IGNORECASE | re.VERBOSE) if m is None: raise ValueError('Invalid memory limit %r' % (limit,)) memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()]) real_max_memuse = memlimit if memlimit > MAX_Py_ssize_t: memlimit = MAX_Py_ssize_t if memlimit < _2G - 1: raise ValueError('Memory limit %r too low to be useful' % (limit,)) max_memuse = memlimit def _memory_watchdog(start_evt, finish_evt, period=10.0): """A function which periodically watches the process' memory consumption and prints it out. 
""" # XXX: because of the GIL, and because the very long operations tested # in most bigmem tests are uninterruptible, the loop below gets woken up # much less often than expected. # The polling code should be rewritten in raw C, without holding the GIL, # and push results onto an anonymous pipe. try: page_size = os.sysconf('SC_PAGESIZE') except (ValueError, AttributeError): try: page_size = os.sysconf('SC_PAGE_SIZE') except (ValueError, AttributeError): page_size = 4096 procfile = '/proc/{pid}/statm'.format(pid=os.getpid()) try: f = open(procfile, 'rb') except IOError as e: warnings.warn('/proc not available for stats: {}'.format(e), RuntimeWarning) sys.stderr.flush() return with f: start_evt.set() old_data = -1 while not finish_evt.wait(period): f.seek(0) statm = f.read().decode('ascii') data = int(statm.split()[5]) if data != old_data: old_data = data print(" ... process data size: {data:.1f}G" .format(data=data * page_size / (1024 ** 3))) def bigmemtest(size, memuse, dry_run=True): """Decorator for bigmem tests. 'minsize' is the minimum useful size for the test (in arbitrary, test-interpreted units.) 'memuse' is the number of 'bytes per size' for the test, or a good estimate of it. if 'dry_run' is False, it means the test doesn't support dummy runs when -M is not specified. """ def decorator(f): def wrapper(self): size = wrapper.size memuse = wrapper.memuse if not real_max_memuse: maxsize = 5147 else: maxsize = size if ((real_max_memuse or not dry_run) and real_max_memuse < maxsize * memuse): raise unittest.SkipTest( "not enough memory: %.1fG minimum needed" % (size * memuse / (1024 ** 3))) if real_max_memuse and verbose and threading: print() print(" ... expected peak memory use: {peak:.1f}G" .format(peak=size * memuse / (1024 ** 3))) sys.stdout.flush() start_evt = threading.Event() finish_evt = threading.Event() t = threading.Thread(target=_memory_watchdog, args=(start_evt, finish_evt, 0.5)) t.daemon = True t.start() start_evt.set() else: t = None try: return f(self, maxsize) finally: if t: finish_evt.set() t.join() wrapper.size = size wrapper.memuse = memuse return wrapper return decorator def bigaddrspacetest(f): """Decorator for tests that fill the address space.""" def wrapper(self): if max_memuse < MAX_Py_ssize_t: if MAX_Py_ssize_t >= 2**63 - 1 and max_memuse >= 2**31: raise unittest.SkipTest( "not enough memory: try a 32-bit build instead") else: raise unittest.SkipTest( "not enough memory: %.1fG minimum needed" % (MAX_Py_ssize_t / (1024 ** 3))) else: return f(self) return wrapper #======================================================================= # unittest integration. class BasicTestRunner: def run(self, test): result = unittest.TestResult() test(result) return result def _id(obj): return obj def requires_resource(resource): if resource == 'gui' and not _is_gui_available(): return unittest.skip("resource 'gui' is not available") if is_resource_enabled(resource): return _id else: return unittest.skip("resource {0!r} is not enabled".format(resource)) def cpython_only(test): """ Decorator for tests only applicable on CPython. 
""" return impl_detail(cpython=True)(test) def impl_detail(msg=None, **guards): if check_impl_detail(**guards): return _id if msg is None: guardnames, default = _parse_guards(guards) if default: msg = "implementation detail not available on {0}" else: msg = "implementation detail specific to {0}" guardnames = sorted(guardnames.keys()) msg = msg.format(' or '.join(guardnames)) return unittest.skip(msg) def _parse_guards(guards): # Returns a tuple ({platform_name: run_me}, default_value) if not guards: return ({'cpython': True}, False) is_true = list(guards.values())[0] assert list(guards.values()) == [is_true] * len(guards) # all True or all False return (guards, not is_true) # Use the following check to guard CPython's implementation-specific tests -- # or to run them only on the implementation(s) guarded by the arguments. def check_impl_detail(**guards): """This function returns True or False depending on the host platform. Examples: if check_impl_detail(): # only on CPython (default) if check_impl_detail(jython=True): # only on Jython if check_impl_detail(cpython=False): # everywhere except on CPython """ guards, default = _parse_guards(guards) return guards.get(platform.python_implementation().lower(), default) def _filter_suite(suite, pred): """Recursively filter test cases in a suite based on a predicate.""" newtests = [] for test in suite._tests: if isinstance(test, unittest.TestSuite): _filter_suite(test, pred) newtests.append(test) else: if pred(test): newtests.append(test) suite._tests = newtests def _run_suite(suite): """Run tests from a unittest.TestSuite-derived class.""" if verbose: runner = unittest.TextTestRunner(sys.stdout, verbosity=2, failfast=failfast) else: runner = BasicTestRunner() result = runner.run(suite) if not result.wasSuccessful(): if len(result.errors) == 1 and not result.failures: err = result.errors[0][1] elif len(result.failures) == 1 and not result.errors: err = result.failures[0][1] else: err = "multiple errors occurred" if not verbose: err += "; run in verbose mode for details" raise TestFailed(err) def run_unittest(*classes): """Run tests from unittest.TestCase-derived classes.""" valid_types = (unittest.TestSuite, unittest.TestCase) suite = unittest.TestSuite() for cls in classes: if isinstance(cls, str): if cls in sys.modules: suite.addTest(unittest.findTestCases(sys.modules[cls])) else: raise ValueError("str arguments must be keys in sys.modules") elif isinstance(cls, valid_types): suite.addTest(cls) else: suite.addTest(unittest.makeSuite(cls)) def case_pred(test): if match_tests is None: return True for name in test.id().split("."): if fnmatch.fnmatchcase(name, match_tests): return True return False _filter_suite(suite, case_pred) _run_suite(suite) #======================================================================= # doctest driver. def run_doctest(module, verbosity=None): """Run doctest on the given module. Return (#failures, #tests). If optional argument verbosity is not specified (or is None), pass support's belief about verbosity on to doctest. Else doctest's usual behavior is used (it searches sys.argv for -v). """ import doctest if verbosity is None: verbosity = verbose else: verbosity = None f, t = doctest.testmod(module, verbose=verbosity) if f: raise TestFailed("%d of %d doctests failed" % (f, t)) if verbose: print('doctest (%s) ... %d tests with zero failures' % (module.__name__, t)) return f, t #======================================================================= # Support for saving and restoring the imported modules. 
def modules_setup(): return sys.modules.copy(), def modules_cleanup(oldmodules): # Encoders/decoders are registered permanently within the internal # codec cache. If we destroy the corresponding modules their # globals will be set to None which will trip up the cached functions. encodings = [(k, v) for k, v in sys.modules.items() if k.startswith('encodings.')] sys.modules.clear() sys.modules.update(encodings) # XXX: This kind of problem can affect more than just encodings. In particular # extension modules (such as _ssl) don't cope with reloading properly. # Really, test modules should be cleaning out the test specific modules they # know they added (ala test_runpy) rather than relying on this function (as # test_importhooks and test_pkg do currently). # Implicitly imported *real* modules should be left alone (see issue 10556). sys.modules.update(oldmodules) #======================================================================= # Threading support to prevent reporting refleaks when running regrtest.py -R # NOTE: we use thread._count() rather than threading.enumerate() (or the # moral equivalent thereof) because a threading.Thread object is still alive # until its __bootstrap() method has returned, even after it has been # unregistered from the threading module. # thread._count(), on the other hand, only gets decremented *after* the # __bootstrap() method has returned, which gives us reliable reference counts # at the end of a test run. def threading_setup(): if _thread: return _thread._count(), threading._dangling.copy() else: return 1, () def threading_cleanup(*original_values): if not _thread: return _MAX_COUNT = 10 for count in range(_MAX_COUNT): values = _thread._count(), threading._dangling if values == original_values: break time.sleep(0.1) gc_collect() # XXX print a warning in case of failure? def reap_threads(func): """Use this function when threads are being used. This will ensure that the threads are cleaned up even when the test fails. If threading is unavailable this function does nothing. """ if not _thread: return func @functools.wraps(func) def decorator(*args): key = threading_setup() try: return func(*args) finally: threading_cleanup(*key) return decorator def reap_children(): """Use this function at the end of test_main() whenever sub-processes are started. This will help ensure that no extra children (zombies) stick around to hog resources and create problems when looking for refleaks. """ # Reap all our dead child processes so we don't leave zombies around. # These hog resources and might be causing some of the buildbots to die. if hasattr(os, 'waitpid'): any_process = -1 while True: try: # This will raise an exception on Windows. That's ok. pid, status = os.waitpid(any_process, os.WNOHANG) if pid == 0: break except: break @contextlib.contextmanager def swap_attr(obj, attr, new_val): """Temporary swap out an attribute with a new object. Usage: with swap_attr(obj, "attr", 5): ... This will set obj.attr to 5 for the duration of the with: block, restoring the old value at the end of the block. If `attr` doesn't exist on `obj`, it will be created and then deleted at the end of the block. """ if hasattr(obj, attr): real_val = getattr(obj, attr) setattr(obj, attr, new_val) try: yield None finally: setattr(obj, attr, real_val) else: setattr(obj, attr, new_val) try: yield None finally: delattr(obj, attr) @contextlib.contextmanager def swap_item(obj, item, new_val): """Temporary swap out an item with a new object. Usage: with swap_item(obj, "item", 5): ... 
This will set obj["item"] to 5 for the duration of the with: block, restoring the old value at the end of the block. If `item` doesn't exist on `obj`, it will be created and then deleted at the end of the block. """ if item in obj: real_val = obj[item] obj[item] = new_val try: yield None finally: obj[item] = real_val else: obj[item] = new_val try: yield None finally: del obj[item] def strip_python_stderr(stderr): """Strip the stderr of a Python process from potential debug output emitted by the interpreter. This will typically be run on the result of the communicate() method of a subprocess.Popen object. """ stderr = re.sub(br"\[\d+ refs\]\r?\n?$", b"", stderr).strip() return stderr def args_from_interpreter_flags(): """Return a list of command-line arguments reproducing the current settings in sys.flags.""" flag_opt_map = { 'bytes_warning': 'b', 'dont_write_bytecode': 'B', 'hash_randomization': 'R', 'ignore_environment': 'E', 'no_user_site': 's', 'no_site': 'S', 'optimize': 'O', 'verbose': 'v', } args = [] for flag, opt in flag_opt_map.items(): v = getattr(sys.flags, flag) if v > 0: args.append('-' + opt * v) return args #============================================================ # Support for assertions about logging. #============================================================ class TestHandler(logging.handlers.BufferingHandler): def __init__(self, matcher): # BufferingHandler takes a "capacity" argument # so as to know when to flush. As we're overriding # shouldFlush anyway, we can set a capacity of zero. # You can call flush() manually to clear out the # buffer. logging.handlers.BufferingHandler.__init__(self, 0) self.matcher = matcher def shouldFlush(self, record): return False def emit(self, record): self.format(record) self.buffer.append(record.__dict__) def matches(self, **kwargs): """ Look for a saved dict whose keys/values match the supplied arguments. """ result = False for d in self.buffer: if self.matcher.matches(d, **kwargs): result = True break return result class Matcher(object): _partial_matches = ('msg', 'message') def matches(self, d, **kwargs): """ Try to match a single dict with the supplied arguments. Keys whose values are strings and which are in self._partial_matches will be checked for partial (i.e. substring) matches. You can extend this scheme to (for example) do regular expression matching, etc. """ result = True for k in kwargs: v = kwargs[k] dv = d.get(k) if not self.match_value(k, dv, v): result = False break return result def match_value(self, k, dv, v): """ Try to match a single stored value (dv) with a supplied value (v). """ if type(v) != type(dv): result = False elif type(dv) is not str or k not in self._partial_matches: result = (v == dv) else: result = dv.find(v) >= 0 return result _can_symlink = None # type: Any def can_symlink(): global _can_symlink if _can_symlink is not None: return _can_symlink symlink_path = TESTFN + "can_symlink" try: os.symlink(TESTFN, symlink_path) can = True except (OSError, NotImplementedError, AttributeError): can = False else: os.remove(symlink_path) _can_symlink = can return can def skip_unless_symlink(test): """Skip decorator for tests that require functional symlink""" ok = can_symlink() msg = "Requires functional symlink implementation" if ok: return test else: return unittest.skip(msg)(test) def patch(test_instance, object_to_patch, attr_name, new_value): """Override 'object_to_patch'.'attr_name' with 'new_value'. Also, add a cleanup procedure to 'test_instance' to restore 'object_to_patch' value for 'attr_name'. 
The 'attr_name' should be a valid attribute for 'object_to_patch'. """ # check that 'attr_name' is a real attribute for 'object_to_patch' # will raise AttributeError if it does not exist getattr(object_to_patch, attr_name) # keep a copy of the old value attr_is_local = False try: old_value = object_to_patch.__dict__[attr_name] except (AttributeError, KeyError): old_value = getattr(object_to_patch, attr_name, None) else: attr_is_local = True # restore the value when the test is done def cleanup(): if attr_is_local: setattr(object_to_patch, attr_name, old_value) else: delattr(object_to_patch, attr_name) test_instance.addCleanup(cleanup) # actually override the attribute setattr(object_to_patch, attr_name, new_value) mypy-0.560/test-data/stdlib-samples/3.2/test/test_base64.py0000644€tŠÔÚ€2›s®0000002703713215007205027467 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from test import support import base64 import binascii import sys import subprocess from typing import Any class LegacyBase64TestCase(unittest.TestCase): def test_encodebytes(self) -> None: eq = self.assertEqual eq(base64.encodebytes(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=\n") eq(base64.encodebytes(b"a"), b"YQ==\n") eq(base64.encodebytes(b"ab"), b"YWI=\n") eq(base64.encodebytes(b"abc"), b"YWJj\n") eq(base64.encodebytes(b""), b"") eq(base64.encodebytes(b"abcdefghijklmnopqrstuvwxyz" b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"0123456789!@#0^&*();:<>,. []{}"), b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n") self.assertRaises(TypeError, base64.encodebytes, "") def test_decodebytes(self) -> None: eq = self.assertEqual eq(base64.decodebytes(b"d3d3LnB5dGhvbi5vcmc=\n"), b"www.python.org") eq(base64.decodebytes(b"YQ==\n"), b"a") eq(base64.decodebytes(b"YWI=\n"), b"ab") eq(base64.decodebytes(b"YWJj\n"), b"abc") eq(base64.decodebytes(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n"), b"abcdefghijklmnopqrstuvwxyz" b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"0123456789!@#0^&*();:<>,. []{}") eq(base64.decodebytes(b''), b'') self.assertRaises(TypeError, base64.decodebytes, "") def test_encode(self) -> None: eq = self.assertEqual from io import BytesIO infp = BytesIO(b'abcdefghijklmnopqrstuvwxyz' b'ABCDEFGHIJKLMNOPQRSTUVWXYZ' b'0123456789!@#0^&*();:<>,. []{}') outfp = BytesIO() base64.encode(infp, outfp) eq(outfp.getvalue(), b'YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE' b'RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT' b'Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n') def test_decode(self) -> None: from io import BytesIO infp = BytesIO(b'd3d3LnB5dGhvbi5vcmc=') outfp = BytesIO() base64.decode(infp, outfp) self.assertEqual(outfp.getvalue(), b'www.python.org') class BaseXYTestCase(unittest.TestCase): def test_b64encode(self) -> None: eq = self.assertEqual # Test default alphabet eq(base64.b64encode(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=") eq(base64.b64encode(b'\x00'), b'AA==') eq(base64.b64encode(b"a"), b"YQ==") eq(base64.b64encode(b"ab"), b"YWI=") eq(base64.b64encode(b"abc"), b"YWJj") eq(base64.b64encode(b""), b"") eq(base64.b64encode(b"abcdefghijklmnopqrstuvwxyz" b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"0123456789!@#0^&*();:<>,. 
[]{}"), b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") # Test with arbitrary alternative characters eq(base64.b64encode(b'\xd3V\xbeo\xf7\x1d', altchars=b'*$'), b'01a*b$cd') # Check if passing a str object raises an error self.assertRaises(TypeError, base64.b64encode, "") self.assertRaises(TypeError, base64.b64encode, b"", altchars="") # Test standard alphabet eq(base64.standard_b64encode(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=") eq(base64.standard_b64encode(b"a"), b"YQ==") eq(base64.standard_b64encode(b"ab"), b"YWI=") eq(base64.standard_b64encode(b"abc"), b"YWJj") eq(base64.standard_b64encode(b""), b"") eq(base64.standard_b64encode(b"abcdefghijklmnopqrstuvwxyz" b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"0123456789!@#0^&*();:<>,. []{}"), b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") # Check if passing a str object raises an error self.assertRaises(TypeError, base64.standard_b64encode, "") self.assertRaises(TypeError, base64.standard_b64encode, b"", altchars="") # Test with 'URL safe' alternative characters eq(base64.urlsafe_b64encode(b'\xd3V\xbeo\xf7\x1d'), b'01a-b_cd') # Check if passing a str object raises an error self.assertRaises(TypeError, base64.urlsafe_b64encode, "") def test_b64decode(self) -> None: eq = self.assertEqual eq(base64.b64decode(b"d3d3LnB5dGhvbi5vcmc="), b"www.python.org") eq(base64.b64decode(b'AA=='), b'\x00') eq(base64.b64decode(b"YQ=="), b"a") eq(base64.b64decode(b"YWI="), b"ab") eq(base64.b64decode(b"YWJj"), b"abc") eq(base64.b64decode(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), b"abcdefghijklmnopqrstuvwxyz" b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"0123456789!@#0^&*();:<>,. []{}") eq(base64.b64decode(b''), b'') # Test with arbitrary alternative characters eq(base64.b64decode(b'01a*b$cd', altchars=b'*$'), b'\xd3V\xbeo\xf7\x1d') # Check if passing a str object raises an error self.assertRaises(TypeError, base64.b64decode, "") self.assertRaises(TypeError, base64.b64decode, b"", altchars="") # Test standard alphabet eq(base64.standard_b64decode(b"d3d3LnB5dGhvbi5vcmc="), b"www.python.org") eq(base64.standard_b64decode(b"YQ=="), b"a") eq(base64.standard_b64decode(b"YWI="), b"ab") eq(base64.standard_b64decode(b"YWJj"), b"abc") eq(base64.standard_b64decode(b""), b"") eq(base64.standard_b64decode(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), b"abcdefghijklmnopqrstuvwxyz" b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"0123456789!@#0^&*();:<>,. []{}") # Check if passing a str object raises an error self.assertRaises(TypeError, base64.standard_b64decode, "") self.assertRaises(TypeError, base64.standard_b64decode, b"", altchars="") # Test with 'URL safe' alternative characters eq(base64.urlsafe_b64decode(b'01a-b_cd'), b'\xd3V\xbeo\xf7\x1d') self.assertRaises(TypeError, base64.urlsafe_b64decode, "") def test_b64decode_padding_error(self) -> None: self.assertRaises(binascii.Error, base64.b64decode, b'abc') def test_b64decode_invalid_chars(self) -> None: # issue 1466065: Test some invalid characters. 
tests = ((b'%3d==', b'\xdd'), (b'$3d==', b'\xdd'), (b'[==', b''), (b'YW]3=', b'am'), (b'3{d==', b'\xdd'), (b'3d}==', b'\xdd'), (b'@@', b''), (b'!', b''), (b'YWJj\nYWI=', b'abcab')) for bstr, res in tests: self.assertEqual(base64.b64decode(bstr), res) with self.assertRaises(binascii.Error): base64.b64decode(bstr, validate=True) def test_b32encode(self) -> None: eq = self.assertEqual eq(base64.b32encode(b''), b'') eq(base64.b32encode(b'\x00'), b'AA======') eq(base64.b32encode(b'a'), b'ME======') eq(base64.b32encode(b'ab'), b'MFRA====') eq(base64.b32encode(b'abc'), b'MFRGG===') eq(base64.b32encode(b'abcd'), b'MFRGGZA=') eq(base64.b32encode(b'abcde'), b'MFRGGZDF') self.assertRaises(TypeError, base64.b32encode, "") def test_b32decode(self) -> None: eq = self.assertEqual eq(base64.b32decode(b''), b'') eq(base64.b32decode(b'AA======'), b'\x00') eq(base64.b32decode(b'ME======'), b'a') eq(base64.b32decode(b'MFRA===='), b'ab') eq(base64.b32decode(b'MFRGG==='), b'abc') eq(base64.b32decode(b'MFRGGZA='), b'abcd') eq(base64.b32decode(b'MFRGGZDF'), b'abcde') self.assertRaises(TypeError, base64.b32decode, "") def test_b32decode_casefold(self) -> None: eq = self.assertEqual eq(base64.b32decode(b'', True), b'') eq(base64.b32decode(b'ME======', True), b'a') eq(base64.b32decode(b'MFRA====', True), b'ab') eq(base64.b32decode(b'MFRGG===', True), b'abc') eq(base64.b32decode(b'MFRGGZA=', True), b'abcd') eq(base64.b32decode(b'MFRGGZDF', True), b'abcde') # Lower cases eq(base64.b32decode(b'me======', True), b'a') eq(base64.b32decode(b'mfra====', True), b'ab') eq(base64.b32decode(b'mfrgg===', True), b'abc') eq(base64.b32decode(b'mfrggza=', True), b'abcd') eq(base64.b32decode(b'mfrggzdf', True), b'abcde') # Expected exceptions self.assertRaises(TypeError, base64.b32decode, b'me======') # Mapping zero and one eq(base64.b32decode(b'MLO23456'), b'b\xdd\xad\xf3\xbe') eq(base64.b32decode(b'M1023456', map01=b'L'), b'b\xdd\xad\xf3\xbe') eq(base64.b32decode(b'M1023456', map01=b'I'), b'b\x1d\xad\xf3\xbe') self.assertRaises(TypeError, base64.b32decode, b"", map01="") def test_b32decode_error(self) -> None: self.assertRaises(binascii.Error, base64.b32decode, b'abc') self.assertRaises(binascii.Error, base64.b32decode, b'ABCDEF==') def test_b16encode(self) -> None: eq = self.assertEqual eq(base64.b16encode(b'\x01\x02\xab\xcd\xef'), b'0102ABCDEF') eq(base64.b16encode(b'\x00'), b'00') self.assertRaises(TypeError, base64.b16encode, "") def test_b16decode(self) -> None: eq = self.assertEqual eq(base64.b16decode(b'0102ABCDEF'), b'\x01\x02\xab\xcd\xef') eq(base64.b16decode(b'00'), b'\x00') # Lower case is not allowed without a flag self.assertRaises(binascii.Error, base64.b16decode, b'0102abcdef') # Case fold eq(base64.b16decode(b'0102abcdef', True), b'\x01\x02\xab\xcd\xef') self.assertRaises(TypeError, base64.b16decode, "") def test_ErrorHeritage(self) -> None: self.assertTrue(issubclass(binascii.Error, ValueError)) class TestMain(unittest.TestCase): def get_output(self, *args_tuple: str, **options: Any) -> Any: args = [sys.executable, '-m', 'base64'] + list(args_tuple) return subprocess.check_output(args, **options) def test_encode_decode(self) -> None: output = self.get_output('-t') self.assertSequenceEqual(output.splitlines(), [ b"b'Aladdin:open sesame'", br"b'QWxhZGRpbjpvcGVuIHNlc2FtZQ==\n'", b"b'Aladdin:open sesame'", ]) def test_encode_file(self) -> None: with open(support.TESTFN, 'wb') as fp: fp.write(b'a\xffb\n') output = self.get_output('-e', support.TESTFN) self.assertEqual(output.rstrip(), b'Yf9iCg==') with 
open(support.TESTFN, 'rb') as fp: output = self.get_output('-e', stdin=fp) self.assertEqual(output.rstrip(), b'Yf9iCg==') def test_decode(self) -> None: with open(support.TESTFN, 'wb') as fp: fp.write(b'Yf9iCg==') output = self.get_output('-d', support.TESTFN) self.assertEqual(output.rstrip(), b'a\xffb') def test_main() -> None: support.run_unittest(__name__) if __name__ == '__main__': test_main() mypy-0.560/test-data/stdlib-samples/3.2/test/test_fnmatch.py0000644€tŠÔÚ€2›s®0000000602713215007205030017 0ustar jukkaDROPBOX\Domain Users00000000000000"""Test cases for the fnmatch module.""" from test import support import unittest from fnmatch import fnmatch, fnmatchcase, translate, filter from typing import Any, AnyStr, Callable class FnmatchTestCase(unittest.TestCase): def check_match(self, filename: AnyStr, pattern: AnyStr, should_match: int = 1, fn: Any = fnmatch) -> None: # see #270 if should_match: self.assertTrue(fn(filename, pattern), "expected %r to match pattern %r" % (filename, pattern)) else: self.assertTrue(not fn(filename, pattern), "expected %r not to match pattern %r" % (filename, pattern)) def test_fnmatch(self) -> None: check = self.check_match check('abc', 'abc') check('abc', '?*?') check('abc', '???*') check('abc', '*???') check('abc', '???') check('abc', '*') check('abc', 'ab[cd]') check('abc', 'ab[!de]') check('abc', 'ab[de]', 0) check('a', '??', 0) check('a', 'b', 0) # these test that '\' is handled correctly in character sets; # see SF bug #409651 check('\\', r'[\]') check('a', r'[!\]') check('\\', r'[!\]', 0) # test that filenames with newlines in them are handled correctly. # http://bugs.python.org/issue6665 check('foo\nbar', 'foo*') check('foo\nbar\n', 'foo*') check('\nfoo', 'foo*', False) check('\n', '*') def test_mix_bytes_str(self) -> None: self.assertRaises(TypeError, fnmatch, 'test', b'*') self.assertRaises(TypeError, fnmatch, b'test', '*') self.assertRaises(TypeError, fnmatchcase, 'test', b'*') self.assertRaises(TypeError, fnmatchcase, b'test', '*') def test_fnmatchcase(self) -> None: check = self.check_match check('AbC', 'abc', 0, fnmatchcase) check('abc', 'AbC', 0, fnmatchcase) def test_bytes(self) -> None: self.check_match(b'test', b'te*') self.check_match(b'test\xff', b'te*\xff') self.check_match(b'foo\nbar', b'foo*') class TranslateTestCase(unittest.TestCase): def test_translate(self) -> None: self.assertEqual(translate('*'), '.*\Z(?ms)') self.assertEqual(translate('?'), '.\Z(?ms)') self.assertEqual(translate('a?b*'), 'a.b.*\Z(?ms)') self.assertEqual(translate('[abc]'), '[abc]\Z(?ms)') self.assertEqual(translate('[]]'), '[]]\Z(?ms)') self.assertEqual(translate('[!x]'), '[^x]\Z(?ms)') self.assertEqual(translate('[^x]'), '[\\^x]\Z(?ms)') self.assertEqual(translate('[x'), '\\[x\Z(?ms)') class FilterTestCase(unittest.TestCase): def test_filter(self) -> None: self.assertEqual(filter(['a', 'b'], 'a'), ['a']) def test_main() -> None: support.run_unittest(FnmatchTestCase, TranslateTestCase, FilterTestCase) if __name__ == "__main__": test_main() mypy-0.560/test-data/stdlib-samples/3.2/test/test_genericpath.py0000644€tŠÔÚ€2›s®0000002632313215007205030671 0ustar jukkaDROPBOX\Domain Users00000000000000""" Tests common to genericpath, macpath, ntpath and posixpath """ import unittest from test import support import os import genericpath import imp imp.reload(genericpath) # Make sure we are using the local copy import sys from typing import Any, List def safe_rmdir(dirname: str) -> None: try: os.rmdir(dirname) except OSError: pass class GenericTest(unittest.TestCase): # 
The path module to be tested pathmodule = genericpath # type: Any common_attributes = ['commonprefix', 'getsize', 'getatime', 'getctime', 'getmtime', 'exists', 'isdir', 'isfile'] attributes = [] # type: List[str] def test_no_argument(self) -> None: for attr in self.common_attributes + self.attributes: with self.assertRaises(TypeError): getattr(self.pathmodule, attr)() self.fail("{}.{}() did not raise a TypeError" .format(self.pathmodule.__name__, attr)) def test_commonprefix(self) -> None: commonprefix = self.pathmodule.commonprefix self.assertEqual( commonprefix([]), "" ) self.assertEqual( commonprefix(["/home/swenson/spam", "/home/swen/spam"]), "/home/swen" ) self.assertEqual( commonprefix(["/home/swen/spam", "/home/swen/eggs"]), "/home/swen/" ) self.assertEqual( commonprefix(["/home/swen/spam", "/home/swen/spam"]), "/home/swen/spam" ) self.assertEqual( commonprefix(["home:swenson:spam", "home:swen:spam"]), "home:swen" ) self.assertEqual( commonprefix([":home:swen:spam", ":home:swen:eggs"]), ":home:swen:" ) self.assertEqual( commonprefix([":home:swen:spam", ":home:swen:spam"]), ":home:swen:spam" ) self.assertEqual( commonprefix([b"/home/swenson/spam", b"/home/swen/spam"]), b"/home/swen" ) self.assertEqual( commonprefix([b"/home/swen/spam", b"/home/swen/eggs"]), b"/home/swen/" ) self.assertEqual( commonprefix([b"/home/swen/spam", b"/home/swen/spam"]), b"/home/swen/spam" ) self.assertEqual( commonprefix([b"home:swenson:spam", b"home:swen:spam"]), b"home:swen" ) self.assertEqual( commonprefix([b":home:swen:spam", b":home:swen:eggs"]), b":home:swen:" ) self.assertEqual( commonprefix([b":home:swen:spam", b":home:swen:spam"]), b":home:swen:spam" ) testlist = ['', 'abc', 'Xbcd', 'Xb', 'XY', 'abcd', 'aXc', 'abd', 'ab', 'aX', 'abcX'] for s1 in testlist: for s2 in testlist: p = commonprefix([s1, s2]) self.assertTrue(s1.startswith(p)) self.assertTrue(s2.startswith(p)) if s1 != s2: n = len(p) self.assertNotEqual(s1[n:n+1], s2[n:n+1]) def test_getsize(self) -> None: f = open(support.TESTFN, "wb") try: f.write(b"foo") f.close() self.assertEqual(self.pathmodule.getsize(support.TESTFN), 3) finally: if not f.closed: f.close() support.unlink(support.TESTFN) def test_time(self) -> None: f = open(support.TESTFN, "wb") try: f.write(b"foo") f.close() f = open(support.TESTFN, "ab") f.write(b"bar") f.close() f = open(support.TESTFN, "rb") d = f.read() f.close() self.assertEqual(d, b"foobar") self.assertLessEqual( self.pathmodule.getctime(support.TESTFN), self.pathmodule.getmtime(support.TESTFN) ) finally: if not f.closed: f.close() support.unlink(support.TESTFN) def test_exists(self) -> None: self.assertIs(self.pathmodule.exists(support.TESTFN), False) f = open(support.TESTFN, "wb") try: f.write(b"foo") f.close() self.assertIs(self.pathmodule.exists(support.TESTFN), True) if not self.pathmodule == genericpath: self.assertIs(self.pathmodule.lexists(support.TESTFN), True) finally: if not f.closed: f.close() support.unlink(support.TESTFN) def test_isdir(self) -> None: self.assertIs(self.pathmodule.isdir(support.TESTFN), False) f = open(support.TESTFN, "wb") try: f.write(b"foo") f.close() self.assertIs(self.pathmodule.isdir(support.TESTFN), False) os.remove(support.TESTFN) os.mkdir(support.TESTFN) self.assertIs(self.pathmodule.isdir(support.TESTFN), True) os.rmdir(support.TESTFN) finally: if not f.closed: f.close() support.unlink(support.TESTFN) safe_rmdir(support.TESTFN) def test_isfile(self) -> None: self.assertIs(self.pathmodule.isfile(support.TESTFN), False) f = open(support.TESTFN, "wb") try: f.write(b"foo") 
f.close() self.assertIs(self.pathmodule.isfile(support.TESTFN), True) os.remove(support.TESTFN) os.mkdir(support.TESTFN) self.assertIs(self.pathmodule.isfile(support.TESTFN), False) os.rmdir(support.TESTFN) finally: if not f.closed: f.close() support.unlink(support.TESTFN) safe_rmdir(support.TESTFN) # Following TestCase is not supposed to be run from test_genericpath. # It is inherited by other test modules (macpath, ntpath, posixpath). class CommonTest(GenericTest): # The path module to be tested pathmodule = None # type: Any common_attributes = GenericTest.common_attributes + [ # Properties 'curdir', 'pardir', 'extsep', 'sep', 'pathsep', 'defpath', 'altsep', 'devnull', # Methods 'normcase', 'splitdrive', 'expandvars', 'normpath', 'abspath', 'join', 'split', 'splitext', 'isabs', 'basename', 'dirname', 'lexists', 'islink', 'ismount', 'expanduser', 'normpath', 'realpath', ] def test_normcase(self) -> None: normcase = self.pathmodule.normcase # check that normcase() is idempotent for p in ["FoO/./BaR", b"FoO/./BaR"]: p = normcase(p) self.assertEqual(p, normcase(p)) self.assertEqual(normcase(''), '') self.assertEqual(normcase(b''), b'') # check that normcase raises a TypeError for invalid types for path in (None, True, 0, 2.5, [], bytearray(b''), {'o','o'}): self.assertRaises(TypeError, normcase, path) def test_splitdrive(self) -> None: # splitdrive for non-NT paths splitdrive = self.pathmodule.splitdrive self.assertEqual(splitdrive("/foo/bar"), ("", "/foo/bar")) self.assertEqual(splitdrive("foo:bar"), ("", "foo:bar")) self.assertEqual(splitdrive(":foo:bar"), ("", ":foo:bar")) self.assertEqual(splitdrive(b"/foo/bar"), (b"", b"/foo/bar")) self.assertEqual(splitdrive(b"foo:bar"), (b"", b"foo:bar")) self.assertEqual(splitdrive(b":foo:bar"), (b"", b":foo:bar")) def test_expandvars(self) -> None: if self.pathmodule.__name__ == 'macpath': self.skipTest('macpath.expandvars is a stub') expandvars = self.pathmodule.expandvars with support.EnvironmentVarGuard() as env: env.clear() env["foo"] = "bar" env["{foo"] = "baz1" env["{foo}"] = "baz2" self.assertEqual(expandvars("foo"), "foo") self.assertEqual(expandvars("$foo bar"), "bar bar") self.assertEqual(expandvars("${foo}bar"), "barbar") self.assertEqual(expandvars("$[foo]bar"), "$[foo]bar") self.assertEqual(expandvars("$bar bar"), "$bar bar") self.assertEqual(expandvars("$?bar"), "$?bar") self.assertEqual(expandvars("${foo}bar"), "barbar") self.assertEqual(expandvars("$foo}bar"), "bar}bar") self.assertEqual(expandvars("${foo"), "${foo") self.assertEqual(expandvars("${{foo}}"), "baz1}") self.assertEqual(expandvars("$foo$foo"), "barbar") self.assertEqual(expandvars("$bar$bar"), "$bar$bar") self.assertEqual(expandvars(b"foo"), b"foo") self.assertEqual(expandvars(b"$foo bar"), b"bar bar") self.assertEqual(expandvars(b"${foo}bar"), b"barbar") self.assertEqual(expandvars(b"$[foo]bar"), b"$[foo]bar") self.assertEqual(expandvars(b"$bar bar"), b"$bar bar") self.assertEqual(expandvars(b"$?bar"), b"$?bar") self.assertEqual(expandvars(b"${foo}bar"), b"barbar") self.assertEqual(expandvars(b"$foo}bar"), b"bar}bar") self.assertEqual(expandvars(b"${foo"), b"${foo") self.assertEqual(expandvars(b"${{foo}}"), b"baz1}") self.assertEqual(expandvars(b"$foo$foo"), b"barbar") self.assertEqual(expandvars(b"$bar$bar"), b"$bar$bar") def test_abspath(self) -> None: self.assertIn("foo", self.pathmodule.abspath("foo")) self.assertIn(b"foo", self.pathmodule.abspath(b"foo")) # Abspath returns bytes when the arg is bytes for path in (b'', b'foo', b'f\xf2\xf2', b'/foo', b'C:\\'): 
self.assertIsInstance(self.pathmodule.abspath(path), bytes) def test_realpath(self) -> None: self.assertIn("foo", self.pathmodule.realpath("foo")) self.assertIn(b"foo", self.pathmodule.realpath(b"foo")) def test_normpath_issue5827(self) -> None: # Make sure normpath preserves unicode for path in ('', '.', '/', '\\', '///foo/.//bar//'): self.assertIsInstance(self.pathmodule.normpath(path), str) def test_abspath_issue3426(self) -> None: # Check that abspath returns unicode when the arg is unicode # with both ASCII and non-ASCII cwds. abspath = self.pathmodule.abspath for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'): self.assertIsInstance(abspath(path), str) unicwd = '\xe7w\xf0' try: fsencoding = support.TESTFN_ENCODING or "ascii" unicwd.encode(fsencoding) except (AttributeError, UnicodeEncodeError): # FS encoding is probably ASCII pass else: with support.temp_cwd(unicwd): for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'): self.assertIsInstance(abspath(path), str) @unittest.skipIf(sys.platform == 'darwin', "Mac OS X denies the creation of a directory with an invalid utf8 name") def test_nonascii_abspath(self) -> None: # Test non-ASCII, non-UTF8 bytes in the path. with support.temp_cwd(b'\xe7w\xf0'): self.test_abspath() def test_main() -> None: support.run_unittest(GenericTest) if __name__=="__main__": test_main() mypy-0.560/test-data/stdlib-samples/3.2/test/test_getopt.py0000644€tŠÔÚ€2›s®0000001576413215007205027711 0ustar jukkaDROPBOX\Domain Users00000000000000# test_getopt.py # David Goodger 2000-08-19 from test.support import verbose, run_doctest, run_unittest, EnvironmentVarGuard import unittest import getopt from typing import cast, Any sentinel = object() class GetoptTests(unittest.TestCase): def setUp(self) -> None: self.env = EnvironmentVarGuard() if "POSIXLY_CORRECT" in self.env: del self.env["POSIXLY_CORRECT"] def tearDown(self) -> None: self.env.__exit__() del self.env def assertError(self, *args: Any, **kwargs: Any) -> None: # JLe: work around mypy bug #229 cast(Any, self.assertRaises)(getopt.GetoptError, *args, **kwargs) def test_short_has_arg(self) -> None: self.assertTrue(getopt.short_has_arg('a', 'a:')) self.assertFalse(getopt.short_has_arg('a', 'a')) self.assertError(getopt.short_has_arg, 'a', 'b') def test_long_has_args(self) -> None: has_arg, option = getopt.long_has_args('abc', ['abc=']) self.assertTrue(has_arg) self.assertEqual(option, 'abc') has_arg, option = getopt.long_has_args('abc', ['abc']) self.assertFalse(has_arg) self.assertEqual(option, 'abc') has_arg, option = getopt.long_has_args('abc', ['abcd']) self.assertFalse(has_arg) self.assertEqual(option, 'abcd') self.assertError(getopt.long_has_args, 'abc', ['def']) self.assertError(getopt.long_has_args, 'abc', []) self.assertError(getopt.long_has_args, 'abc', ['abcd','abcde']) def test_do_shorts(self) -> None: opts, args = getopt.do_shorts([], 'a', 'a', []) self.assertEqual(opts, [('-a', '')]) self.assertEqual(args, []) opts, args = getopt.do_shorts([], 'a1', 'a:', []) self.assertEqual(opts, [('-a', '1')]) self.assertEqual(args, []) #opts, args = getopt.do_shorts([], 'a=1', 'a:', []) #self.assertEqual(opts, [('-a', '1')]) #self.assertEqual(args, []) opts, args = getopt.do_shorts([], 'a', 'a:', ['1']) self.assertEqual(opts, [('-a', '1')]) self.assertEqual(args, []) opts, args = getopt.do_shorts([], 'a', 'a:', ['1', '2']) self.assertEqual(opts, [('-a', '1')]) self.assertEqual(args, ['2']) self.assertError(getopt.do_shorts, [], 'a1', 'a', []) self.assertError(getopt.do_shorts, [], 'a', 'a:', []) def 
test_do_longs(self) -> None: opts, args = getopt.do_longs([], 'abc', ['abc'], []) self.assertEqual(opts, [('--abc', '')]) self.assertEqual(args, []) opts, args = getopt.do_longs([], 'abc=1', ['abc='], []) self.assertEqual(opts, [('--abc', '1')]) self.assertEqual(args, []) opts, args = getopt.do_longs([], 'abc=1', ['abcd='], []) self.assertEqual(opts, [('--abcd', '1')]) self.assertEqual(args, []) opts, args = getopt.do_longs([], 'abc', ['ab', 'abc', 'abcd'], []) self.assertEqual(opts, [('--abc', '')]) self.assertEqual(args, []) # Much like the preceding, except with a non-alpha character ("-") in # option name that precedes "="; failed in # http://python.org/sf/126863 opts, args = getopt.do_longs([], 'foo=42', ['foo-bar', 'foo=',], []) self.assertEqual(opts, [('--foo', '42')]) self.assertEqual(args, []) self.assertError(getopt.do_longs, [], 'abc=1', ['abc'], []) self.assertError(getopt.do_longs, [], 'abc', ['abc='], []) def test_getopt(self) -> None: # note: the empty string between '-a' and '--beta' is significant: # it simulates an empty string option argument ('-a ""') on the # command line. cmdline = ['-a', '1', '-b', '--alpha=2', '--beta', '-a', '3', '-a', '', '--beta', 'arg1', 'arg2'] opts, args = getopt.getopt(cmdline, 'a:b', ['alpha=', 'beta']) self.assertEqual(opts, [('-a', '1'), ('-b', ''), ('--alpha', '2'), ('--beta', ''), ('-a', '3'), ('-a', ''), ('--beta', '')]) # Note ambiguity of ('-b', '') and ('-a', '') above. This must be # accounted for in the code that calls getopt(). self.assertEqual(args, ['arg1', 'arg2']) self.assertError(getopt.getopt, cmdline, 'a:b', ['alpha', 'beta']) def test_gnu_getopt(self) -> None: # Test handling of GNU style scanning mode. cmdline = ['-a', 'arg1', '-b', '1', '--alpha', '--beta=2'] # GNU style opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta=']) self.assertEqual(args, ['arg1']) self.assertEqual(opts, [('-a', ''), ('-b', '1'), ('--alpha', ''), ('--beta', '2')]) # recognize "-" as an argument opts, args = getopt.gnu_getopt(['-a', '-', '-b', '-'], 'ab:', []) self.assertEqual(args, ['-']) self.assertEqual(opts, [('-a', ''), ('-b', '-')]) # Posix style via + opts, args = getopt.gnu_getopt(cmdline, '+ab:', ['alpha', 'beta=']) self.assertEqual(opts, [('-a', '')]) self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2']) # Posix style via POSIXLY_CORRECT self.env["POSIXLY_CORRECT"] = "1" opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta=']) self.assertEqual(opts, [('-a', '')]) self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2']) def test_libref_examples(self) -> None: s = """ Examples from the Library Reference: Doc/lib/libgetopt.tex An example using only Unix style options: >>> import getopt >>> args = '-a -b -cfoo -d bar a1 a2'.split() >>> args ['-a', '-b', '-cfoo', '-d', 'bar', 'a1', 'a2'] >>> optlist, args = getopt.getopt(args, 'abc:d:') >>> optlist [('-a', ''), ('-b', ''), ('-c', 'foo'), ('-d', 'bar')] >>> args ['a1', 'a2'] Using long option names is equally easy: >>> s = '--condition=foo --testing --output-file abc.def -x a1 a2' >>> args = s.split() >>> args ['--condition=foo', '--testing', '--output-file', 'abc.def', '-x', 'a1', 'a2'] >>> optlist, args = getopt.getopt(args, 'x', [ ... 
'condition=', 'output-file=', 'testing']) >>> optlist [('--condition', 'foo'), ('--testing', ''), ('--output-file', 'abc.def'), ('-x', '')] >>> args ['a1', 'a2'] """ import types m = types.ModuleType("libreftest", s) run_doctest(m, verbose) def test_issue4629(self) -> None: longopts, shortopts = getopt.getopt(['--help='], '', ['help=']) self.assertEqual(longopts, [('--help', '')]) longopts, shortopts = getopt.getopt(['--help=x'], '', ['help=']) self.assertEqual(longopts, [('--help', 'x')]) self.assertRaises(getopt.GetoptError, getopt.getopt, ['--help='], '', ['help']) def test_main() -> None: run_unittest(GetoptTests) if __name__ == "__main__": test_main() mypy-0.560/test-data/stdlib-samples/3.2/test/test_glob.py0000644€tŠÔÚ€2›s®0000001073013215007205027316 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from test.support import run_unittest, TESTFN, skip_unless_symlink, can_symlink import glob import os import shutil from typing import TypeVar, Iterable, List, cast T = TypeVar('T') class GlobTests(unittest.TestCase): tempdir = '' # JLe: work around mypy issue #231 def norm(self, first: str, *parts: str) -> str: return os.path.normpath(os.path.join(self.tempdir, first, *parts)) def mktemp(self, *parts: str) -> None: filename = self.norm(*parts) base, file = os.path.split(filename) if not os.path.exists(base): os.makedirs(base) f = open(filename, 'w') f.close() def setUp(self) -> None: self.tempdir = TESTFN+"_dir" self.mktemp('a', 'D') self.mktemp('aab', 'F') self.mktemp('aaa', 'zzzF') self.mktemp('ZZZ') self.mktemp('a', 'bcd', 'EF') self.mktemp('a', 'bcd', 'efg', 'ha') if can_symlink(): os.symlink(self.norm('broken'), self.norm('sym1')) os.symlink(self.norm('broken'), self.norm('sym2')) def tearDown(self) -> None: shutil.rmtree(self.tempdir) def glob(self, *parts: str) -> List[str]: if len(parts) == 1: pattern = parts[0] else: pattern = os.path.join(*parts) p = os.path.join(self.tempdir, pattern) res = glob.glob(p) self.assertEqual(list(glob.iglob(p)), res) return res def assertSequencesEqual_noorder(self, l1: Iterable[T], l2: Iterable[T]) -> None: self.assertEqual(set(l1), set(l2)) def test_glob_literal(self) -> None: eq = self.assertSequencesEqual_noorder eq(self.glob('a'), [self.norm('a')]) eq(self.glob('a', 'D'), [self.norm('a', 'D')]) eq(self.glob('aab'), [self.norm('aab')]) eq(self.glob('zymurgy'), cast(List[str], [])) # JLe: work around #230 # test return types are unicode, but only if os.listdir # returns unicode filenames uniset = set([str]) tmp = os.listdir('.') if set(type(x) for x in tmp) == uniset: u1 = glob.glob('*') u2 = glob.glob('./*') self.assertEqual(set(type(r) for r in u1), uniset) self.assertEqual(set(type(r) for r in u2), uniset) def test_glob_one_directory(self) -> None: eq = self.assertSequencesEqual_noorder eq(self.glob('a*'), map(self.norm, ['a', 'aab', 'aaa'])) eq(self.glob('*a'), map(self.norm, ['a', 'aaa'])) eq(self.glob('aa?'), map(self.norm, ['aaa', 'aab'])) eq(self.glob('aa[ab]'), map(self.norm, ['aaa', 'aab'])) eq(self.glob('*q'), cast(List[str], [])) # JLe: work around #230 def test_glob_nested_directory(self) -> None: eq = self.assertSequencesEqual_noorder if os.path.normcase("abCD") == "abCD": # case-sensitive filesystem eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF')]) else: # case insensitive filesystem eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF'), self.norm('a', 'bcd', 'efg')]) eq(self.glob('a', 'bcd', '*g'), [self.norm('a', 'bcd', 'efg')]) def test_glob_directory_names(self) -> None: eq = 
self.assertSequencesEqual_noorder eq(self.glob('*', 'D'), [self.norm('a', 'D')]) eq(self.glob('*', '*a'), cast(List[str], [])) # JLe: work around #230 eq(self.glob('a', '*', '*', '*a'), [self.norm('a', 'bcd', 'efg', 'ha')]) eq(self.glob('?a?', '*F'), map(self.norm, [os.path.join('aaa', 'zzzF'), os.path.join('aab', 'F')])) def test_glob_directory_with_trailing_slash(self) -> None: # We are verifying that when there is wildcard pattern which # ends with os.sep doesn't blow up. res = glob.glob(self.tempdir + '*' + os.sep) self.assertEqual(len(res), 1) # either of these results are reasonable self.assertIn(res[0], [self.tempdir, self.tempdir + os.sep]) @skip_unless_symlink def test_glob_broken_symlinks(self) -> None: eq = self.assertSequencesEqual_noorder eq(self.glob('sym*'), [self.norm('sym1'), self.norm('sym2')]) eq(self.glob('sym1'), [self.norm('sym1')]) eq(self.glob('sym2'), [self.norm('sym2')]) def test_main() -> None: run_unittest(GlobTests) if __name__ == "__main__": test_main() mypy-0.560/test-data/stdlib-samples/3.2/test/test_posixpath.py0000644€tŠÔÚ€2›s®0000005420013215007205030412 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from test import support, test_genericpath import posixpath import genericpath import imp imp.reload(posixpath) # Make sure we are using the local copy imp.reload(genericpath) import os import sys from posixpath import realpath, abspath, dirname, basename import posix from typing import cast, Any, TypeVar, Callable T = TypeVar('T') # An absolute path to a temporary filename for testing. We can't rely on TESTFN # being an absolute path, so we need this. ABSTFN = abspath(support.TESTFN) def skip_if_ABSTFN_contains_backslash( test: Callable[[T], None]) -> Callable[[T], None]: """ On Windows, posixpath.abspath still returns paths with backslashes instead of posix forward slashes. If this is the case, several tests fail, so skip them. 
""" found_backslash = '\\' in ABSTFN msg = "ABSTFN is not a posix path - tests fail" return [test, unittest.skip(msg)(test)][found_backslash] def safe_rmdir(dirname: str) -> None: try: os.rmdir(dirname) except OSError: pass class PosixPathTest(unittest.TestCase): def setUp(self) -> None: self.tearDown() def tearDown(self) -> None: for suffix in ["", "1", "2"]: support.unlink(support.TESTFN + suffix) safe_rmdir(support.TESTFN + suffix) def test_join(self) -> None: self.assertEqual(posixpath.join("/foo", "bar", "/bar", "baz"), "/bar/baz") self.assertEqual(posixpath.join("/foo", "bar", "baz"), "/foo/bar/baz") self.assertEqual(posixpath.join("/foo/", "bar/", "baz/"), "/foo/bar/baz/") self.assertEqual(posixpath.join(b"/foo", b"bar", b"/bar", b"baz"), b"/bar/baz") self.assertEqual(posixpath.join(b"/foo", b"bar", b"baz"), b"/foo/bar/baz") self.assertEqual(posixpath.join(b"/foo/", b"bar/", b"baz/"), b"/foo/bar/baz/") self.assertRaises(TypeError, posixpath.join, b"bytes", "str") self.assertRaises(TypeError, posixpath.join, "str", b"bytes") def test_split(self) -> None: self.assertEqual(posixpath.split("/foo/bar"), ("/foo", "bar")) self.assertEqual(posixpath.split("/"), ("/", "")) self.assertEqual(posixpath.split("foo"), ("", "foo")) self.assertEqual(posixpath.split("////foo"), ("////", "foo")) self.assertEqual(posixpath.split("//foo//bar"), ("//foo", "bar")) self.assertEqual(posixpath.split(b"/foo/bar"), (b"/foo", b"bar")) self.assertEqual(posixpath.split(b"/"), (b"/", b"")) self.assertEqual(posixpath.split(b"foo"), (b"", b"foo")) self.assertEqual(posixpath.split(b"////foo"), (b"////", b"foo")) self.assertEqual(posixpath.split(b"//foo//bar"), (b"//foo", b"bar")) def splitextTest(self, path: str, filename: str, ext: str) -> None: self.assertEqual(posixpath.splitext(path), (filename, ext)) self.assertEqual(posixpath.splitext("/" + path), ("/" + filename, ext)) self.assertEqual(posixpath.splitext("abc/" + path), ("abc/" + filename, ext)) self.assertEqual(posixpath.splitext("abc.def/" + path), ("abc.def/" + filename, ext)) self.assertEqual(posixpath.splitext("/abc.def/" + path), ("/abc.def/" + filename, ext)) self.assertEqual(posixpath.splitext(path + "/"), (filename + ext + "/", "")) pathb = bytes(path, "ASCII") filenameb = bytes(filename, "ASCII") extb = bytes(ext, "ASCII") self.assertEqual(posixpath.splitext(pathb), (filenameb, extb)) self.assertEqual(posixpath.splitext(b"/" + pathb), (b"/" + filenameb, extb)) self.assertEqual(posixpath.splitext(b"abc/" + pathb), (b"abc/" + filenameb, extb)) self.assertEqual(posixpath.splitext(b"abc.def/" + pathb), (b"abc.def/" + filenameb, extb)) self.assertEqual(posixpath.splitext(b"/abc.def/" + pathb), (b"/abc.def/" + filenameb, extb)) self.assertEqual(posixpath.splitext(pathb + b"/"), (filenameb + extb + b"/", b"")) def test_splitext(self) -> None: self.splitextTest("foo.bar", "foo", ".bar") self.splitextTest("foo.boo.bar", "foo.boo", ".bar") self.splitextTest("foo.boo.biff.bar", "foo.boo.biff", ".bar") self.splitextTest(".csh.rc", ".csh", ".rc") self.splitextTest("nodots", "nodots", "") self.splitextTest(".cshrc", ".cshrc", "") self.splitextTest("...manydots", "...manydots", "") self.splitextTest("...manydots.ext", "...manydots", ".ext") self.splitextTest(".", ".", "") self.splitextTest("..", "..", "") self.splitextTest("........", "........", "") self.splitextTest("", "", "") def test_isabs(self) -> None: self.assertIs(posixpath.isabs(""), False) self.assertIs(posixpath.isabs("/"), True) self.assertIs(posixpath.isabs("/foo"), True) 
self.assertIs(posixpath.isabs("/foo/bar"), True) self.assertIs(posixpath.isabs("foo/bar"), False) self.assertIs(posixpath.isabs(b""), False) self.assertIs(posixpath.isabs(b"/"), True) self.assertIs(posixpath.isabs(b"/foo"), True) self.assertIs(posixpath.isabs(b"/foo/bar"), True) self.assertIs(posixpath.isabs(b"foo/bar"), False) def test_basename(self) -> None: self.assertEqual(posixpath.basename("/foo/bar"), "bar") self.assertEqual(posixpath.basename("/"), "") self.assertEqual(posixpath.basename("foo"), "foo") self.assertEqual(posixpath.basename("////foo"), "foo") self.assertEqual(posixpath.basename("//foo//bar"), "bar") self.assertEqual(posixpath.basename(b"/foo/bar"), b"bar") self.assertEqual(posixpath.basename(b"/"), b"") self.assertEqual(posixpath.basename(b"foo"), b"foo") self.assertEqual(posixpath.basename(b"////foo"), b"foo") self.assertEqual(posixpath.basename(b"//foo//bar"), b"bar") def test_dirname(self) -> None: self.assertEqual(posixpath.dirname("/foo/bar"), "/foo") self.assertEqual(posixpath.dirname("/"), "/") self.assertEqual(posixpath.dirname("foo"), "") self.assertEqual(posixpath.dirname("////foo"), "////") self.assertEqual(posixpath.dirname("//foo//bar"), "//foo") self.assertEqual(posixpath.dirname(b"/foo/bar"), b"/foo") self.assertEqual(posixpath.dirname(b"/"), b"/") self.assertEqual(posixpath.dirname(b"foo"), b"") self.assertEqual(posixpath.dirname(b"////foo"), b"////") self.assertEqual(posixpath.dirname(b"//foo//bar"), b"//foo") def test_islink(self) -> None: self.assertIs(posixpath.islink(support.TESTFN + "1"), False) self.assertIs(posixpath.lexists(support.TESTFN + "2"), False) f = open(support.TESTFN + "1", "wb") try: f.write(b"foo") f.close() self.assertIs(posixpath.islink(support.TESTFN + "1"), False) if support.can_symlink(): os.symlink(support.TESTFN + "1", support.TESTFN + "2") self.assertIs(posixpath.islink(support.TESTFN + "2"), True) os.remove(support.TESTFN + "1") self.assertIs(posixpath.islink(support.TESTFN + "2"), True) self.assertIs(posixpath.exists(support.TESTFN + "2"), False) self.assertIs(posixpath.lexists(support.TESTFN + "2"), True) finally: if not f.closed: f.close() @staticmethod def _create_file(filename: str) -> None: with open(filename, 'wb') as f: f.write(b'foo') def test_samefile(self) -> None: test_fn = support.TESTFN + "1" self._create_file(test_fn) self.assertTrue(posixpath.samefile(test_fn, test_fn)) self.assertRaises(TypeError, posixpath.samefile) @unittest.skipIf( sys.platform.startswith('win'), "posixpath.samefile does not work on links in Windows") @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") def test_samefile_on_links(self) -> None: test_fn1 = support.TESTFN + "1" test_fn2 = support.TESTFN + "2" self._create_file(test_fn1) os.symlink(test_fn1, test_fn2) self.assertTrue(posixpath.samefile(test_fn1, test_fn2)) os.remove(test_fn2) self._create_file(test_fn2) self.assertFalse(posixpath.samefile(test_fn1, test_fn2)) def test_samestat(self) -> None: test_fn = support.TESTFN + "1" self._create_file(test_fn) test_fns = [test_fn]*2 stats = map(os.stat, test_fns) self.assertTrue(posixpath.samestat(*stats)) @unittest.skipIf( sys.platform.startswith('win'), "posixpath.samestat does not work on links in Windows") @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") def test_samestat_on_links(self) -> None: test_fn1 = support.TESTFN + "1" test_fn2 = support.TESTFN + "2" self._create_file(test_fn1) test_fns = [test_fn1, test_fn2] cast(Any, os.symlink)(*test_fns) stats = map(os.stat, 
test_fns) self.assertTrue(posixpath.samestat(*stats)) os.remove(test_fn2) self._create_file(test_fn2) stats = map(os.stat, test_fns) self.assertFalse(posixpath.samestat(*stats)) self.assertRaises(TypeError, posixpath.samestat) def test_ismount(self) -> None: self.assertIs(posixpath.ismount("/"), True) self.assertIs(posixpath.ismount(b"/"), True) def test_ismount_non_existent(self) -> None: # Non-existent mountpoint. self.assertIs(posixpath.ismount(ABSTFN), False) try: os.mkdir(ABSTFN) self.assertIs(posixpath.ismount(ABSTFN), False) finally: safe_rmdir(ABSTFN) @unittest.skipUnless(support.can_symlink(), "Test requires symlink support") def test_ismount_symlinks(self) -> None: # Symlinks are never mountpoints. try: os.symlink("/", ABSTFN) self.assertIs(posixpath.ismount(ABSTFN), False) finally: os.unlink(ABSTFN) @unittest.skipIf(posix is None, "Test requires posix module") def test_ismount_different_device(self) -> None: # Simulate the path being on a different device from its parent by # mocking out st_dev. save_lstat = os.lstat def fake_lstat(path): st_ino = 0 st_dev = 0 if path == ABSTFN: st_dev = 1 st_ino = 1 return posix.stat_result((0, st_ino, st_dev, 0, 0, 0, 0, 0, 0, 0)) try: setattr(os, 'lstat', fake_lstat) # mypy: can't modify os directly self.assertIs(posixpath.ismount(ABSTFN), True) finally: setattr(os, 'lstat', save_lstat) def test_expanduser(self) -> None: self.assertEqual(posixpath.expanduser("foo"), "foo") self.assertEqual(posixpath.expanduser(b"foo"), b"foo") try: import pwd except ImportError: pass else: self.assertIsInstance(posixpath.expanduser("~/"), str) self.assertIsInstance(posixpath.expanduser(b"~/"), bytes) # if home directory == root directory, this test makes no sense if posixpath.expanduser("~") != '/': self.assertEqual( posixpath.expanduser("~") + "/", posixpath.expanduser("~/") ) self.assertEqual( posixpath.expanduser(b"~") + b"/", posixpath.expanduser(b"~/") ) self.assertIsInstance(posixpath.expanduser("~root/"), str) self.assertIsInstance(posixpath.expanduser("~foo/"), str) self.assertIsInstance(posixpath.expanduser(b"~root/"), bytes) self.assertIsInstance(posixpath.expanduser(b"~foo/"), bytes) with support.EnvironmentVarGuard() as env: env['HOME'] = '/' self.assertEqual(posixpath.expanduser("~"), "/") # expanduser should fall back to using the password database del env['HOME'] home = pwd.getpwuid(os.getuid()).pw_dir self.assertEqual(posixpath.expanduser("~"), home) def test_normpath(self) -> None: self.assertEqual(posixpath.normpath(""), ".") self.assertEqual(posixpath.normpath("/"), "/") self.assertEqual(posixpath.normpath("//"), "//") self.assertEqual(posixpath.normpath("///"), "/") self.assertEqual(posixpath.normpath("///foo/.//bar//"), "/foo/bar") self.assertEqual(posixpath.normpath("///foo/.//bar//.//..//.//baz"), "/foo/baz") self.assertEqual(posixpath.normpath("///..//./foo/.//bar"), "/foo/bar") self.assertEqual(posixpath.normpath(b""), b".") self.assertEqual(posixpath.normpath(b"/"), b"/") self.assertEqual(posixpath.normpath(b"//"), b"//") self.assertEqual(posixpath.normpath(b"///"), b"/") self.assertEqual(posixpath.normpath(b"///foo/.//bar//"), b"/foo/bar") self.assertEqual(posixpath.normpath(b"///foo/.//bar//.//..//.//baz"), b"/foo/baz") self.assertEqual(posixpath.normpath(b"///..//./foo/.//bar"), b"/foo/bar") @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") @skip_if_ABSTFN_contains_backslash def test_realpath_basic(self) -> None: # Basic operation. 
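        # Doctest-style sketch of the behaviour exercised next (the paths are
        # the test's own ABSTFN fixture; the literal values are illustrative only):
        # >>> os.symlink(ABSTFN + "1", ABSTFN)    # ABSTFN becomes a dangling link
        # >>> realpath(ABSTFN) == ABSTFN + "1"    # the link itself is resolved
        # True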
try: os.symlink(ABSTFN+"1", ABSTFN) self.assertEqual(realpath(ABSTFN), ABSTFN+"1") finally: support.unlink(ABSTFN) @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") @skip_if_ABSTFN_contains_backslash def test_realpath_relative(self) -> None: try: os.symlink(posixpath.relpath(ABSTFN+"1"), ABSTFN) self.assertEqual(realpath(ABSTFN), ABSTFN+"1") finally: support.unlink(ABSTFN) @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") @skip_if_ABSTFN_contains_backslash def test_realpath_symlink_loops(self) -> None: # Bug #930024, return the path unchanged if we get into an infinite # symlink loop. try: old_path = abspath('.') os.symlink(ABSTFN, ABSTFN) self.assertEqual(realpath(ABSTFN), ABSTFN) os.symlink(ABSTFN+"1", ABSTFN+"2") os.symlink(ABSTFN+"2", ABSTFN+"1") self.assertEqual(realpath(ABSTFN+"1"), ABSTFN+"1") self.assertEqual(realpath(ABSTFN+"2"), ABSTFN+"2") # Test using relative path as well. os.chdir(dirname(ABSTFN)) self.assertEqual(realpath(basename(ABSTFN)), ABSTFN) finally: os.chdir(old_path) support.unlink(ABSTFN) support.unlink(ABSTFN+"1") support.unlink(ABSTFN+"2") @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") @skip_if_ABSTFN_contains_backslash def test_realpath_resolve_parents(self) -> None: # We also need to resolve any symlinks in the parents of a relative # path passed to realpath. E.g.: current working directory is # /usr/doc with 'doc' being a symlink to /usr/share/doc. We call # realpath("a"). This should return /usr/share/doc/a/. try: old_path = abspath('.') os.mkdir(ABSTFN) os.mkdir(ABSTFN + "/y") os.symlink(ABSTFN + "/y", ABSTFN + "/k") os.chdir(ABSTFN + "/k") self.assertEqual(realpath("a"), ABSTFN + "/y/a") finally: os.chdir(old_path) support.unlink(ABSTFN + "/k") safe_rmdir(ABSTFN + "/y") safe_rmdir(ABSTFN) @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") @skip_if_ABSTFN_contains_backslash def test_realpath_resolve_before_normalizing(self) -> None: # Bug #990669: Symbolic links should be resolved before we # normalize the path. E.g.: if we have directories 'a', 'k' and 'y' # in the following hierarchy: # a/k/y # # and a symbolic link 'link-y' pointing to 'y' in directory 'a', # then realpath("link-y/..") should return 'k', not 'a'. try: old_path = abspath('.') os.mkdir(ABSTFN) os.mkdir(ABSTFN + "/k") os.mkdir(ABSTFN + "/k/y") os.symlink(ABSTFN + "/k/y", ABSTFN + "/link-y") # Absolute path. self.assertEqual(realpath(ABSTFN + "/link-y/.."), ABSTFN + "/k") # Relative path. os.chdir(dirname(ABSTFN)) self.assertEqual(realpath(basename(ABSTFN) + "/link-y/.."), ABSTFN + "/k") finally: os.chdir(old_path) support.unlink(ABSTFN + "/link-y") safe_rmdir(ABSTFN + "/k/y") safe_rmdir(ABSTFN + "/k") safe_rmdir(ABSTFN) @unittest.skipUnless(hasattr(os, "symlink"), "Missing symlink implementation") @skip_if_ABSTFN_contains_backslash def test_realpath_resolve_first(self) -> None: # Bug #1213894: The first component of the path, if not absolute, # must be resolved too. 
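        # Sketch of the scenario set up below: ABSTFN and ABSTFN/k are real
        # directories and ABSTFN + "link" is a symlink back to ABSTFN; with the
        # cwd at dirname(ABSTFN), realpath must resolve the leading
        # basename(ABSTFN) + "link" component itself, so that e.g.
        # >>> realpath(base + "link/k") == ABSTFN + "/k"
        # True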
try: old_path = abspath('.') os.mkdir(ABSTFN) os.mkdir(ABSTFN + "/k") os.symlink(ABSTFN, ABSTFN + "link") os.chdir(dirname(ABSTFN)) base = basename(ABSTFN) self.assertEqual(realpath(base + "link"), ABSTFN) self.assertEqual(realpath(base + "link/k"), ABSTFN + "/k") finally: os.chdir(old_path) support.unlink(ABSTFN + "link") safe_rmdir(ABSTFN + "/k") safe_rmdir(ABSTFN) def test_relpath(self) -> None: real_getcwd = os.getcwd # mypy: can't modify os directly setattr(os, 'getcwd', lambda: r"/home/user/bar") try: curdir = os.path.split(os.getcwd())[-1] self.assertRaises(ValueError, posixpath.relpath, "") self.assertEqual(posixpath.relpath("a"), "a") self.assertEqual(posixpath.relpath(posixpath.abspath("a")), "a") self.assertEqual(posixpath.relpath("a/b"), "a/b") self.assertEqual(posixpath.relpath("../a/b"), "../a/b") self.assertEqual(posixpath.relpath("a", "../b"), "../"+curdir+"/a") self.assertEqual(posixpath.relpath("a/b", "../c"), "../"+curdir+"/a/b") self.assertEqual(posixpath.relpath("a", "b/c"), "../../a") self.assertEqual(posixpath.relpath("a", "a"), ".") self.assertEqual(posixpath.relpath("/foo/bar/bat", "/x/y/z"), '../../../foo/bar/bat') self.assertEqual(posixpath.relpath("/foo/bar/bat", "/foo/bar"), 'bat') self.assertEqual(posixpath.relpath("/foo/bar/bat", "/"), 'foo/bar/bat') self.assertEqual(posixpath.relpath("/", "/foo/bar/bat"), '../../..') self.assertEqual(posixpath.relpath("/foo/bar/bat", "/x"), '../foo/bar/bat') self.assertEqual(posixpath.relpath("/x", "/foo/bar/bat"), '../../../x') self.assertEqual(posixpath.relpath("/", "/"), '.') self.assertEqual(posixpath.relpath("/a", "/a"), '.') self.assertEqual(posixpath.relpath("/a/b", "/a/b"), '.') finally: setattr(os, 'getcwd', real_getcwd) def test_relpath_bytes(self) -> None: real_getcwdb = os.getcwdb # mypy: can't modify os directly setattr(os, 'getcwdb', lambda: br"/home/user/bar") try: curdir = os.path.split(os.getcwdb())[-1] self.assertRaises(ValueError, posixpath.relpath, b"") self.assertEqual(posixpath.relpath(b"a"), b"a") self.assertEqual(posixpath.relpath(posixpath.abspath(b"a")), b"a") self.assertEqual(posixpath.relpath(b"a/b"), b"a/b") self.assertEqual(posixpath.relpath(b"../a/b"), b"../a/b") self.assertEqual(posixpath.relpath(b"a", b"../b"), b"../"+curdir+b"/a") self.assertEqual(posixpath.relpath(b"a/b", b"../c"), b"../"+curdir+b"/a/b") self.assertEqual(posixpath.relpath(b"a", b"b/c"), b"../../a") self.assertEqual(posixpath.relpath(b"a", b"a"), b".") self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/x/y/z"), b'../../../foo/bar/bat') self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/foo/bar"), b'bat') self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/"), b'foo/bar/bat') self.assertEqual(posixpath.relpath(b"/", b"/foo/bar/bat"), b'../../..') self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/x"), b'../foo/bar/bat') self.assertEqual(posixpath.relpath(b"/x", b"/foo/bar/bat"), b'../../../x') self.assertEqual(posixpath.relpath(b"/", b"/"), b'.') self.assertEqual(posixpath.relpath(b"/a", b"/a"), b'.') self.assertEqual(posixpath.relpath(b"/a/b", b"/a/b"), b'.') self.assertRaises(TypeError, posixpath.relpath, b"bytes", "str") self.assertRaises(TypeError, posixpath.relpath, "str", b"bytes") finally: setattr(os, 'getcwdb', real_getcwdb) def test_sameopenfile(self) -> None: fname = support.TESTFN + "1" with open(fname, "wb") as a, open(fname, "wb") as b: self.assertTrue(posixpath.sameopenfile(a.fileno(), b.fileno())) class PosixCommonTest(test_genericpath.CommonTest): pathmodule = posixpath attributes = ['relpath', 
'samefile', 'sameopenfile', 'samestat'] def test_main() -> None: support.run_unittest(PosixPathTest, PosixCommonTest) if __name__=="__main__": test_main() mypy-0.560/test-data/stdlib-samples/3.2/test/test_pprint.py0000644€tŠÔÚ€2›s®0000006514313215007205027717 0ustar jukkaDROPBOX\Domain Users00000000000000import pprint import test.support import unittest import test.test_set import random import collections import itertools from typing import List, Any, Dict, Tuple, cast, Callable # list, tuple and dict subclasses that do or don't overwrite __repr__ class list2(list): pass class list3(list): def __repr__(self) -> str: return list.__repr__(self) class tuple2(tuple): pass class tuple3(tuple): def __repr__(self) -> str: return tuple.__repr__(self) class dict2(dict): pass class dict3(dict): def __repr__(self) -> str: return dict.__repr__(self) class Unorderable: def __repr__(self) -> str: return str(id(self)) class QueryTestCase(unittest.TestCase): def setUp(self) -> None: self.a = list(range(100)) # type: List[Any] self.b = list(range(200)) # type: List[Any] self.a[-12] = self.b def test_basic(self) -> None: # Verify .isrecursive() and .isreadable() w/o recursion pp = pprint.PrettyPrinter() for safe in (2, 2.0, complex(0.0, 2.0), "abc", [3], (2,2), {3: 3}, "yaddayadda", self.a, self.b): # module-level convenience functions self.assertFalse(pprint.isrecursive(safe), "expected not isrecursive for %r" % (safe,)) self.assertTrue(pprint.isreadable(safe), "expected isreadable for %r" % (safe,)) # PrettyPrinter methods self.assertFalse(pp.isrecursive(safe), "expected not isrecursive for %r" % (safe,)) self.assertTrue(pp.isreadable(safe), "expected isreadable for %r" % (safe,)) def test_knotted(self) -> None: # Verify .isrecursive() and .isreadable() w/ recursion # Tie a knot. self.b[67] = self.a # Messy dict. self.d = {} # type: Dict[int, dict] self.d[0] = self.d[1] = self.d[2] = self.d pp = pprint.PrettyPrinter() for icky in self.a, self.b, self.d, (self.d, self.d): self.assertTrue(pprint.isrecursive(icky), "expected isrecursive") self.assertFalse(pprint.isreadable(icky), "expected not isreadable") self.assertTrue(pp.isrecursive(icky), "expected isrecursive") self.assertFalse(pp.isreadable(icky), "expected not isreadable") # Break the cycles. self.d.clear() del self.a[:] del self.b[:] for safe in self.a, self.b, self.d, (self.d, self.d): # module-level convenience functions self.assertFalse(pprint.isrecursive(safe), "expected not isrecursive for %r" % (safe,)) self.assertTrue(pprint.isreadable(safe), "expected isreadable for %r" % (safe,)) # PrettyPrinter methods self.assertFalse(pp.isrecursive(safe), "expected not isrecursive for %r" % (safe,)) self.assertTrue(pp.isreadable(safe), "expected isreadable for %r" % (safe,)) def test_unreadable(self) -> None: # Not recursive but not readable anyway pp = pprint.PrettyPrinter() for unreadable in type(3), pprint, pprint.isrecursive: # module-level convenience functions self.assertFalse(pprint.isrecursive(unreadable), "expected not isrecursive for %r" % (unreadable,)) self.assertFalse(pprint.isreadable(unreadable), "expected not isreadable for %r" % (unreadable,)) # PrettyPrinter methods self.assertFalse(pp.isrecursive(unreadable), "expected not isrecursive for %r" % (unreadable,)) self.assertFalse(pp.isreadable(unreadable), "expected not isreadable for %r" % (unreadable,)) def test_same_as_repr(self) -> None: # Simple objects, small containers and classes that overwrite __repr__ # For those the result should be the same as repr(). # Ahem. 
The docs don't say anything about that -- this appears to # be testing an implementation quirk. Starting in Python 2.5, it's # not true for dicts: pprint always sorts dicts by key now; before, # it sorted a dict display if and only if the display required # multiple lines. For that reason, dicts with more than one element # aren't tested here. for simple in (0, 0, complex(0.0), 0.0, "", b"", (), tuple2(), tuple3(), [], list2(), list3(), {}, dict2(), dict3(), self.assertTrue, pprint, -6, -6, complex(-6.,-6.), -1.5, "x", b"x", (3,), [3], {3: 6}, (1,2), [3,4], {5: 6}, tuple2((1,2)), tuple3((1,2)), tuple3(range(100)), # type: ignore [3,4], list2(cast(Any, [3,4])), list3(cast(Any, [3,4])), list3(cast(Any, range(100))), dict2(cast(Any, {5: 6})), dict3(cast(Any, {5: 6})), # JLe: work around mypy issue #233 range(10, -11, -1) ): native = repr(simple) for function in "pformat", "saferepr": f = getattr(pprint, function) got = f(simple) self.assertEqual(native, got, "expected %s got %s from pprint.%s" % (native, got, function)) def test_basic_line_wrap(self) -> None: # verify basic line-wrapping operation o = {'RPM_cal': 0, 'RPM_cal2': 48059, 'Speed_cal': 0, 'controldesk_runtime_us': 0, 'main_code_runtime_us': 0, 'read_io_runtime_us': 0, 'write_io_runtime_us': 43690} exp = """\ {'RPM_cal': 0, 'RPM_cal2': 48059, 'Speed_cal': 0, 'controldesk_runtime_us': 0, 'main_code_runtime_us': 0, 'read_io_runtime_us': 0, 'write_io_runtime_us': 43690}""" # JLe: work around mypy issue #232 for type in cast(List[Any], [dict, dict2]): self.assertEqual(pprint.pformat(type(o)), exp) o2 = range(100) exp = '[%s]' % ',\n '.join(map(str, o2)) for type in cast(List[Any], [list, list2]): self.assertEqual(pprint.pformat(type(o2)), exp) o3 = tuple(range(100)) exp = '(%s)' % ',\n '.join(map(str, o3)) for type in cast(List[Any], [tuple, tuple2]): self.assertEqual(pprint.pformat(type(o3)), exp) # indent parameter o4 = range(100) exp = '[ %s]' % ',\n '.join(map(str, o4)) for type in cast(List[Any], [list, list2]): self.assertEqual(pprint.pformat(type(o4), indent=4), exp) def test_nested_indentations(self) -> None: o1 = list(range(10)) o2 = {'first':1, 'second':2, 'third':3} o = [o1, o2] expected = """\ [ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], { 'first': 1, 'second': 2, 'third': 3}]""" self.assertEqual(pprint.pformat(o, indent=4, width=42), expected) def test_sorted_dict(self) -> None: # Starting in Python 2.5, pprint sorts dict displays by key regardless # of how small the dictionary may be. # Before the change, on 32-bit Windows pformat() gave order # 'a', 'c', 'b' here, so this test failed. d = {'a': 1, 'b': 1, 'c': 1} self.assertEqual(pprint.pformat(d), "{'a': 1, 'b': 1, 'c': 1}") self.assertEqual(pprint.pformat([d, d]), "[{'a': 1, 'b': 1, 'c': 1}, {'a': 1, 'b': 1, 'c': 1}]") # The next one is kind of goofy. The sorted order depends on the # alphabetic order of type names: "int" < "str" < "tuple". Before # Python 2.5, this was in the test_same_as_repr() test. It's worth # keeping around for now because it's one of few tests of pprint # against a crazy mix of types. 
self.assertEqual(pprint.pformat({"xy\tab\n": (3,), 5: [[]], (): {}}), r"{5: [[]], 'xy\tab\n': (3,), (): {}}") def test_ordered_dict(self) -> None: words = 'the quick brown fox jumped over a lazy dog'.split() d = collections.OrderedDict(zip(words, itertools.count())) self.assertEqual(pprint.pformat(d), """\ {'the': 0, 'quick': 1, 'brown': 2, 'fox': 3, 'jumped': 4, 'over': 5, 'a': 6, 'lazy': 7, 'dog': 8}""") def test_subclassing(self) -> None: o = {'names with spaces': 'should be presented using repr()', 'others.should.not.be': 'like.this'} exp = """\ {'names with spaces': 'should be presented using repr()', others.should.not.be: like.this}""" self.assertEqual(DottedPrettyPrinter().pformat(o), exp) @test.support.cpython_only def test_set_reprs(self) -> None: # This test creates a complex arrangement of frozensets and # compares the pretty-printed repr against a string hard-coded in # the test. The hard-coded repr depends on the sort order of # frozensets. # # However, as the docs point out: "Since sets only define # partial ordering (subset relationships), the output of the # list.sort() method is undefined for lists of sets." # # In a nutshell, the test assumes frozenset({0}) will always # sort before frozenset({1}), but: # # >>> frozenset({0}) < frozenset({1}) # False # >>> frozenset({1}) < frozenset({0}) # False # # Consequently, this test is fragile and # implementation-dependent. Small changes to Python's sort # algorithm cause the test to fail when it should pass. self.assertEqual(pprint.pformat(set()), 'set()') self.assertEqual(pprint.pformat(set(range(3))), '{0, 1, 2}') self.assertEqual(pprint.pformat(frozenset()), 'frozenset()') self.assertEqual(pprint.pformat(frozenset(range(3))), 'frozenset({0, 1, 2})') cube_repr_tgt = """\ {frozenset(): frozenset({frozenset({2}), frozenset({0}), frozenset({1})}), frozenset({0}): frozenset({frozenset(), frozenset({0, 2}), frozenset({0, 1})}), frozenset({1}): frozenset({frozenset(), frozenset({1, 2}), frozenset({0, 1})}), frozenset({2}): frozenset({frozenset(), frozenset({1, 2}), frozenset({0, 2})}), frozenset({1, 2}): frozenset({frozenset({2}), frozenset({1}), frozenset({0, 1, 2})}), frozenset({0, 2}): frozenset({frozenset({2}), frozenset({0}), frozenset({0, 1, 2})}), frozenset({0, 1}): frozenset({frozenset({0}), frozenset({1}), frozenset({0, 1, 2})}), frozenset({0, 1, 2}): frozenset({frozenset({1, 2}), frozenset({0, 2}), frozenset({0, 1})})}""" cube = test.test_set.cube(3) self.assertEqual(pprint.pformat(cube), cube_repr_tgt) cubo_repr_tgt = """\ {frozenset({frozenset({0, 2}), frozenset({0})}): frozenset({frozenset({frozenset({0, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({0}), frozenset({0, 1})}), frozenset({frozenset(), frozenset({0})}), frozenset({frozenset({2}), frozenset({0, 2})})}), frozenset({frozenset({0, 1}), frozenset({1})}): frozenset({frozenset({frozenset({0, 1}), frozenset({0, 1, 2})}), frozenset({frozenset({0}), frozenset({0, 1})}), frozenset({frozenset({1}), frozenset({1, 2})}), frozenset({frozenset(), frozenset({1})})}), frozenset({frozenset({1, 2}), frozenset({1})}): frozenset({frozenset({frozenset({1, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({2}), frozenset({1, 2})}), frozenset({frozenset(), frozenset({1})}), frozenset({frozenset({1}), frozenset({0, 1})})}), frozenset({frozenset({1, 2}), frozenset({2})}): frozenset({frozenset({frozenset({1, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({1}), frozenset({1, 2})}), frozenset({frozenset({2}), frozenset({0, 2})}), frozenset({frozenset(), frozenset({2})})}), 
frozenset({frozenset(), frozenset({0})}): frozenset({frozenset({frozenset({0}), frozenset({0, 1})}), frozenset({frozenset({0}), frozenset({0, 2})}), frozenset({frozenset(), frozenset({1})}), frozenset({frozenset(), frozenset({2})})}), frozenset({frozenset(), frozenset({1})}): frozenset({frozenset({frozenset(), frozenset({0})}), frozenset({frozenset({1}), frozenset({1, 2})}), frozenset({frozenset(), frozenset({2})}), frozenset({frozenset({1}), frozenset({0, 1})})}), frozenset({frozenset({2}), frozenset()}): frozenset({frozenset({frozenset({2}), frozenset({1, 2})}), frozenset({frozenset(), frozenset({0})}), frozenset({frozenset(), frozenset({1})}), frozenset({frozenset({2}), frozenset({0, 2})})}), frozenset({frozenset({0, 1, 2}), frozenset({0, 1})}): frozenset({frozenset({frozenset({1, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({0, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({0}), frozenset({0, 1})}), frozenset({frozenset({1}), frozenset({0, 1})})}), frozenset({frozenset({0}), frozenset({0, 1})}): frozenset({frozenset({frozenset(), frozenset({0})}), frozenset({frozenset({0, 1}), frozenset({0, 1, 2})}), frozenset({frozenset({0}), frozenset({0, 2})}), frozenset({frozenset({1}), frozenset({0, 1})})}), frozenset({frozenset({2}), frozenset({0, 2})}): frozenset({frozenset({frozenset({0, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({2}), frozenset({1, 2})}), frozenset({frozenset({0}), frozenset({0, 2})}), frozenset({frozenset(), frozenset({2})})}), frozenset({frozenset({0, 1, 2}), frozenset({0, 2})}): frozenset({frozenset({frozenset({1, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({0, 1}), frozenset({0, 1, 2})}), frozenset({frozenset({0}), frozenset({0, 2})}), frozenset({frozenset({2}), frozenset({0, 2})})}), frozenset({frozenset({1, 2}), frozenset({0, 1, 2})}): frozenset({frozenset({frozenset({0, 2}), frozenset({0, 1, 2})}), frozenset({frozenset({0, 1}), frozenset({0, 1, 2})}), frozenset({frozenset({2}), frozenset({1, 2})}), frozenset({frozenset({1}), frozenset({1, 2})})})}""" cubo = test.test_set.linegraph(cube) self.assertEqual(pprint.pformat(cubo), cubo_repr_tgt) def test_depth(self) -> None: nested_tuple = (1, (2, (3, (4, (5, 6))))) nested_dict = {1: {2: {3: {4: {5: {6: 6}}}}}} nested_list = [1, [2, [3, [4, [5, [6, []]]]]]] self.assertEqual(pprint.pformat(nested_tuple), repr(nested_tuple)) self.assertEqual(pprint.pformat(nested_dict), repr(nested_dict)) self.assertEqual(pprint.pformat(nested_list), repr(nested_list)) lv1_tuple = '(1, (...))' lv1_dict = '{1: {...}}' lv1_list = '[1, [...]]' self.assertEqual(pprint.pformat(nested_tuple, depth=1), lv1_tuple) self.assertEqual(pprint.pformat(nested_dict, depth=1), lv1_dict) self.assertEqual(pprint.pformat(nested_list, depth=1), lv1_list) def test_sort_unorderable_values(self) -> None: # Issue 3976: sorted pprints fail for unorderable values. 
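        # Hypothetical illustration of the failure mode this test guards
        # against (not part of the fixture): Unorderable defines only
        # __repr__, so ordering comparisons raise under Python 3, roughly:
        # >>> sorted([Unorderable(), Unorderable()])
        # Traceback (most recent call last):
        #   ...
        # TypeError: unorderable types: Unorderable() < Unorderable()
        # The expected strings below are built from the keys sorted by id(),
        # which is the ordering these assertions accept.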
n = 20 keys = [Unorderable() for i in range(n)] random.shuffle(keys) skeys = sorted(keys, key=id) clean = lambda s: s.replace(' ', '').replace('\n','') # type: Callable[[str], str] self.assertEqual(clean(pprint.pformat(set(keys))), '{' + ','.join(map(repr, skeys)) + '}') self.assertEqual(clean(pprint.pformat(frozenset(keys))), 'frozenset({' + ','.join(map(repr, skeys)) + '})') self.assertEqual(clean(pprint.pformat(dict.fromkeys(keys))), '{' + ','.join('%r:None' % k for k in skeys) + '}') class DottedPrettyPrinter(pprint.PrettyPrinter): def format(self, object: object, context: Dict[int, Any], maxlevels: int, level: int) -> Tuple[str, int, int]: if isinstance(object, str): if ' ' in object: return repr(object), 1, 0 else: return object, 0, 0 else: return pprint.PrettyPrinter.format( self, object, context, maxlevels, level) def test_main() -> None: test.support.run_unittest(QueryTestCase) if __name__ == "__main__": test_main() mypy-0.560/test-data/stdlib-samples/3.2/test/test_random.py0000644€tŠÔÚ€2›s®0000005203213215007205027654 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 import unittest import random import time import pickle import warnings from math import log, exp, pi, fsum, sin from test import support from typing import Any, Dict, List, Callable, Generic, TypeVar, cast RT = TypeVar('RT', random.Random, random.SystemRandom) class TestBasicOps(unittest.TestCase, Generic[RT]): # Superclass with tests common to all generators. # Subclasses must arrange for self.gen to retrieve the Random instance # to be tested. gen = None # type: RT # Either Random or SystemRandom def randomlist(self, n: int) -> List[float]: """Helper function to make a list of random numbers""" return [self.gen.random() for i in range(n)] def test_autoseed(self) -> None: self.gen.seed() state1 = self.gen.getstate() time.sleep(0.1) self.gen.seed() # diffent seeds at different times state2 = self.gen.getstate() self.assertNotEqual(state1, state2) def test_saverestore(self) -> None: N = 1000 self.gen.seed() state = self.gen.getstate() randseq = self.randomlist(N) self.gen.setstate(state) # should regenerate the same sequence self.assertEqual(randseq, self.randomlist(N)) def test_seedargs(self) -> None: for arg in [None, 0, 0, 1, 1, -1, -1, 10**20, -(10**20), 3.14, complex(1., 2.), 'a', tuple('abc')]: self.gen.seed(arg) for arg in [list(range(3)), {'one': 1}]: self.assertRaises(TypeError, self.gen.seed, arg) self.assertRaises(TypeError, self.gen.seed, 1, 2, 3, 4) self.assertRaises(TypeError, type(self.gen), []) # type: ignore # mypy issue 1846 def test_choice(self) -> None: choice = self.gen.choice with self.assertRaises(IndexError): choice([]) self.assertEqual(choice([50]), 50) self.assertIn(choice([25, 75]), [25, 75]) def test_sample(self) -> None: # For the entire allowable range of 0 <= k <= N, validate that # the sample is of the correct length and contains only unique items N = 100 population = range(N) for k in range(N+1): s = self.gen.sample(population, k) self.assertEqual(len(s), k) uniq = set(s) self.assertEqual(len(uniq), k) self.assertTrue(uniq <= set(population)) self.assertEqual(self.gen.sample([], 0), []) # test edge case N==k==0 def test_sample_distribution(self) -> None: # For the entire allowable range of 0 <= k <= N, validate that # sample generates all possible permutations n = 5 pop = range(n) trials = 10000 # large num prevents false negatives without slowing normal case def factorial(n: int) -> int: if n == 0: return 1 return n * factorial(n - 1) for k in range(n): expected = 
factorial(n) // factorial(n-k) perms = {} # type: Dict[tuple, object] for i in range(trials): perms[tuple(self.gen.sample(pop, k))] = None if len(perms) == expected: break else: self.fail() def test_sample_inputs(self) -> None: # SF bug #801342 -- population can be any iterable defining __len__() self.gen.sample(set(range(20)), 2) self.gen.sample(range(20), 2) self.gen.sample(range(20), 2) self.gen.sample(str('abcdefghijklmnopqrst'), 2) self.gen.sample(tuple('abcdefghijklmnopqrst'), 2) def test_sample_on_dicts(self) -> None: self.assertRaises(TypeError, self.gen.sample, dict.fromkeys('abcdef'), 2) def test_gauss(self) -> None: # Ensure that the seed() method initializes all the hidden state. In # particular, through 2.2.1 it failed to reset a piece of state used # by (and only by) the .gauss() method. for seed in 1, 12, 123, 1234, 12345, 123456, 654321: self.gen.seed(seed) x1 = self.gen.random() y1 = self.gen.gauss(0, 1) self.gen.seed(seed) x2 = self.gen.random() y2 = self.gen.gauss(0, 1) self.assertEqual(x1, x2) self.assertEqual(y1, y2) def test_pickling(self) -> None: state = pickle.dumps(self.gen) origseq = [self.gen.random() for i in range(10)] newgen = pickle.loads(state) restoredseq = [newgen.random() for i in range(10)] self.assertEqual(origseq, restoredseq) def test_bug_1727780(self) -> None: # verify that version-2-pickles can be loaded # fine, whether they are created on 32-bit or 64-bit # platforms, and that version-3-pickles load fine. files = [("randv2_32.pck", 780), ("randv2_64.pck", 866), ("randv3.pck", 343)] for file, value in files: f = open(support.findfile(file),"rb") r = pickle.load(f) f.close() self.assertEqual(int(r.random()*1000), value) def test_bug_9025(self) -> None: # Had problem with an uneven distribution in int(n*random()) # Verify the fix by checking that distributions fall within expectations. n = 100000 randrange = self.gen.randrange k = sum(randrange(6755399441055744) % 3 == 2 for i in range(n)) self.assertTrue(0.30 < k/n and k/n < .37, (k/n)) class SystemRandom_TestBasicOps(TestBasicOps[random.SystemRandom]): gen = random.SystemRandom() def test_autoseed(self) -> None: # Doesn't need to do anything except not fail self.gen.seed() def test_saverestore(self) -> None: self.assertRaises(NotImplementedError, self.gen.getstate) self.assertRaises(NotImplementedError, self.gen.setstate, None) def test_seedargs(self) -> None: # Doesn't need to do anything except not fail self.gen.seed(100) def test_gauss(self) -> None: self.gen.gauss_next = None self.gen.seed(100) self.assertEqual(self.gen.gauss_next, None) def test_pickling(self) -> None: self.assertRaises(NotImplementedError, pickle.dumps, self.gen) def test_53_bits_per_float(self) -> None: # This should pass whenever a C double has 53 bit precision. span = 2 ** 53 # type: int cum = 0 for i in range(100): cum |= int(self.gen.random() * span) self.assertEqual(cum, span-1) def test_bigrand(self) -> None: # The randrange routine should build-up the required number of bits # in stages so that all bit positions are active. 
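        # How the accumulation check below works: each loop iteration ORs a
        # fresh randrange(2 ** 500) sample into "cum", so after 100 draws every
        # one of the 500 bit positions should have been set at least once;
        # asserting cum == span - 1 (all bits set) catches any bit position
        # that the generator never activates.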
span = 2 ** 500 # type: int cum = 0 for i in range(100): r = self.gen.randrange(span) self.assertTrue(0 <= r < span) cum |= r self.assertEqual(cum, span-1) def test_bigrand_ranges(self) -> None: for i in [40,80, 160, 200, 211, 250, 375, 512, 550]: start = self.gen.randrange(2 ** i) stop = self.gen.randrange(2 ** (i-2)) if stop <= start: return self.assertTrue(start <= self.gen.randrange(start, stop) < stop) def test_rangelimits(self) -> None: for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]: self.assertEqual(set(range(start,stop)), set([self.gen.randrange(start,stop) for i in range(100)])) def test_genrandbits(self) -> None: # Verify ranges for k in range(1, 1000): self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k) # Verify all bits active getbits = self.gen.getrandbits for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]: cum = 0 for i in range(100): cum |= getbits(span) self.assertEqual(cum, 2**span-1) # Verify argument checking self.assertRaises(TypeError, self.gen.getrandbits) self.assertRaises(TypeError, self.gen.getrandbits, 1, 2) self.assertRaises(ValueError, self.gen.getrandbits, 0) self.assertRaises(ValueError, self.gen.getrandbits, -1) self.assertRaises(TypeError, self.gen.getrandbits, 10.1) def test_randbelow_logic(self, _log: Callable[[float, float], float] = log, int: Callable[[float], int] = int) -> None: # check bitcount transition points: 2**i and 2**(i+1)-1 # show that: k = int(1.001 + _log(n, 2)) # is equal to or one greater than the number of bits in n for i in range(1, 1000): n = 1 << i # check an exact power of two numbits = i+1 k = int(1.00001 + _log(n, 2)) self.assertEqual(k, numbits) self.assertEqual(n, 2**(k-1)) n += n - 1 # check 1 below the next power of two k = int(1.00001 + _log(n, 2)) self.assertIn(k, [numbits, numbits+1]) self.assertTrue(2**k > n > 2**(k-2)) n -= n >> 15 # check a little farther below the next power of two k = int(1.00001 + _log(n, 2)) self.assertEqual(k, numbits) # note the stronger assertion self.assertTrue(2**k > n > 2**(k-1)) # note the stronger assertion class MersenneTwister_TestBasicOps(TestBasicOps[random.Random]): gen = random.Random() def test_guaranteed_stable(self) -> None: # These sequences are guaranteed to stay the same across versions of python self.gen.seed(3456147, version=1) self.assertEqual([self.gen.random().hex() for i in range(4)], ['0x1.ac362300d90d2p-1', '0x1.9d16f74365005p-1', '0x1.1ebb4352e4c4dp-1', '0x1.1a7422abf9c11p-1']) self.gen.seed("the quick brown fox", version=2) self.assertEqual([self.gen.random().hex() for i in range(4)], ['0x1.1239ddfb11b7cp-3', '0x1.b3cbb5c51b120p-4', '0x1.8c4f55116b60fp-1', '0x1.63eb525174a27p-1']) def test_setstate_first_arg(self) -> None: self.assertRaises(ValueError, self.gen.setstate, (1, None, None)) def test_setstate_middle_arg(self) -> None: # Wrong type, s/b tuple self.assertRaises(TypeError, self.gen.setstate, (2, None, None)) # Wrong length, s/b 625 self.assertRaises(ValueError, self.gen.setstate, (2, (1,2,3), None)) # Wrong type, s/b tuple of 625 ints self.assertRaises(TypeError, self.gen.setstate, (2, tuple(['a',]*625), None)) # Last element s/b an int also self.assertRaises(TypeError, self.gen.setstate, (2, cast(Any, (0,))*624+('a',), None)) def test_referenceImplementation(self) -> None: # Compare the python implementation with results from the original # code. Create 2000 53-bit precision random floats. Compare only # the last ten entries to show that the independent implementations # are tracking. 
Here is the main() function needed to create the # list of expected random numbers: # void main(void){ # int i; # unsigned long init[4]={61731, 24903, 614, 42143}, length=4; # init_by_array(init, length); # for (i=0; i<2000; i++) { # printf("%.15f ", genrand_res53()); # if (i%5==4) printf("\n"); # } # } expected = [0.45839803073713259, 0.86057815201978782, 0.92848331726782152, 0.35932681119782461, 0.081823493762449573, 0.14332226470169329, 0.084297823823520024, 0.53814864671831453, 0.089215024911993401, 0.78486196105372907] self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96)) actual = self.randomlist(2000)[-10:] for a, e in zip(actual, expected): self.assertAlmostEqual(a,e,places=14) def test_strong_reference_implementation(self) -> None: # Like test_referenceImplementation, but checks for exact bit-level # equality. This should pass on any box where C double contains # at least 53 bits of precision (the underlying algorithm suffers # no rounding errors -- all results are exact). from math import ldexp expected = [0x0eab3258d2231f, 0x1b89db315277a5, 0x1db622a5518016, 0x0b7f9af0d575bf, 0x029e4c4db82240, 0x04961892f5d673, 0x02b291598e4589, 0x11388382c15694, 0x02dad977c9e1fe, 0x191d96d4d334c6] self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96)) actual = self.randomlist(2000)[-10:] for a, e in zip(actual, expected): self.assertEqual(int(ldexp(a, 53)), e) def test_long_seed(self) -> None: # This is most interesting to run in debug mode, just to make sure # nothing blows up. Under the covers, a dynamically resized array # is allocated, consuming space proportional to the number of bits # in the seed. Unfortunately, that's a quadratic-time algorithm, # so don't make this horribly big. seed = (1 << (10000 * 8)) - 1 # about 10K bytes self.gen.seed(seed) def test_53_bits_per_float(self) -> None: # This should pass whenever a C double has 53 bit precision. span = 2 ** 53 # type: int cum = 0 for i in range(100): cum |= int(self.gen.random() * span) self.assertEqual(cum, span-1) def test_bigrand(self) -> None: # The randrange routine should build-up the required number of bits # in stages so that all bit positions are active. 
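# --- Illustrative sketch (not part of the original test file) ---
# The reference-implementation tests above seed with
#     61731 + (24903 << 32) + (614 << 64) + (42143 << 96)
# which is simply the four 32-bit words {61731, 24903, 614, 42143} from the
# C init_by_array() call packed, lowest word first, into one Python int.
# A small helper that makes the packing explicit (helper name is ours):
def _pack_init_words(words):
    seed = 0
    for shift, word in enumerate(words):
        seed += word << (32 * shift)
    return seed

assert _pack_init_words([61731, 24903, 614, 42143]) == \
    61731 + (24903 << 32) + (614 << 64) + (42143 << 96)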
span = 2 ** 500 # type: int cum = 0 for i in range(100): r = self.gen.randrange(span) self.assertTrue(0 <= r < span) cum |= r self.assertEqual(cum, span-1) def test_bigrand_ranges(self) -> None: for i in [40,80, 160, 200, 211, 250, 375, 512, 550]: start = self.gen.randrange(2 ** i) stop = self.gen.randrange(2 ** (i-2)) if stop <= start: return self.assertTrue(start <= self.gen.randrange(start, stop) < stop) def test_rangelimits(self) -> None: for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]: self.assertEqual(set(range(start,stop)), set([self.gen.randrange(start,stop) for i in range(100)])) def test_genrandbits(self) -> None: # Verify cross-platform repeatability self.gen.seed(1234567) self.assertEqual(self.gen.getrandbits(100), 97904845777343510404718956115) # Verify ranges for k in range(1, 1000): self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k) # Verify all bits active getbits = self.gen.getrandbits for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]: cum = 0 for i in range(100): cum |= getbits(span) self.assertEqual(cum, 2**span-1) # Verify argument checking self.assertRaises(TypeError, self.gen.getrandbits) self.assertRaises(TypeError, self.gen.getrandbits, 'a') self.assertRaises(TypeError, self.gen.getrandbits, 1, 2) self.assertRaises(ValueError, self.gen.getrandbits, 0) self.assertRaises(ValueError, self.gen.getrandbits, -1) def test_randbelow_logic(self, _log: Callable[[int, float], float] = log, int: Callable[[float], int] = int) -> None: # check bitcount transition points: 2**i and 2**(i+1)-1 # show that: k = int(1.001 + _log(n, 2)) # is equal to or one greater than the number of bits in n for i in range(1, 1000): n = 1 << i # check an exact power of two numbits = i+1 k = int(1.00001 + _log(n, 2)) self.assertEqual(k, numbits) self.assertEqual(n, 2**(k-1)) n += n - 1 # check 1 below the next power of two k = int(1.00001 + _log(n, 2)) self.assertIn(k, [numbits, numbits+1]) self.assertTrue(2**k > n > 2**(k-2)) n -= n >> 15 # check a little farther below the next power of two k = int(1.00001 + _log(n, 2)) self.assertEqual(k, numbits) # note the stronger assertion self.assertTrue(2**k > n > 2**(k-1)) # note the stronger assertion def test_randrange_bug_1590891(self) -> None: start = 1000000000000 stop = -100000000000000000000 step = -200 x = self.gen.randrange(start, stop, step) self.assertTrue(stop < x <= start) self.assertEqual((x+stop)%step, 0) def gamma(z: float, sqrt2pi: float = (2.0*pi)**0.5) -> float: # Reflection to right half of complex plane if z < 0.5: return pi / sin(pi*z) / gamma(1.0-z) # Lanczos approximation with g=7 az = z + (7.0 - 0.5) return az ** (z-0.5) / exp(az) * sqrt2pi * fsum([ 0.9999999999995183, 676.5203681218835 / z, -1259.139216722289 / (z+1.0), 771.3234287757674 / (z+2.0), -176.6150291498386 / (z+3.0), 12.50734324009056 / (z+4.0), -0.1385710331296526 / (z+5.0), 0.9934937113930748e-05 / (z+6.0), 0.1659470187408462e-06 / (z+7.0), ]) class TestDistributions(unittest.TestCase): def test_zeroinputs(self) -> None: # Verify that distributions can handle a series of zero inputs' g = random.Random() x = [g.random() for i in range(50)] + [0.0]*5 def patch() -> None: setattr(g, 'random', x[:].pop) patch(); g.uniform(1.0,10.0) patch(); g.paretovariate(1.0) patch(); g.expovariate(1.0) patch(); g.weibullvariate(1.0, 1.0) patch(); g.normalvariate(0.0, 1.0) patch(); g.gauss(0.0, 1.0) patch(); g.lognormvariate(0.0, 1.0) patch(); g.vonmisesvariate(0.0, 1.0) patch(); g.gammavariate(0.01, 1.0) patch(); g.gammavariate(1.0, 1.0) patch(); 
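# --- Illustrative sketch (not part of the original test file) ---
# gamma() above is a Lanczos approximation (g=7) used by the distribution
# tests.  A quick sanity check against the closed-form values
# Gamma(5) = 4! = 24 and Gamma(0.5) = sqrt(pi), plus math.gamma():
import math as _math_sketch

assert abs(gamma(5.0) - 24.0) < 1e-9
assert abs(gamma(0.5) - _math_sketch.sqrt(_math_sketch.pi)) < 1e-9
assert abs(gamma(3.7) - _math_sketch.gamma(3.7)) < 1e-9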
g.gammavariate(200.0, 1.0) patch(); g.betavariate(3.0, 3.0) patch(); g.triangular(0.0, 1.0, 1.0/3.0) def test_avg_std(self) -> None: # Use integration to test distribution average and standard deviation. # Only works for distributions which do not consume variates in pairs g = random.Random() N = 5000 x = [i/float(N) for i in range(1,N)] variate = None # type: Any for variate, args, mu, sigmasqrd in [ (g.uniform, (1.0,10.0), (10.0+1.0)/2, (10.0-1.0)**2/12), (g.triangular, (0.0, 1.0, 1.0/3.0), 4.0/9.0, 7.0/9.0/18.0), (g.expovariate, (1.5,), 1/1.5, 1/1.5**2), (g.paretovariate, (5.0,), 5.0/(5.0-1), 5.0/((5.0-1)**2*(5.0-2))), (g.weibullvariate, (1.0, 3.0), gamma(1+1/3.0), gamma(1+2/3.0)-gamma(1+1/3.0)**2) ]: setattr(g, 'random', x[:].pop) y = [] # type: List[float] for i in range(len(x)): try: y.append(variate(*args)) except IndexError: pass s1 = s2 = 0.0 for e in y: s1 += e s2 += (e - mu) ** 2 N = len(y) self.assertAlmostEqual(s1/N, mu, places=2) self.assertAlmostEqual(s2/(N-1), sigmasqrd, places=2) class TestModule(unittest.TestCase): def testMagicConstants(self) -> None: self.assertAlmostEqual(random.NV_MAGICCONST, 1.71552776992141) self.assertAlmostEqual(random.TWOPI, 6.28318530718) self.assertAlmostEqual(random.LOG4, 1.38629436111989) self.assertAlmostEqual(random.SG_MAGICCONST, 2.50407739677627) def test__all__(self) -> None: # tests validity but not completeness of the __all__ list self.assertTrue(set(random.__all__) <= set(dir(random))) def test_random_subclass_with_kwargs(self) -> None: # SF bug #1486663 -- this used to erroneously raise a TypeError class Subclass(random.Random): def __init__(self, newarg: object = None) -> None: random.Random.__init__(self) Subclass(newarg=1) def test_main(verbose: bool = None) -> None: testclasses = [MersenneTwister_TestBasicOps, TestDistributions, TestModule] try: random.SystemRandom().random() except NotImplementedError: pass else: testclasses.append(SystemRandom_TestBasicOps) support.run_unittest(*testclasses) # verify reference counting import sys if verbose and hasattr(sys, "gettotalrefcount"): counts = [None] * 5 # type: List[int] for i in range(len(counts)): support.run_unittest(*testclasses) counts[i] = sys.gettotalrefcount() print(counts) if __name__ == "__main__": test_main(verbose=True) mypy-0.560/test-data/stdlib-samples/3.2/test/test_set.py0000644€tŠÔÚ€2›s®0000017611513215007205027200 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest from test import support import gc import weakref import operator import copy import pickle from random import randrange, shuffle import sys import warnings import collections from typing import Set, Any class PassThru(Exception): pass def check_pass_thru(): raise PassThru yield 1 class BadCmp: def __hash__(self): return 1 def __eq__(self, other): raise RuntimeError class ReprWrapper: 'Used to test self-referential repr() calls' def __repr__(self): return repr(self.value) #class HashCountingInt(int): # 'int-like object that counts the number of times __hash__ is called' # def __init__(self, *args): # self.hash_count = 0 # def __hash__(self): # self.hash_count += 1 # return int.__hash__(self) class TestJointOps(unittest.TestCase): # Tests common to both set and frozenset def setUp(self): self.word = word = 'simsalabim' self.otherword = 'madagascar' self.letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' self.s = self.thetype(word) self.d = dict.fromkeys(word) def test_new_or_init(self): self.assertRaises(TypeError, self.thetype, [], 2) self.assertRaises(TypeError, set().__init__, 
a=1) def test_uniquification(self): actual = sorted(self.s) expected = sorted(self.d) self.assertEqual(actual, expected) self.assertRaises(PassThru, self.thetype, check_pass_thru()) self.assertRaises(TypeError, self.thetype, [[]]) def test_len(self): self.assertEqual(len(self.s), len(self.d)) def test_contains(self): for c in self.letters: self.assertEqual(c in self.s, c in self.d) self.assertRaises(TypeError, self.s.__contains__, [[]]) s = self.thetype([frozenset(self.letters)]) self.assertIn(self.thetype(self.letters), s) def test_union(self): u = self.s.union(self.otherword) for c in self.letters: self.assertEqual(c in u, c in self.d or c in self.otherword) self.assertEqual(self.s, self.thetype(self.word)) self.assertEqual(type(u), self.basetype) self.assertRaises(PassThru, self.s.union, check_pass_thru()) self.assertRaises(TypeError, self.s.union, [[]]) for C in set, frozenset, dict.fromkeys, str, list, tuple: self.assertEqual(self.thetype('abcba').union(C('cdc')), set('abcd')) self.assertEqual(self.thetype('abcba').union(C('efgfe')), set('abcefg')) self.assertEqual(self.thetype('abcba').union(C('ccb')), set('abc')) self.assertEqual(self.thetype('abcba').union(C('ef')), set('abcef')) self.assertEqual(self.thetype('abcba').union(C('ef'), C('fg')), set('abcefg')) # Issue #6573 x = self.thetype() self.assertEqual(x.union(set([1]), x, set([2])), self.thetype([1, 2])) def test_or(self): i = self.s.union(self.otherword) self.assertEqual(self.s | set(self.otherword), i) self.assertEqual(self.s | frozenset(self.otherword), i) try: self.s | self.otherword except TypeError: pass else: self.fail("s|t did not screen-out general iterables") def test_intersection(self): i = self.s.intersection(self.otherword) for c in self.letters: self.assertEqual(c in i, c in self.d and c in self.otherword) self.assertEqual(self.s, self.thetype(self.word)) self.assertEqual(type(i), self.basetype) self.assertRaises(PassThru, self.s.intersection, check_pass_thru()) for C in set, frozenset, dict.fromkeys, str, list, tuple: self.assertEqual(self.thetype('abcba').intersection(C('cdc')), set('cc')) self.assertEqual(self.thetype('abcba').intersection(C('efgfe')), set('')) self.assertEqual(self.thetype('abcba').intersection(C('ccb')), set('bc')) self.assertEqual(self.thetype('abcba').intersection(C('ef')), set('')) self.assertEqual(self.thetype('abcba').intersection(C('cbcf'), C('bag')), set('b')) s = self.thetype('abcba') z = s.intersection() if self.thetype == frozenset(): self.assertEqual(id(s), id(z)) else: self.assertNotEqual(id(s), id(z)) def test_isdisjoint(self): def f(s1, s2): 'Pure python equivalent of isdisjoint()' return not set(s1).intersection(s2) for larg in '', 'a', 'ab', 'abc', 'ababac', 'cdc', 'cc', 'efgfe', 'ccb', 'ef': s1 = self.thetype(larg) for rarg in '', 'a', 'ab', 'abc', 'ababac', 'cdc', 'cc', 'efgfe', 'ccb', 'ef': for C in set, frozenset, dict.fromkeys, str, list, tuple: s2 = C(rarg) actual = s1.isdisjoint(s2) expected = f(s1, s2) self.assertEqual(actual, expected) self.assertTrue(actual is True or actual is False) def test_and(self): i = self.s.intersection(self.otherword) self.assertEqual(self.s & set(self.otherword), i) self.assertEqual(self.s & frozenset(self.otherword), i) try: self.s & self.otherword except TypeError: pass else: self.fail("s&t did not screen-out general iterables") def test_difference(self): i = self.s.difference(self.otherword) for c in self.letters: self.assertEqual(c in i, c in self.d and c not in self.otherword) self.assertEqual(self.s, self.thetype(self.word)) 
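# --- Illustrative sketch (not part of the original test file) ---
# test_or/test_and above verify a deliberate asymmetry in the set API:
# the binary operators (|, &, -, ^) accept only other sets/frozensets,
# while the named methods (union, intersection, ...) accept any iterable.
_s_sketch = set('abc')
assert _s_sketch.union('bcd') == set('abcd')    # method: any iterable is fine
try:
    _s_sketch | 'bcd'                           # operator: must be a set
except TypeError:
    pass
else:
    raise AssertionError("set | str should have raised TypeError")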
self.assertEqual(type(i), self.basetype) self.assertRaises(PassThru, self.s.difference, check_pass_thru()) self.assertRaises(TypeError, self.s.difference, [[]]) for C in set, frozenset, dict.fromkeys, str, list, tuple: self.assertEqual(self.thetype('abcba').difference(C('cdc')), set('ab')) self.assertEqual(self.thetype('abcba').difference(C('efgfe')), set('abc')) self.assertEqual(self.thetype('abcba').difference(C('ccb')), set('a')) self.assertEqual(self.thetype('abcba').difference(C('ef')), set('abc')) self.assertEqual(self.thetype('abcba').difference(), set('abc')) self.assertEqual(self.thetype('abcba').difference(C('a'), C('b')), set('c')) def test_sub(self): i = self.s.difference(self.otherword) self.assertEqual(self.s - set(self.otherword), i) self.assertEqual(self.s - frozenset(self.otherword), i) try: self.s - self.otherword except TypeError: pass else: self.fail("s-t did not screen-out general iterables") def test_symmetric_difference(self): i = self.s.symmetric_difference(self.otherword) for c in self.letters: self.assertEqual(c in i, (c in self.d) ^ (c in self.otherword)) self.assertEqual(self.s, self.thetype(self.word)) self.assertEqual(type(i), self.basetype) self.assertRaises(PassThru, self.s.symmetric_difference, check_pass_thru()) self.assertRaises(TypeError, self.s.symmetric_difference, [[]]) for C in set, frozenset, dict.fromkeys, str, list, tuple: self.assertEqual(self.thetype('abcba').symmetric_difference(C('cdc')), set('abd')) self.assertEqual(self.thetype('abcba').symmetric_difference(C('efgfe')), set('abcefg')) self.assertEqual(self.thetype('abcba').symmetric_difference(C('ccb')), set('a')) self.assertEqual(self.thetype('abcba').symmetric_difference(C('ef')), set('abcef')) def test_xor(self): i = self.s.symmetric_difference(self.otherword) self.assertEqual(self.s ^ set(self.otherword), i) self.assertEqual(self.s ^ frozenset(self.otherword), i) try: self.s ^ self.otherword except TypeError: pass else: self.fail("s^t did not screen-out general iterables") def test_equality(self): self.assertEqual(self.s, set(self.word)) self.assertEqual(self.s, frozenset(self.word)) self.assertEqual(self.s == self.word, False) self.assertNotEqual(self.s, set(self.otherword)) self.assertNotEqual(self.s, frozenset(self.otherword)) self.assertEqual(self.s != self.word, True) def test_setOfFrozensets(self): t = map(frozenset, ['abcdef', 'bcd', 'bdcb', 'fed', 'fedccba']) s = self.thetype(t) self.assertEqual(len(s), 3) def test_sub_and_super(self): p, q, r = map(self.thetype, ['ab', 'abcde', 'def']) self.assertTrue(p < q) self.assertTrue(p <= q) self.assertTrue(q <= q) self.assertTrue(q > p) self.assertTrue(q >= p) self.assertFalse(q < r) self.assertFalse(q <= r) self.assertFalse(q > r) self.assertFalse(q >= r) self.assertTrue(set('a').issubset('abc')) self.assertTrue(set('abc').issuperset('a')) self.assertFalse(set('a').issubset('cbs')) self.assertFalse(set('cbs').issuperset('a')) def test_pickling(self): for i in range(pickle.HIGHEST_PROTOCOL + 1): p = pickle.dumps(self.s, i) dup = pickle.loads(p) self.assertEqual(self.s, dup, "%s != %s" % (self.s, dup)) if type(self.s) not in (set, frozenset): self.s.x = 10 p = pickle.dumps(self.s) dup = pickle.loads(p) self.assertEqual(self.s.x, dup.x) def test_deepcopy(self): class Tracer: def __init__(self, value): self.value = value def __hash__(self): return self.value def __deepcopy__(self, memo=None): return Tracer(self.value + 1) t = Tracer(10) s = self.thetype([t]) dup = copy.deepcopy(s) self.assertNotEqual(id(s), id(dup)) for elem in dup: newt = 
elem self.assertNotEqual(id(t), id(newt)) self.assertEqual(t.value + 1, newt.value) def test_gc(self): # Create a nest of cycles to exercise overall ref count check class A: pass s = set(A() for i in range(1000)) for elem in s: elem.cycle = s elem.sub = elem elem.set = set([elem]) def test_subclass_with_custom_hash(self): raise NotImplementedError() # runtime computed base class below # Bug #1257731 class H: # (self.thetype): def __hash__(self): return int(id(self) & 0x7fffffff) s=H() f=set() f.add(s) self.assertIn(s, f) f.remove(s) f.add(s) f.discard(s) def test_badcmp(self): s = self.thetype([BadCmp()]) # Detect comparison errors during insertion and lookup self.assertRaises(RuntimeError, self.thetype, [BadCmp(), BadCmp()]) self.assertRaises(RuntimeError, s.__contains__, BadCmp()) # Detect errors during mutating operations if hasattr(s, 'add'): self.assertRaises(RuntimeError, s.add, BadCmp()) self.assertRaises(RuntimeError, s.discard, BadCmp()) self.assertRaises(RuntimeError, s.remove, BadCmp()) def test_cyclical_repr(self): w = ReprWrapper() s = self.thetype([w]) w.value = s if self.thetype == set: self.assertEqual(repr(s), '{set(...)}') else: name = repr(s).partition('(')[0] # strip class name self.assertEqual(repr(s), '%s({%s(...)})' % (name, name)) def test_cyclical_print(self): w = ReprWrapper() s = self.thetype([w]) w.value = s fo = open(support.TESTFN, "w") try: fo.write(str(s)) fo.close() fo = open(support.TESTFN, "r") self.assertEqual(fo.read(), repr(s)) finally: fo.close() support.unlink(support.TESTFN) def test_do_not_rehash_dict_keys(self): raise NotImplementedError() # cannot subclass int n = 10 d = None # dict.fromkeys(map(HashCountingInt, range(n))) self.assertEqual(sum(elem.hash_count for elem in d), n) s = self.thetype(d) self.assertEqual(sum(elem.hash_count for elem in d), n) s.difference(d) self.assertEqual(sum(elem.hash_count for elem in d), n) if hasattr(s, 'symmetric_difference_update'): s.symmetric_difference_update(d) self.assertEqual(sum(elem.hash_count for elem in d), n) d2 = dict.fromkeys(set(d)) self.assertEqual(sum(elem.hash_count for elem in d), n) d3 = dict.fromkeys(frozenset(d)) self.assertEqual(sum(elem.hash_count for elem in d), n) d3 = dict.fromkeys(frozenset(d), 123) self.assertEqual(sum(elem.hash_count for elem in d), n) self.assertEqual(d3, dict.fromkeys(d, 123)) def test_container_iterator(self): # Bug #3680: tp_traverse was not implemented for set iterator object class C(object): pass obj = C() ref = weakref.ref(obj) container = set([obj, 1]) obj.x = iter(container) obj = None container = None gc.collect() self.assertTrue(ref() is None, "Cycle was not collected") class TestSet(TestJointOps): thetype = set basetype = set def test_init(self): s = self.thetype() s.__init__(self.word) self.assertEqual(s, set(self.word)) s.__init__(self.otherword) self.assertEqual(s, set(self.otherword)) self.assertRaises(TypeError, s.__init__, s, 2); self.assertRaises(TypeError, s.__init__, 1) def test_constructor_identity(self): s = self.thetype(range(3)) t = self.thetype(s) self.assertNotEqual(id(s), id(t)) def test_set_literal(self): raise NotImplementedError() #s = set([1,2,3]) #t = {1,2,3} #self.assertEqual(s, t) def test_hash(self): self.assertRaises(TypeError, hash, self.s) def test_clear(self): self.s.clear() self.assertEqual(self.s, set()) self.assertEqual(len(self.s), 0) def test_copy(self): dup = self.s.copy() self.assertEqual(self.s, dup) self.assertNotEqual(id(self.s), id(dup)) self.assertEqual(type(dup), self.basetype) def test_add(self): self.s.add('Q') 
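# --- Illustrative sketch (not part of the original test file) ---
# test_container_iterator above uses a weakref to prove that a reference
# cycle running through a set iterator is reclaimed by the cyclic GC
# (bug #3680).  The same pattern, standalone (class name is ours):
import gc as _gc_sketch
import weakref as _weakref_sketch

class _Node_sketch(object):
    pass

_obj = _Node_sketch()
_ref = _weakref_sketch.ref(_obj)
_container = set([_obj])
_obj.cycle = iter(_container)   # obj -> iterator -> set -> obj: a cycle
_obj = None
_container = None
_gc_sketch.collect()
assert _ref() is None, "cycle should have been collected"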
self.assertIn('Q', self.s) dup = self.s.copy() self.s.add('Q') self.assertEqual(self.s, dup) self.assertRaises(TypeError, self.s.add, []) def test_remove(self): self.s.remove('a') self.assertNotIn('a', self.s) self.assertRaises(KeyError, self.s.remove, 'Q') self.assertRaises(TypeError, self.s.remove, []) s = self.thetype([frozenset(self.word)]) self.assertIn(self.thetype(self.word), s) s.remove(self.thetype(self.word)) self.assertNotIn(self.thetype(self.word), s) self.assertRaises(KeyError, self.s.remove, self.thetype(self.word)) def test_remove_keyerror_unpacking(self): # bug: www.python.org/sf/1576657 for v1 in ['Q', (1,)]: try: self.s.remove(v1) except KeyError as e: v2 = e.args[0] self.assertEqual(v1, v2) else: self.fail() def test_remove_keyerror_set(self): key = self.thetype([3, 4]) try: self.s.remove(key) except KeyError as e: self.assertTrue(e.args[0] is key, "KeyError should be {0}, not {1}".format(key, e.args[0])) else: self.fail() def test_discard(self): self.s.discard('a') self.assertNotIn('a', self.s) self.s.discard('Q') self.assertRaises(TypeError, self.s.discard, []) s = self.thetype([frozenset(self.word)]) self.assertIn(self.thetype(self.word), s) s.discard(self.thetype(self.word)) self.assertNotIn(self.thetype(self.word), s) s.discard(self.thetype(self.word)) def test_pop(self): for i in range(len(self.s)): elem = self.s.pop() self.assertNotIn(elem, self.s) self.assertRaises(KeyError, self.s.pop) def test_update(self): retval = self.s.update(self.otherword) self.assertEqual(retval, None) for c in (self.word + self.otherword): self.assertIn(c, self.s) self.assertRaises(PassThru, self.s.update, check_pass_thru()) self.assertRaises(TypeError, self.s.update, [[]]) for p, q in (('cdc', 'abcd'), ('efgfe', 'abcefg'), ('ccb', 'abc'), ('ef', 'abcef')): for C in set, frozenset, dict.fromkeys, str, list, tuple: s = self.thetype('abcba') self.assertEqual(s.update(C(p)), None) self.assertEqual(s, set(q)) for p in ('cdc', 'efgfe', 'ccb', 'ef', 'abcda'): q = 'ahi' for C in set, frozenset, dict.fromkeys, str, list, tuple: s = self.thetype('abcba') self.assertEqual(s.update(C(p), C(q)), None) self.assertEqual(s, set(s) | set(p) | set(q)) def test_ior(self): self.s |= set(self.otherword) for c in (self.word + self.otherword): self.assertIn(c, self.s) def test_intersection_update(self): retval = self.s.intersection_update(self.otherword) self.assertEqual(retval, None) for c in (self.word + self.otherword): if c in self.otherword and c in self.word: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(PassThru, self.s.intersection_update, check_pass_thru()) self.assertRaises(TypeError, self.s.intersection_update, [[]]) for p, q in (('cdc', 'c'), ('efgfe', ''), ('ccb', 'bc'), ('ef', '')): for C in set, frozenset, dict.fromkeys, str, list, tuple: s = self.thetype('abcba') self.assertEqual(s.intersection_update(C(p)), None) self.assertEqual(s, set(q)) ss = 'abcba' s = self.thetype(ss) t = 'cbc' self.assertEqual(s.intersection_update(C(p), C(t)), None) self.assertEqual(s, set('abcba')&set(p)&set(t)) def test_iand(self): self.s &= set(self.otherword) for c in (self.word + self.otherword): if c in self.otherword and c in self.word: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_difference_update(self): retval = self.s.difference_update(self.otherword) self.assertEqual(retval, None) for c in (self.word + self.otherword): if c in self.word and c not in self.otherword: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(PassThru, 
self.s.difference_update, check_pass_thru()) self.assertRaises(TypeError, self.s.difference_update, [[]]) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) for p, q in (('cdc', 'ab'), ('efgfe', 'abc'), ('ccb', 'a'), ('ef', 'abc')): for C in set, frozenset, dict.fromkeys, str, list, tuple: s = self.thetype('abcba') self.assertEqual(s.difference_update(C(p)), None) self.assertEqual(s, set(q)) s = self.thetype('abcdefghih') s.difference_update() self.assertEqual(s, self.thetype('abcdefghih')) s = self.thetype('abcdefghih') s.difference_update(C('aba')) self.assertEqual(s, self.thetype('cdefghih')) s = self.thetype('abcdefghih') s.difference_update(C('cdc'), C('aba')) self.assertEqual(s, self.thetype('efghih')) def test_isub(self): self.s -= set(self.otherword) for c in (self.word + self.otherword): if c in self.word and c not in self.otherword: self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_symmetric_difference_update(self): retval = self.s.symmetric_difference_update(self.otherword) self.assertEqual(retval, None) for c in (self.word + self.otherword): if (c in self.word) ^ (c in self.otherword): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) self.assertRaises(PassThru, self.s.symmetric_difference_update, check_pass_thru()) self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]]) for p, q in (('cdc', 'abd'), ('efgfe', 'abcefg'), ('ccb', 'a'), ('ef', 'abcef')): for C in set, frozenset, dict.fromkeys, str, list, tuple: s = self.thetype('abcba') self.assertEqual(s.symmetric_difference_update(C(p)), None) self.assertEqual(s, set(q)) def test_ixor(self): self.s ^= set(self.otherword) for c in (self.word + self.otherword): if (c in self.word) ^ (c in self.otherword): self.assertIn(c, self.s) else: self.assertNotIn(c, self.s) def test_inplace_on_self(self): t = self.s.copy() t |= t self.assertEqual(t, self.s) t &= t self.assertEqual(t, self.s) t -= t self.assertEqual(t, self.thetype()) t = self.s.copy() t ^= t self.assertEqual(t, self.thetype()) def test_weakref(self): s = self.thetype('gallahad') p = weakref.proxy(s) self.assertEqual(str(p), str(s)) s = None self.assertRaises(ReferenceError, str, p) def test_rich_compare(self): class TestRichSetCompare: def __gt__(self, some_set): self.gt_called = True return False def __lt__(self, some_set): self.lt_called = True return False def __ge__(self, some_set): self.ge_called = True return False def __le__(self, some_set): self.le_called = True return False # This first tries the builtin rich set comparison, which doesn't know # how to handle the custom object. Upon returning NotImplemented, the # corresponding comparison on the right object is invoked. 
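# --- Illustrative sketch (not part of the original test file) ---
# The comment above describes the reflected-comparison protocol that
# test_rich_compare relies on: when set.__lt__ returns NotImplemented for an
# unknown right-hand type, Python retries with the reflected method
# (__gt__) on the right operand.  A minimal standalone demonstration
# (class name is ours):
class _ReflectedProbe_sketch(object):
    def __init__(self):
        self.gt_called = False
    def __gt__(self, other):
        self.gt_called = True
        return False

_probe = _ReflectedProbe_sketch()
{1, 2, 3} < _probe        # the set does not know how to compare itself...
assert _probe.gt_called   # ...so Python fell back to the probe's __gt__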
myset = {1, 2, 3} myobj = TestRichSetCompare() myset < myobj self.assertTrue(myobj.gt_called) myobj = TestRichSetCompare() myset > myobj self.assertTrue(myobj.lt_called) myobj = TestRichSetCompare() myset <= myobj self.assertTrue(myobj.ge_called) myobj = TestRichSetCompare() myset >= myobj self.assertTrue(myobj.le_called) # C API test only available in a debug build if hasattr(set, "test_c_api"): def test_c_api(self): self.assertEqual(set().test_c_api(), True) class SetSubclass(set): pass class TestSetSubclass(TestSet): thetype = SetSubclass basetype = set class SetSubclassWithKeywordArgs(set): def __init__(self, iterable=[], newarg=None): set.__init__(self, iterable) class TestSetSubclassWithKeywordArgs(TestSet): def test_keywords_in_subclass(self): 'SF bug #1486663 -- this used to erroneously raise a TypeError' SetSubclassWithKeywordArgs(newarg=1) class TestFrozenSet(TestJointOps): thetype = frozenset basetype = frozenset def test_init(self): s = self.thetype(self.word) s.__init__(self.otherword) self.assertEqual(s, set(self.word)) def test_singleton_empty_frozenset(self): f = frozenset() efs = [frozenset(), frozenset([]), frozenset(()), frozenset(''), frozenset(), frozenset([]), frozenset(()), frozenset(''), frozenset(range(0)), frozenset(frozenset()), frozenset(f), f] # All of the empty frozensets should have just one id() self.assertEqual(len(set(map(id, efs))), 1) def test_constructor_identity(self): s = self.thetype(range(3)) t = self.thetype(s) self.assertEqual(id(s), id(t)) def test_hash(self): self.assertEqual(hash(self.thetype('abcdeb')), hash(self.thetype('ebecda'))) # make sure that all permutations give the same hash value n = 100 seq = [randrange(n) for i in range(n)] results = set() for i in range(200): shuffle(seq) results.add(hash(self.thetype(seq))) self.assertEqual(len(results), 1) def test_copy(self): dup = self.s.copy() self.assertEqual(id(self.s), id(dup)) def test_frozen_as_dictkey(self): seq = list(range(10)) + list('abcdefg') + ['apple'] key1 = self.thetype(seq) key2 = self.thetype(reversed(seq)) self.assertEqual(key1, key2) self.assertNotEqual(id(key1), id(key2)) d = {} d[key1] = 42 self.assertEqual(d[key2], 42) def test_hash_caching(self): f = self.thetype('abcdcda') self.assertEqual(hash(f), hash(f)) def test_hash_effectiveness(self): n = 13 hashvalues = set() addhashvalue = hashvalues.add elemmasks = [(i+1, 1<=": "issuperset", } reverse = {"==": "==", "!=": "!=", "<": ">", ">": "<", "<=": ">=", ">=": "<=", } def test_issubset(self): raise NotImplementedError() # eval not supported below x = self.left y = self.right for case in "!=", "==", "<", "<=", ">", ">=": expected = case in self.cases # Test the binary infix spelling. result = None ## eval("x" + case + "y", locals()) self.assertEqual(result, expected) # Test the "friendly" method-name spelling, if one exists. if case in TestSubsets.case2method: method = getattr(x, TestSubsets.case2method[case]) result = method(y) self.assertEqual(result, expected) # Now do the same for the operands reversed. 
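# --- Illustrative sketch (not part of the original test file) ---
# Two frozenset properties exercised above: CPython caches a single empty
# frozenset object (asserted by test_singleton_empty_frozenset), and hashing
# is order-insensitive, since equal frozensets must hash equal no matter how
# they were built:
assert frozenset() is frozenset([])        # one shared empty object (CPython)
assert frozenset('abc') == frozenset('cba')
assert hash(frozenset('abcdeb')) == hash(frozenset('ebecda'))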
rcase = TestSubsets.reverse[case] result = None ## eval("y" + rcase + "x", locals()) self.assertEqual(result, expected) if rcase in TestSubsets.case2method: method = getattr(y, TestSubsets.case2method[rcase]) result = method(x) self.assertEqual(result, expected) #------------------------------------------------------------------------------ class TestSubsetEqualEmpty(TestSubsets): left = set() # type: Any right = set() # type: Any name = "both empty" cases = "==", "<=", ">=" #------------------------------------------------------------------------------ class TestSubsetEqualNonEmpty(TestSubsets): left = set([1, 2]) right = set([1, 2]) name = "equal pair" cases = "==", "<=", ">=" #------------------------------------------------------------------------------ class TestSubsetEmptyNonEmpty(TestSubsets): left = set() # type: Any right = set([1, 2]) name = "one empty, one non-empty" cases = "!=", "<", "<=" #------------------------------------------------------------------------------ class TestSubsetPartial(TestSubsets): left = set([1]) right = set([1, 2]) name = "one a non-empty proper subset of other" cases = "!=", "<", "<=" #------------------------------------------------------------------------------ class TestSubsetNonOverlap(TestSubsets): left = set([1]) right = set([2]) name = "neither empty, neither contains" cases = "!=" #============================================================================== class TestOnlySetsInBinaryOps(unittest.TestCase): def test_eq_ne(self): # Unlike the others, this is testing that == and != *are* allowed. self.assertEqual(self.other == self.set, False) self.assertEqual(self.set == self.other, False) self.assertEqual(self.other != self.set, True) self.assertEqual(self.set != self.other, True) def test_ge_gt_le_lt(self): self.assertRaises(TypeError, lambda: self.set < self.other) self.assertRaises(TypeError, lambda: self.set <= self.other) self.assertRaises(TypeError, lambda: self.set > self.other) self.assertRaises(TypeError, lambda: self.set >= self.other) self.assertRaises(TypeError, lambda: self.other < self.set) self.assertRaises(TypeError, lambda: self.other <= self.set) self.assertRaises(TypeError, lambda: self.other > self.set) self.assertRaises(TypeError, lambda: self.other >= self.set) def test_update_operator(self): try: self.set |= self.other except TypeError: pass else: self.fail("expected TypeError") def test_update(self): if self.otherIsIterable: self.set.update(self.other) else: self.assertRaises(TypeError, self.set.update, self.other) def test_union(self): self.assertRaises(TypeError, lambda: self.set | self.other) self.assertRaises(TypeError, lambda: self.other | self.set) if self.otherIsIterable: self.set.union(self.other) else: self.assertRaises(TypeError, self.set.union, self.other) def test_intersection_update_operator(self): try: self.set &= self.other except TypeError: pass else: self.fail("expected TypeError") def test_intersection_update(self): if self.otherIsIterable: self.set.intersection_update(self.other) else: self.assertRaises(TypeError, self.set.intersection_update, self.other) def test_intersection(self): self.assertRaises(TypeError, lambda: self.set & self.other) self.assertRaises(TypeError, lambda: self.other & self.set) if self.otherIsIterable: self.set.intersection(self.other) else: self.assertRaises(TypeError, self.set.intersection, self.other) def test_sym_difference_update_operator(self): try: self.set ^= self.other except TypeError: pass else: self.fail("expected TypeError") def test_sym_difference_update(self): 
if self.otherIsIterable: self.set.symmetric_difference_update(self.other) else: self.assertRaises(TypeError, self.set.symmetric_difference_update, self.other) def test_sym_difference(self): self.assertRaises(TypeError, lambda: self.set ^ self.other) self.assertRaises(TypeError, lambda: self.other ^ self.set) if self.otherIsIterable: self.set.symmetric_difference(self.other) else: self.assertRaises(TypeError, self.set.symmetric_difference, self.other) def test_difference_update_operator(self): try: self.set -= self.other except TypeError: pass else: self.fail("expected TypeError") def test_difference_update(self): if self.otherIsIterable: self.set.difference_update(self.other) else: self.assertRaises(TypeError, self.set.difference_update, self.other) def test_difference(self): self.assertRaises(TypeError, lambda: self.set - self.other) self.assertRaises(TypeError, lambda: self.other - self.set) if self.otherIsIterable: self.set.difference(self.other) else: self.assertRaises(TypeError, self.set.difference, self.other) #------------------------------------------------------------------------------ class TestOnlySetsNumeric(TestOnlySetsInBinaryOps): def setUp(self): self.set = set((1, 2, 3)) self.other = 19 self.otherIsIterable = False #------------------------------------------------------------------------------ class TestOnlySetsDict(TestOnlySetsInBinaryOps): def setUp(self): self.set = set((1, 2, 3)) self.other = {1:2, 3:4} self.otherIsIterable = True #------------------------------------------------------------------------------ class TestOnlySetsOperator(TestOnlySetsInBinaryOps): def setUp(self): self.set = set((1, 2, 3)) self.other = operator.add self.otherIsIterable = False #------------------------------------------------------------------------------ class TestOnlySetsTuple(TestOnlySetsInBinaryOps): def setUp(self): self.set = set((1, 2, 3)) self.other = (2, 4, 6) self.otherIsIterable = True #------------------------------------------------------------------------------ class TestOnlySetsString(TestOnlySetsInBinaryOps): def setUp(self): self.set = set((1, 2, 3)) self.other = 'abc' self.otherIsIterable = True #------------------------------------------------------------------------------ class TestOnlySetsGenerator(TestOnlySetsInBinaryOps): def setUp(self): def gen(): for i in range(0, 10, 2): yield i self.set = set((1, 2, 3)) self.other = gen() self.otherIsIterable = True #============================================================================== class TestCopying(unittest.TestCase): def test_copy(self): dup = self.set.copy() dup_list = sorted(dup, key=repr) set_list = sorted(self.set, key=repr) self.assertEqual(len(dup_list), len(set_list)) for i in range(len(dup_list)): self.assertTrue(dup_list[i] is set_list[i]) def test_deep_copy(self): dup = copy.deepcopy(self.set) ##print type(dup), repr(dup) dup_list = sorted(dup, key=repr) set_list = sorted(self.set, key=repr) self.assertEqual(len(dup_list), len(set_list)) for i in range(len(dup_list)): self.assertEqual(dup_list[i], set_list[i]) #------------------------------------------------------------------------------ class TestCopyingEmpty(TestCopying): def setUp(self): self.set = set() #------------------------------------------------------------------------------ class TestCopyingSingleton(TestCopying): def setUp(self): self.set = set(["hello"]) #------------------------------------------------------------------------------ class TestCopyingTriple(TestCopying): def setUp(self): self.set = set(["zero", 0, None]) 
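# --- Illustrative sketch (not part of the original test file) ---
# TestCopying above distinguishes set.copy() (shallow: the new set reuses the
# very same element objects) from copy.deepcopy() (which only guarantees an
# equal set; immutable elements may or may not be duplicated).  Standalone:
import copy as _copy_sketch

_orig = set([("zero", 0), ("one", 1)])
_shallow = _orig.copy()
assert _shallow == _orig and _shallow is not _orig
assert all(any(_a is _b for _b in _shallow) for _a in _orig)   # shared elements
assert _copy_sketch.deepcopy(_orig) == _orig                   # equal, new set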
#------------------------------------------------------------------------------ class TestCopyingTuple(TestCopying): def setUp(self): self.set = set([(1, 2)]) #------------------------------------------------------------------------------ class TestCopyingNested(TestCopying): def setUp(self): self.set = set([((1, 2), (3, 4))]) #============================================================================== class TestIdentities(unittest.TestCase): def setUp(self): self.a = set('abracadabra') self.b = set('alacazam') def test_binopsVsSubsets(self): a, b = self.a, self.b self.assertTrue(a - b < a) self.assertTrue(b - a < b) self.assertTrue(a & b < a) self.assertTrue(a & b < b) self.assertTrue(a | b > a) self.assertTrue(a | b > b) self.assertTrue(a ^ b < a | b) def test_commutativity(self): a, b = self.a, self.b self.assertEqual(a&b, b&a) self.assertEqual(a|b, b|a) self.assertEqual(a^b, b^a) if a != b: self.assertNotEqual(a-b, b-a) def test_summations(self): # check that sums of parts equal the whole a, b = self.a, self.b self.assertEqual((a-b)|(a&b)|(b-a), a|b) self.assertEqual((a&b)|(a^b), a|b) self.assertEqual(a|(b-a), a|b) self.assertEqual((a-b)|b, a|b) self.assertEqual((a-b)|(a&b), a) self.assertEqual((b-a)|(a&b), b) self.assertEqual((a-b)|(b-a), a^b) def test_exclusion(self): # check that inverse operations show non-overlap a, b, zero = self.a, self.b, set() self.assertEqual((a-b)&b, zero) self.assertEqual((b-a)&a, zero) self.assertEqual((a&b)&(a^b), zero) # Tests derived from test_itertools.py ======================================= def R(seqn): 'Regular generator' for i in seqn: yield i class G: 'Sequence using __getitem__' def __init__(self, seqn): self.seqn = seqn def __getitem__(self, i): return self.seqn[i] class I: 'Sequence using iterator protocol' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): return self def __next__(self): if self.i >= len(self.seqn): raise StopIteration v = self.seqn[self.i] self.i += 1 return v class Ig: 'Sequence using iterator protocol defined with a generator' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): for val in self.seqn: yield val class X: 'Missing __getitem__ and __iter__' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __next__(self): if self.i >= len(self.seqn): raise StopIteration v = self.seqn[self.i] self.i += 1 return v class N: 'Iterator missing __next__()' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): return self class E: 'Test propagation of exceptions' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): return self def __next__(self): 3 // 0 class S: 'Test immediate stop' def __init__(self, seqn): pass def __iter__(self): return self def __next__(self): raise StopIteration from itertools import chain def L(seqn): 'Test multiple tiers of iterators' return chain(map(lambda x:x, R(Ig(G(seqn))))) class TestVariousIteratorArgs(unittest.TestCase): def test_constructor(self): for cons in (set, frozenset): for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(sorted(cons(g(s)), key=repr), sorted(g(s), key=repr)) self.assertRaises(TypeError, cons , X(s)) self.assertRaises(TypeError, cons , N(s)) self.assertRaises(ZeroDivisionError, cons , E(s)) def test_inline_methods(self): s = set('november') for data in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5), 'december'): for meth in (s.union, s.intersection, s.difference, s.symmetric_difference, s.isdisjoint): for g in 
(G, I, Ig, L, R): expected = meth(data) actual = meth(G(data)) if isinstance(expected, bool): self.assertEqual(actual, expected) else: self.assertEqual(sorted(actual, key=repr), sorted(expected, key=repr)) self.assertRaises(TypeError, meth, X(s)) self.assertRaises(TypeError, meth, N(s)) self.assertRaises(ZeroDivisionError, meth, E(s)) def test_inplace_methods(self): for data in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5), 'december'): for methname in ('update', 'intersection_update', 'difference_update', 'symmetric_difference_update'): for g in (G, I, Ig, S, L, R): s = set('january') t = s.copy() getattr(s, methname)(list(g(data))) getattr(t, methname)(g(data)) self.assertEqual(sorted(s, key=repr), sorted(t, key=repr)) self.assertRaises(TypeError, getattr(set('january'), methname), X(data)) self.assertRaises(TypeError, getattr(set('january'), methname), N(data)) self.assertRaises(ZeroDivisionError, getattr(set('january'), methname), E(data)) be_bad = set2 = dict2 = None # type: Any class bad_eq: def __eq__(self, other): if be_bad: set2.clear() raise ZeroDivisionError return self is other def __hash__(self): return 0 class bad_dict_clear: def __eq__(self, other): if be_bad: dict2.clear() return self is other def __hash__(self): return 0 class TestWeirdBugs(unittest.TestCase): def test_8420_set_merge(self): # This used to segfault global be_bad, set2, dict2 be_bad = False set1 = {bad_eq()} set2 = {bad_eq() for i in range(75)} be_bad = True self.assertRaises(ZeroDivisionError, set1.update, set2) be_bad = False set1 = {bad_dict_clear()} dict2 = {bad_dict_clear(): None} be_bad = True set1.symmetric_difference_update(dict2) # Application tests (based on David Eppstein's graph recipes ==================================== def powerset(U): """Generates all subsets of a set or sequence U.""" U = iter(U) try: x = frozenset([next(U)]) for S in powerset(U): yield S yield S | x except StopIteration: yield frozenset() def cube(n): """Graph of n-dimensional hypercube.""" singletons = [frozenset([x]) for x in range(n)] return dict([(x, frozenset([x^s for s in singletons])) for x in powerset(range(n))]) def linegraph(G): """Graph, the vertices of which are edges of G, with two vertices being adjacent iff the corresponding edges share a vertex.""" L = {} for x in G: for y in G[x]: nx = [frozenset([x,z]) for z in G[x] if z != y] ny = [frozenset([y,z]) for z in G[y] if z != x] L[frozenset([x,y])] = frozenset(nx+ny) return L def faces(G): 'Return a set of faces in G. 
Where a face is a set of vertices on that face' # currently limited to triangles,squares, and pentagons f = set() for v1, edges in G.items(): for v2 in edges: for v3 in G[v2]: if v1 == v3: continue if v1 in G[v3]: f.add(frozenset([v1, v2, v3])) else: for v4 in G[v3]: if v4 == v2: continue if v1 in G[v4]: f.add(frozenset([v1, v2, v3, v4])) else: for v5 in G[v4]: if v5 == v3 or v5 == v2: continue if v1 in G[v5]: f.add(frozenset([v1, v2, v3, v4, v5])) return f class TestGraphs(unittest.TestCase): def test_cube(self): g = cube(3) # vert --> {v1, v2, v3} vertices1 = set(g) self.assertEqual(len(vertices1), 8) # eight vertices for edge in g.values(): self.assertEqual(len(edge), 3) # each vertex connects to three edges vertices2 = set() for edges in g.values(): for v in edges: vertices2.add(v) self.assertEqual(vertices1, vertices2) # edge vertices in original set cubefaces = faces(g) self.assertEqual(len(cubefaces), 6) # six faces for face in cubefaces: self.assertEqual(len(face), 4) # each face is a square def test_cuboctahedron(self): # http://en.wikipedia.org/wiki/Cuboctahedron # 8 triangular faces and 6 square faces # 12 indentical vertices each connecting a triangle and square g = cube(3) cuboctahedron = linegraph(g) # V( --> {V1, V2, V3, V4} self.assertEqual(len(cuboctahedron), 12)# twelve vertices vertices = set(cuboctahedron) for edges in cuboctahedron.values(): self.assertEqual(len(edges), 4) # each vertex connects to four other vertices othervertices = set(edge for edges in cuboctahedron.values() for edge in edges) self.assertEqual(vertices, othervertices) # edge vertices in original set cubofaces = faces(cuboctahedron) facesizes = collections.defaultdict(int) for face in cubofaces: facesizes[len(face)] += 1 self.assertEqual(facesizes[3], 8) # eight triangular faces self.assertEqual(facesizes[4], 6) # six square faces for vertex in cuboctahedron: edge = vertex # Cuboctahedron vertices are edges in Cube self.assertEqual(len(edge), 2) # Two cube vertices define an edge for cubevert in edge: self.assertIn(cubevert, g) #============================================================================== def test_main(verbose=None): test_classes = ( TestSet, TestSetSubclass, TestSetSubclassWithKeywordArgs, TestFrozenSet, TestFrozenSetSubclass, TestSetOfSets, TestExceptionPropagation, TestBasicOpsEmpty, TestBasicOpsSingleton, TestBasicOpsTuple, TestBasicOpsTriple, TestBasicOpsString, TestBasicOpsBytes, TestBasicOpsMixedStringBytes, TestBinaryOps, TestUpdateOps, TestMutate, TestSubsetEqualEmpty, TestSubsetEqualNonEmpty, TestSubsetEmptyNonEmpty, TestSubsetPartial, TestSubsetNonOverlap, TestOnlySetsNumeric, TestOnlySetsDict, TestOnlySetsOperator, TestOnlySetsTuple, TestOnlySetsString, TestOnlySetsGenerator, TestCopyingEmpty, TestCopyingSingleton, TestCopyingTriple, TestCopyingTuple, TestCopyingNested, TestIdentities, TestVariousIteratorArgs, TestGraphs, TestWeirdBugs, ) support.run_unittest(*test_classes) # verify reference counting if verbose and hasattr(sys, "gettotalrefcount"): import gc counts = [None] * 5 for i in range(len(counts)): support.run_unittest(*test_classes) gc.collect() counts[i] = sys.gettotalrefcount() print(counts) if __name__ == "__main__": test_main(verbose=True) mypy-0.560/test-data/stdlib-samples/3.2/test/test_shutil.py0000644€tŠÔÚ€2›s®0000010741513215007205027712 0ustar jukkaDROPBOX\Domain Users00000000000000# Copyright (C) 2003 Python Software Foundation import unittest import shutil import tempfile import sys import stat import os import os.path import functools from test 
import support from test.support import TESTFN from os.path import splitdrive from distutils.spawn import find_executable, spawn from shutil import (_make_tarball, _make_zipfile, make_archive, register_archive_format, unregister_archive_format, get_archive_formats, Error, unpack_archive, register_unpack_format, RegistryError, unregister_unpack_format, get_unpack_formats) import tarfile import warnings from test import support from test.support import check_warnings, captured_stdout from typing import ( Any, Callable, Tuple, List, Sequence, BinaryIO, IO, Union, cast ) from types import TracebackType import bz2 BZ2_SUPPORTED = True TESTFN2 = TESTFN + "2" import grp import pwd UID_GID_SUPPORT = True import zlib import zipfile ZIP_SUPPORT = True def _fake_rename(*args: Any, **kwargs: Any) -> None: # Pretend the destination path is on a different filesystem. raise OSError() def mock_rename(func: Any) -> Any: @functools.wraps(func) def wrap(*args: Any, **kwargs: Any) -> Any: try: builtin_rename = shutil.rename shutil.rename = cast(Any, _fake_rename) return func(*args, **kwargs) finally: shutil.rename = cast(Any, builtin_rename) return wrap class TestShutil(unittest.TestCase): def setUp(self) -> None: super().setUp() self.tempdirs = [] # type: List[str] def tearDown(self) -> None: super().tearDown() while self.tempdirs: d = self.tempdirs.pop() shutil.rmtree(d, os.name in ('nt', 'cygwin')) def write_file(self, path: Union[str, List[str], tuple], content: str = 'xxx') -> None: """Writes a file in the given path. path can be a string or a sequence. """ if isinstance(path, list): path = os.path.join(*path) elif isinstance(path, tuple): path = cast(str, os.path.join(*path)) f = open(path, 'w') try: f.write(content) finally: f.close() def mkdtemp(self) -> str: """Create a temporary directory that will be cleaned up. Returns the path of the directory. """ d = tempfile.mkdtemp() self.tempdirs.append(d) return d def test_rmtree_errors(self) -> None: # filename is guaranteed not to exist filename = tempfile.mktemp() self.assertRaises(OSError, shutil.rmtree, filename) # See bug #1071513 for why we don't run this on cygwin # and bug #1076467 for why we don't run this as root. if (hasattr(os, 'chmod') and sys.platform[:6] != 'cygwin' and not (hasattr(os, 'geteuid') and os.geteuid() == 0)): def test_on_error(self) -> None: self.errorState = 0 os.mkdir(TESTFN) self.childpath = os.path.join(TESTFN, 'a') f = open(self.childpath, 'w') f.close() old_dir_mode = os.stat(TESTFN).st_mode old_child_mode = os.stat(self.childpath).st_mode # Make unwritable. os.chmod(self.childpath, stat.S_IREAD) os.chmod(TESTFN, stat.S_IREAD) shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror) # Test whether onerror has actually been called. self.assertEqual(self.errorState, 2, "Expected call to onerror function did not happen.") # Make writable again. os.chmod(TESTFN, old_dir_mode) os.chmod(self.childpath, old_child_mode) # Clean up. shutil.rmtree(TESTFN) def check_args_to_onerror(self, func: Callable[[str], Any], arg: str, exc: Tuple[type, BaseException, TracebackType]) -> None: # test_rmtree_errors deliberately runs rmtree # on a directory that is chmod 400, which will fail. # This function is run when shutil.rmtree fails. # 99.9% of the time it initially fails to remove # a file in the directory, so the first time through # func is os.remove. # However, some Linux machines running ZFS on # FUSE experienced a failure earlier in the process # at os.listdir. The first failure may legally # be either. 
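# --- Illustrative sketch (not part of the original test file) ---
# mock_rename above is a save/patch/restore decorator: it swaps shutil.rename
# for a stub that raises OSError so the decorated test behaves as if the
# destination were on a different filesystem, then restores the real function
# in a finally block.  The bare pattern, without the typing annotations or
# functools.wraps (helper names are ours):
import shutil as _shutil_sketch

def _fail_rename_sketch(*args, **kwargs):
    raise OSError("pretend the destination is on another filesystem")

def _with_fake_rename(func):
    def wrapper(*args, **kwargs):
        real = _shutil_sketch.rename
        _shutil_sketch.rename = _fail_rename_sketch
        try:
            return func(*args, **kwargs)
        finally:
            _shutil_sketch.rename = real     # always restore the original
    return wrapper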
if self.errorState == 0: if func is os.remove: self.assertEqual(arg, self.childpath) else: self.assertIs(func, os.listdir, "func must be either os.remove or os.listdir") self.assertEqual(arg, TESTFN) self.assertTrue(issubclass(exc[0], OSError)) self.errorState = 1 else: self.assertEqual(func, os.rmdir) self.assertEqual(arg, TESTFN) self.assertTrue(issubclass(exc[0], OSError)) self.errorState = 2 def test_rmtree_dont_delete_file(self) -> None: # When called on a file instead of a directory, don't delete it. handle, path = tempfile.mkstemp() os.fdopen(handle).close() self.assertRaises(OSError, shutil.rmtree, path) os.remove(path) def _write_data(self, path: str, data: str) -> None: f = open(path, "w") f.write(data) f.close() def test_copytree_simple(self) -> None: def read_data(path: str) -> str: f = open(path) data = f.read() f.close() return data src_dir = tempfile.mkdtemp() dst_dir = os.path.join(tempfile.mkdtemp(), 'destination') self._write_data(os.path.join(src_dir, 'test.txt'), '123') os.mkdir(os.path.join(src_dir, 'test_dir')) self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456') try: shutil.copytree(src_dir, dst_dir) self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt'))) self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir'))) self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir', 'test.txt'))) actual = read_data(os.path.join(dst_dir, 'test.txt')) self.assertEqual(actual, '123') actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt')) self.assertEqual(actual, '456') finally: for path in ( os.path.join(src_dir, 'test.txt'), os.path.join(dst_dir, 'test.txt'), os.path.join(src_dir, 'test_dir', 'test.txt'), os.path.join(dst_dir, 'test_dir', 'test.txt'), ): if os.path.exists(path): os.remove(path) for path in (src_dir, os.path.dirname(dst_dir) ): if os.path.exists(path): shutil.rmtree(path) def test_copytree_with_exclude(self) -> None: def read_data(path: str) -> str: f = open(path) data = f.read() f.close() return data # creating data join = os.path.join exists = os.path.exists src_dir = tempfile.mkdtemp() try: dst_dir = join(tempfile.mkdtemp(), 'destination') self._write_data(join(src_dir, 'test.txt'), '123') self._write_data(join(src_dir, 'test.tmp'), '123') os.mkdir(join(src_dir, 'test_dir')) self._write_data(join(src_dir, 'test_dir', 'test.txt'), '456') os.mkdir(join(src_dir, 'test_dir2')) self._write_data(join(src_dir, 'test_dir2', 'test.txt'), '456') os.mkdir(join(src_dir, 'test_dir2', 'subdir')) os.mkdir(join(src_dir, 'test_dir2', 'subdir2')) self._write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'), '456') self._write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'), '456') # testing glob-like patterns try: patterns = shutil.ignore_patterns('*.tmp', 'test_dir2') shutil.copytree(src_dir, dst_dir, ignore=patterns) # checking the result: some elements should not be copied self.assertTrue(exists(join(dst_dir, 'test.txt'))) self.assertTrue(not exists(join(dst_dir, 'test.tmp'))) self.assertTrue(not exists(join(dst_dir, 'test_dir2'))) finally: if os.path.exists(dst_dir): shutil.rmtree(dst_dir) try: patterns = shutil.ignore_patterns('*.tmp', 'subdir*') shutil.copytree(src_dir, dst_dir, ignore=patterns) # checking the result: some elements should not be copied self.assertTrue(not exists(join(dst_dir, 'test.tmp'))) self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2'))) self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir'))) finally: if os.path.exists(dst_dir): shutil.rmtree(dst_dir) # testing 
callable-style try: def _filter(src: str, names: Sequence[str]) -> List[str]: res = [] # type: List[str] for name in names: path = os.path.join(src, name) if (os.path.isdir(path) and path.split()[-1] == 'subdir'): res.append(name) elif os.path.splitext(path)[-1] in ('.py'): res.append(name) return res shutil.copytree(src_dir, dst_dir, ignore=_filter) # checking the result: some elements should not be copied self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2', 'test.py'))) self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir'))) finally: if os.path.exists(dst_dir): shutil.rmtree(dst_dir) finally: shutil.rmtree(src_dir) shutil.rmtree(os.path.dirname(dst_dir)) @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link') def test_dont_copy_file_onto_link_to_itself(self) -> None: # Temporarily disable test on Windows. if os.name == 'nt': return # bug 851123. os.mkdir(TESTFN) src = os.path.join(TESTFN, 'cheese') dst = os.path.join(TESTFN, 'shop') try: with open(src, 'w') as f: f.write('cheddar') os.link(src, dst) self.assertRaises(shutil.Error, shutil.copyfile, src, dst) with open(src, 'r') as f: self.assertEqual(f.read(), 'cheddar') os.remove(dst) finally: shutil.rmtree(TESTFN, ignore_errors=True) @support.skip_unless_symlink def test_dont_copy_file_onto_symlink_to_itself(self) -> None: # bug 851123. os.mkdir(TESTFN) src = os.path.join(TESTFN, 'cheese') dst = os.path.join(TESTFN, 'shop') try: with open(src, 'w') as f: f.write('cheddar') # Using `src` here would mean we end up with a symlink pointing # to TESTFN/TESTFN/cheese, while it should point at # TESTFN/cheese. os.symlink('cheese', dst) self.assertRaises(shutil.Error, shutil.copyfile, src, dst) with open(src, 'r') as f: self.assertEqual(f.read(), 'cheddar') os.remove(dst) finally: shutil.rmtree(TESTFN, ignore_errors=True) @support.skip_unless_symlink def test_rmtree_on_symlink(self) -> None: # bug 1669. 
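# --- Illustrative sketch (not part of the original test file) ---
# test_copytree_with_exclude above drives copytree's `ignore` hook two ways:
# shutil.ignore_patterns() for glob-style exclusion, and a hand-written
# callable that receives (directory, names) and returns the names to skip.
# Minimal forms of both (the source/destination paths below are placeholders):
import shutil as _shutil_ign_sketch

_glob_ignore = _shutil_ign_sketch.ignore_patterns('*.tmp', 'test_dir2')

def _callable_ignore(src, names):
    # Skip anything ending in ".tmp"; keep everything else.
    return [name for name in names if name.endswith('.tmp')]

# Usage (commented out so this sketch has no filesystem side effects):
# shutil.copytree('src_dir', 'dst_dir', ignore=_glob_ignore)
# shutil.copytree('src_dir', 'dst_dir', ignore=_callable_ignore)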
os.mkdir(TESTFN) try: src = os.path.join(TESTFN, 'cheese') dst = os.path.join(TESTFN, 'shop') os.mkdir(src) os.symlink(src, dst) self.assertRaises(OSError, shutil.rmtree, dst) finally: shutil.rmtree(TESTFN, ignore_errors=True) if hasattr(os, "mkfifo"): # Issue #3002: copyfile and copytree block indefinitely on named pipes def test_copyfile_named_pipe(self) -> None: os.mkfifo(TESTFN) try: self.assertRaises(shutil.SpecialFileError, shutil.copyfile, TESTFN, TESTFN2) self.assertRaises(shutil.SpecialFileError, shutil.copyfile, __file__, TESTFN) finally: os.remove(TESTFN) @support.skip_unless_symlink def test_copytree_named_pipe(self) -> None: os.mkdir(TESTFN) try: subdir = os.path.join(TESTFN, "subdir") os.mkdir(subdir) pipe = os.path.join(subdir, "mypipe") os.mkfifo(pipe) try: shutil.copytree(TESTFN, TESTFN2) except shutil.Error as e: errors = e.args[0] self.assertEqual(len(errors), 1) src, dst, error_msg = errors[0] self.assertEqual("`%s` is a named pipe" % pipe, error_msg) else: self.fail("shutil.Error should have been raised") finally: shutil.rmtree(TESTFN, ignore_errors=True) shutil.rmtree(TESTFN2, ignore_errors=True) def test_copytree_special_func(self) -> None: src_dir = self.mkdtemp() dst_dir = os.path.join(self.mkdtemp(), 'destination') self._write_data(os.path.join(src_dir, 'test.txt'), '123') os.mkdir(os.path.join(src_dir, 'test_dir')) self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456') copied = [] # type: List[Tuple[str, str]] def _copy(src: str, dst: str) -> None: copied.append((src, dst)) shutil.copytree(src_dir, dst_dir, copy_function=_copy) self.assertEqual(len(copied), 2) @support.skip_unless_symlink def test_copytree_dangling_symlinks(self) -> None: # a dangling symlink raises an error at the end src_dir = self.mkdtemp() dst_dir = os.path.join(self.mkdtemp(), 'destination') os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt')) os.mkdir(os.path.join(src_dir, 'test_dir')) self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456') self.assertRaises(Error, shutil.copytree, src_dir, dst_dir) # a dangling symlink is ignored with the proper flag dst_dir = os.path.join(self.mkdtemp(), 'destination2') shutil.copytree(src_dir, dst_dir, ignore_dangling_symlinks=True) self.assertNotIn('test.txt', os.listdir(dst_dir)) # a dangling symlink is copied if symlinks=True dst_dir = os.path.join(self.mkdtemp(), 'destination3') shutil.copytree(src_dir, dst_dir, symlinks=True) self.assertIn('test.txt', os.listdir(dst_dir)) def _copy_file(self, method: Callable[[str, str], None]) -> Tuple[str, str]: fname = 'test.txt' tmpdir = self.mkdtemp() self.write_file([tmpdir, fname]) file1 = os.path.join(tmpdir, fname) tmpdir2 = self.mkdtemp() method(file1, tmpdir2) file2 = os.path.join(tmpdir2, fname) return (file1, file2) @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod') def test_copy(self) -> None: # Ensure that the copied file exists and has the same mode bits. file1, file2 = self._copy_file(shutil.copy) self.assertTrue(os.path.exists(file2)) self.assertEqual(os.stat(file1).st_mode, os.stat(file2).st_mode) @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod') @unittest.skipUnless(hasattr(os, 'utime'), 'requires os.utime') def test_copy2(self) -> None: # Ensure that the copied file exists and has the same mode and # modification time bits. 
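# --- Illustrative sketch (not part of the original test file) ---
# test_copy/test_copy2 above capture the difference between the two helpers:
# shutil.copy() copies data plus permission bits, while shutil.copy2() also
# carries over metadata such as the modification time (roughly copy()
# followed by copystat()).  A self-contained check in a temporary directory:
import os as _os_cp_sketch
import shutil as _shutil_cp_sketch
import tempfile as _tempfile_cp_sketch

_d = _tempfile_cp_sketch.mkdtemp()
_src = _os_cp_sketch.path.join(_d, 'src.txt')
with open(_src, 'w') as _f:
    _f.write('xxx')
_os_cp_sketch.utime(_src, (1, 1))       # give the source ancient timestamps
_dst = _os_cp_sketch.path.join(_d, 'dst.txt')
_shutil_cp_sketch.copy2(_src, _dst)
# copy2 preserves st_mtime, allowing for filesystem timestamp rounding:
assert abs(_os_cp_sketch.stat(_dst).st_mtime -
           _os_cp_sketch.stat(_src).st_mtime) <= 1
_shutil_cp_sketch.rmtree(_d)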
file1, file2 = self._copy_file(shutil.copy2) self.assertTrue(os.path.exists(file2)) file1_stat = os.stat(file1) file2_stat = os.stat(file2) self.assertEqual(file1_stat.st_mode, file2_stat.st_mode) for attr in 'st_atime', 'st_mtime': # The modification times may be truncated in the new file. self.assertLessEqual(getattr(file1_stat, attr), getattr(file2_stat, attr) + 1) if hasattr(os, 'chflags') and hasattr(file1_stat, 'st_flags'): self.assertEqual(getattr(file1_stat, 'st_flags'), getattr(file2_stat, 'st_flags')) @unittest.skipUnless(zlib, "requires zlib") def test_make_tarball(self) -> None: # creating something to tar tmpdir = self.mkdtemp() self.write_file([tmpdir, 'file1'], 'xxx') self.write_file([tmpdir, 'file2'], 'xxx') os.mkdir(os.path.join(tmpdir, 'sub')) self.write_file([tmpdir, 'sub', 'file3'], 'xxx') tmpdir2 = self.mkdtemp() # force shutil to create the directory os.rmdir(tmpdir2) unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0], "source and target should be on same drive") base_name = os.path.join(tmpdir2, 'archive') # working with relative paths to avoid tar warnings old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(splitdrive(base_name)[1], '.') finally: os.chdir(old_dir) # check if the compressed tarball was created tarball = base_name + '.tar.gz' self.assertTrue(os.path.exists(tarball)) # trying an uncompressed one base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(splitdrive(base_name)[1], '.', compress=None) finally: os.chdir(old_dir) tarball = base_name + '.tar' self.assertTrue(os.path.exists(tarball)) def _tarinfo(self, path: str) -> tuple: tar = tarfile.open(path) try: names = tar.getnames() names.sort() return tuple(names) finally: tar.close() def _create_files(self) -> Tuple[str, str, str]: # creating something to tar tmpdir = self.mkdtemp() dist = os.path.join(tmpdir, 'dist') os.mkdir(dist) self.write_file([dist, 'file1'], 'xxx') self.write_file([dist, 'file2'], 'xxx') os.mkdir(os.path.join(dist, 'sub')) self.write_file([dist, 'sub', 'file3'], 'xxx') os.mkdir(os.path.join(dist, 'sub2')) tmpdir2 = self.mkdtemp() base_name = os.path.join(tmpdir2, 'archive') return tmpdir, tmpdir2, base_name @unittest.skipUnless(zlib, "Requires zlib") @unittest.skipUnless(find_executable('tar') and find_executable('gzip'), 'Need the tar command to run') def test_tarfile_vs_tar(self) -> None: tmpdir, tmpdir2, base_name = self._create_files() old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(base_name, 'dist') finally: os.chdir(old_dir) # check if the compressed tarball was created tarball = base_name + '.tar.gz' self.assertTrue(os.path.exists(tarball)) # now create another tarball using `tar` tarball2 = os.path.join(tmpdir, 'archive2.tar.gz') tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist'] gzip_cmd = ['gzip', '-f9', 'archive2.tar'] old_dir = os.getcwd() os.chdir(tmpdir) try: with captured_stdout() as s: spawn(tar_cmd) spawn(gzip_cmd) finally: os.chdir(old_dir) self.assertTrue(os.path.exists(tarball2)) # let's compare both tarballs self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2)) # trying an uncompressed one base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(base_name, 'dist', compress=None) finally: os.chdir(old_dir) tarball = base_name + '.tar' self.assertTrue(os.path.exists(tarball)) # now for a dry_run base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() os.chdir(tmpdir) try: _make_tarball(base_name, 'dist', 
compress=None, dry_run=True) finally: os.chdir(old_dir) tarball = base_name + '.tar' self.assertTrue(os.path.exists(tarball)) @unittest.skipUnless(zlib, "Requires zlib") @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run') def test_make_zipfile(self) -> None: # creating something to tar tmpdir = self.mkdtemp() self.write_file([tmpdir, 'file1'], 'xxx') self.write_file([tmpdir, 'file2'], 'xxx') tmpdir2 = self.mkdtemp() # force shutil to create the directory os.rmdir(tmpdir2) base_name = os.path.join(tmpdir2, 'archive') _make_zipfile(base_name, tmpdir) # check if the compressed tarball was created tarball = base_name + '.zip' self.assertTrue(os.path.exists(tarball)) def test_make_archive(self) -> None: tmpdir = self.mkdtemp() base_name = os.path.join(tmpdir, 'archive') self.assertRaises(ValueError, make_archive, base_name, 'xxx') @unittest.skipUnless(zlib, "Requires zlib") def test_make_archive_owner_group(self) -> None: # testing make_archive with owner and group, with various combinations # this works even if there's not gid/uid support if UID_GID_SUPPORT: group = grp.getgrgid(0).gr_name owner = pwd.getpwuid(0).pw_name else: group = owner = 'root' base_dir, root_dir, base_name = self._create_files() base_name = os.path.join(self.mkdtemp() , 'archive') res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner, group=group) self.assertTrue(os.path.exists(res)) res = make_archive(base_name, 'zip', root_dir, base_dir) self.assertTrue(os.path.exists(res)) res = make_archive(base_name, 'tar', root_dir, base_dir, owner=owner, group=group) self.assertTrue(os.path.exists(res)) res = make_archive(base_name, 'tar', root_dir, base_dir, owner='kjhkjhkjg', group='oihohoh') self.assertTrue(os.path.exists(res)) @unittest.skipUnless(zlib, "Requires zlib") @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") def test_tarfile_root_owner(self) -> None: tmpdir, tmpdir2, base_name = self._create_files() old_dir = os.getcwd() os.chdir(tmpdir) group = grp.getgrgid(0).gr_name owner = pwd.getpwuid(0).pw_name try: archive_name = _make_tarball(base_name, 'dist', compress=None, owner=owner, group=group) finally: os.chdir(old_dir) # check if the compressed tarball was created self.assertTrue(os.path.exists(archive_name)) # now checks the rights archive = tarfile.open(archive_name) try: for member in archive.getmembers(): self.assertEqual(member.uid, 0) self.assertEqual(member.gid, 0) finally: archive.close() def test_make_archive_cwd(self) -> None: current_dir = os.getcwd() def _breaks(*args: Any, **kw: Any) -> None: raise RuntimeError() register_archive_format('xxx', _breaks, [], 'xxx file') try: try: make_archive('xxx', 'xxx', root_dir=self.mkdtemp()) except Exception: pass self.assertEqual(os.getcwd(), current_dir) finally: unregister_archive_format('xxx') def test_register_archive_format(self) -> None: self.assertRaises(TypeError, register_archive_format, 'xxx', 1) self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: 1/0, 1) self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: 1/0, [(1, 2), (1, 2, 3)]) register_archive_format('xxx', lambda: 1/0, [('x', 2)], 'xxx file') formats = [name for name, params in get_archive_formats()] self.assertIn('xxx', formats) unregister_archive_format('xxx') formats = [name for name, params in get_archive_formats()] self.assertNotIn('xxx', formats) def _compare_dirs(self, dir1: str, dir2: str) -> List[str]: # check that dir1 and dir2 are equivalent, # return the diff diff = [] # type: List[str] for root, dirs, files in 
os.walk(dir1): for file_ in files: path = os.path.join(root, file_) target_path = os.path.join(dir2, os.path.split(path)[-1]) if not os.path.exists(target_path): diff.append(file_) return diff @unittest.skipUnless(zlib, "Requires zlib") def test_unpack_archive(self) -> None: formats = ['tar', 'gztar', 'zip'] if BZ2_SUPPORTED: formats.append('bztar') for format in formats: tmpdir = self.mkdtemp() base_dir, root_dir, base_name = self._create_files() tmpdir2 = self.mkdtemp() filename = make_archive(base_name, format, root_dir, base_dir) # let's try to unpack it now unpack_archive(filename, tmpdir2) diff = self._compare_dirs(tmpdir, tmpdir2) self.assertEqual(diff, []) # and again, this time with the format specified tmpdir3 = self.mkdtemp() unpack_archive(filename, tmpdir3, format=format) diff = self._compare_dirs(tmpdir, tmpdir3) self.assertEqual(diff, []) self.assertRaises(shutil.ReadError, unpack_archive, TESTFN) self.assertRaises(ValueError, unpack_archive, TESTFN, format='xxx') def test_unpack_registery(self) -> None: formats = get_unpack_formats() def _boo(filename: str, extract_dir: str, extra: int) -> None: self.assertEqual(extra, 1) self.assertEqual(filename, 'stuff.boo') self.assertEqual(extract_dir, 'xx') register_unpack_format('Boo', ['.boo', '.b2'], _boo, [('extra', 1)]) unpack_archive('stuff.boo', 'xx') # trying to register a .boo unpacker again self.assertRaises(RegistryError, register_unpack_format, 'Boo2', ['.boo'], _boo) # should work now unregister_unpack_format('Boo') register_unpack_format('Boo2', ['.boo'], _boo) self.assertIn(('Boo2', ['.boo'], ''), get_unpack_formats()) self.assertNotIn(('Boo', ['.boo'], ''), get_unpack_formats()) # let's leave a clean state unregister_unpack_format('Boo2') self.assertEqual(get_unpack_formats(), formats) class TestMove(unittest.TestCase): def setUp(self) -> None: filename = "foo" self.src_dir = tempfile.mkdtemp() self.dst_dir = tempfile.mkdtemp() self.src_file = os.path.join(self.src_dir, filename) self.dst_file = os.path.join(self.dst_dir, filename) with open(self.src_file, "wb") as f: f.write(b"spam") def tearDown(self) -> None: for d in (self.src_dir, self.dst_dir): try: if d: shutil.rmtree(d) except: pass def _check_move_file(self, src: str, dst: str, real_dst: str) -> None: with open(src, "rb") as f: contents = f.read() shutil.move(src, dst) with open(real_dst, "rb") as f: self.assertEqual(contents, f.read()) self.assertFalse(os.path.exists(src)) def _check_move_dir(self, src: str, dst: str, real_dst: str) -> None: contents = sorted(os.listdir(src)) shutil.move(src, dst) self.assertEqual(contents, sorted(os.listdir(real_dst))) self.assertFalse(os.path.exists(src)) def test_move_file(self) -> None: # Move a file to another location on the same filesystem. self._check_move_file(self.src_file, self.dst_file, self.dst_file) def test_move_file_to_dir(self) -> None: # Move a file inside an existing dir on the same filesystem. self._check_move_file(self.src_file, self.dst_dir, self.dst_file) @mock_rename def test_move_file_other_fs(self) -> None: # Move a file to an existing dir on another filesystem. self.test_move_file() @mock_rename def test_move_file_to_dir_other_fs(self) -> None: # Move a file to another location on another filesystem. self.test_move_file_to_dir() def test_move_dir(self) -> None: # Move a dir to another location on the same filesystem. 
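        # shutil.move() renames the tree when source and destination are on the
        # same filesystem; otherwise it falls back to copying (copy2/copytree)
        # and then removing the source.  The @mock_rename variants in this class
        # presumably force that fallback by making os.rename fail.  Sketch of
        # the documented semantics:
        #
        #     shutil.move(src_dir, dst_dir)   # rename if possible, else copy + rm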
dst_dir = tempfile.mktemp() try: self._check_move_dir(self.src_dir, dst_dir, dst_dir) finally: try: shutil.rmtree(dst_dir) except: pass @mock_rename def test_move_dir_other_fs(self) -> None: # Move a dir to another location on another filesystem. self.test_move_dir() def test_move_dir_to_dir(self) -> None: # Move a dir inside an existing dir on the same filesystem. self._check_move_dir(self.src_dir, self.dst_dir, os.path.join(self.dst_dir, os.path.basename(self.src_dir))) @mock_rename def test_move_dir_to_dir_other_fs(self) -> None: # Move a dir inside an existing dir on another filesystem. self.test_move_dir_to_dir() def test_existing_file_inside_dest_dir(self) -> None: # A file with the same name inside the destination dir already exists. with open(self.dst_file, "wb"): pass self.assertRaises(shutil.Error, shutil.move, self.src_file, self.dst_dir) def test_dont_move_dir_in_itself(self) -> None: # Moving a dir inside itself raises an Error. dst = os.path.join(self.src_dir, "bar") self.assertRaises(shutil.Error, shutil.move, self.src_dir, dst) def test_destinsrc_false_negative(self) -> None: os.mkdir(TESTFN) try: for src, dst in [('srcdir', 'srcdir/dest')]: src = os.path.join(TESTFN, src) dst = os.path.join(TESTFN, dst) self.assertTrue(shutil._destinsrc(src, dst), msg='_destinsrc() wrongly concluded that ' 'dst (%s) is not in src (%s)' % (dst, src)) finally: shutil.rmtree(TESTFN, ignore_errors=True) def test_destinsrc_false_positive(self) -> None: os.mkdir(TESTFN) try: for src, dst in [('srcdir', 'src/dest'), ('srcdir', 'srcdir.new')]: src = os.path.join(TESTFN, src) dst = os.path.join(TESTFN, dst) self.assertFalse(shutil._destinsrc(src, dst), msg='_destinsrc() wrongly concluded that ' 'dst (%s) is in src (%s)' % (dst, src)) finally: shutil.rmtree(TESTFN, ignore_errors=True) class TestCopyFile(unittest.TestCase): _delete = False class Faux(object): _entered = False _exited_with = None # type: tuple _raised = False def __init__(self, raise_in_exit: bool = False, suppress_at_exit: bool = True) -> None: self._raise_in_exit = raise_in_exit self._suppress_at_exit = suppress_at_exit def read(self, *args: Any) -> str: return '' def __enter__(self) -> None: self._entered = True def __exit__(self, exc_type: type, exc_val: BaseException, exc_tb: TracebackType) -> bool: self._exited_with = exc_type, exc_val, exc_tb if self._raise_in_exit: self._raised = True raise IOError("Cannot close") return self._suppress_at_exit def tearDown(self) -> None: shutil.open = open def _set_shutil_open(self, func: Any) -> None: shutil.open = func self._delete = True def test_w_source_open_fails(self) -> None: def _open(filename: str, mode: str= 'r') -> BinaryIO: if filename == 'srcfile': raise IOError('Cannot open "srcfile"') assert 0 # shouldn't reach here. self._set_shutil_open(_open) self.assertRaises(IOError, shutil.copyfile, 'srcfile', 'destfile') def test_w_dest_open_fails(self) -> None: srcfile = TestCopyFile.Faux() def _open(filename: str, mode: str = 'r') -> TestCopyFile.Faux: if filename == 'srcfile': return srcfile if filename == 'destfile': raise IOError('Cannot open "destfile"') assert 0 # shouldn't reach here. 
        self._set_shutil_open(_open)

        shutil.copyfile('srcfile', 'destfile')
        self.assertTrue(srcfile._entered)
        self.assertTrue(srcfile._exited_with[0] is IOError)
        self.assertEqual(srcfile._exited_with[1].args,
                         ('Cannot open "destfile"',))

    def test_w_dest_close_fails(self) -> None:

        srcfile = TestCopyFile.Faux()
        destfile = TestCopyFile.Faux(True)

        def _open(filename: str, mode: str = 'r') -> TestCopyFile.Faux:
            if filename == 'srcfile':
                return srcfile
            if filename == 'destfile':
                return destfile
            assert 0  # shouldn't reach here.

        self._set_shutil_open(_open)

        shutil.copyfile('srcfile', 'destfile')
        self.assertTrue(srcfile._entered)
        self.assertTrue(destfile._entered)
        self.assertTrue(destfile._raised)
        self.assertTrue(srcfile._exited_with[0] is IOError)
        self.assertEqual(srcfile._exited_with[1].args,
                         ('Cannot close',))

    def test_w_source_close_fails(self) -> None:

        srcfile = TestCopyFile.Faux(True)
        destfile = TestCopyFile.Faux()

        def _open(filename: str, mode: str = 'r') -> TestCopyFile.Faux:
            if filename == 'srcfile':
                return srcfile
            if filename == 'destfile':
                return destfile
            assert 0  # shouldn't reach here.

        self._set_shutil_open(_open)

        self.assertRaises(IOError,
                          shutil.copyfile, 'srcfile', 'destfile')
        self.assertTrue(srcfile._entered)
        self.assertTrue(destfile._entered)
        self.assertFalse(destfile._raised)
        self.assertTrue(srcfile._exited_with[0] is None)
        self.assertTrue(srcfile._raised)

    def test_move_dir_caseinsensitive(self) -> None:
        # Renames a folder to the same name
        # but a different case.

        self.src_dir = tempfile.mkdtemp()
        dst_dir = os.path.join(
                os.path.dirname(self.src_dir),
                os.path.basename(self.src_dir).upper())
        self.assertNotEqual(self.src_dir, dst_dir)

        try:
            shutil.move(self.src_dir, dst_dir)
            self.assertTrue(os.path.isdir(dst_dir))
        finally:
            if os.path.exists(dst_dir):
                os.rmdir(dst_dir)


def test_main() -> None:
    support.run_unittest(TestShutil, TestMove, TestCopyFile)

if __name__ == '__main__':
    test_main()
mypy-0.560/test-data/stdlib-samples/3.2/test/test_subprocess.py0000644€tŠÔÚ€2›s®0000021742013215007205030570 0ustar jukkaDROPBOX\Domain Users00000000000000import unittest
from test import support
import subprocess
import sys
import signal
import io
import os
import errno
import tempfile
import time
import re
import sysconfig
import warnings
import select
import shutil
import gc
import resource

from typing import Any, Dict, Callable, Iterable, List, Set, Tuple, cast

mswindows = (sys.platform == "win32")

#
# Depends on the following external programs: Python
#

if mswindows:
    SETBINARY = ('import msvcrt; msvcrt.setmode(sys.stdout.fileno(), '
                 'os.O_BINARY);')
else:
    SETBINARY = ''

try:
    mkstemp = tempfile.mkstemp
except AttributeError:
    # tempfile.mkstemp is not available
    def _mkstemp() -> Tuple[int, str]:
        """Replacement for mkstemp, calling mktemp."""
        fname = tempfile.mktemp()
        return os.open(fname, os.O_RDWR|os.O_CREAT), fname
    mkstemp = cast(Any, _mkstemp)


class BaseTestCase(unittest.TestCase):
    def setUp(self) -> None:
        # Try to minimize the number of children we have so this test
        # doesn't crash on some buildbots (Alphas in particular).
        support.reap_children()

    def tearDown(self) -> None:
        for inst in subprocess._active:
            inst.wait()
        subprocess._cleanup()
        self.assertFalse(subprocess._active, "subprocess._active not empty")

    def assertStderrEqual(self, stderr: bytes, expected: bytes,
                          msg: object = None) -> None:
        # In a debug build, stuff like "[6580 refs]" is printed to stderr at
        # shutdown time. That frustrates tests trying to check stderr produced
        # from a spawned Python process.
actual = support.strip_python_stderr(stderr) self.assertEqual(actual, expected, msg) class ProcessTestCase(BaseTestCase): def test_call_seq(self) -> None: # call() function with sequence argument rc = subprocess.call([sys.executable, "-c", "import sys; sys.exit(47)"]) self.assertEqual(rc, 47) def test_check_call_zero(self) -> None: # check_call() function with zero return code rc = subprocess.check_call([sys.executable, "-c", "import sys; sys.exit(0)"]) self.assertEqual(rc, 0) def test_check_call_nonzero(self) -> None: # check_call() function with non-zero return code with self.assertRaises(subprocess.CalledProcessError) as c: subprocess.check_call([sys.executable, "-c", "import sys; sys.exit(47)"]) self.assertEqual(c.exception.returncode, 47) def test_check_output(self) -> None: # check_output() function with zero return code output = subprocess.check_output( [sys.executable, "-c", "print('BDFL')"]) self.assertIn(b'BDFL', cast(Any, output)) # see #39 def test_check_output_nonzero(self) -> None: # check_call() function with non-zero return code with self.assertRaises(subprocess.CalledProcessError) as c: subprocess.check_output( [sys.executable, "-c", "import sys; sys.exit(5)"]) self.assertEqual(c.exception.returncode, 5) def test_check_output_stderr(self) -> None: # check_output() function stderr redirected to stdout output = subprocess.check_output( [sys.executable, "-c", "import sys; sys.stderr.write('BDFL')"], stderr=subprocess.STDOUT) self.assertIn(b'BDFL', cast(Any, output)) # see #39 def test_check_output_stdout_arg(self) -> None: # check_output() function stderr redirected to stdout with self.assertRaises(ValueError) as c: output = subprocess.check_output( [sys.executable, "-c", "print('will not be run')"], stdout=sys.stdout) self.fail("Expected ValueError when stdout arg supplied.") self.assertIn('stdout', c.exception.args[0]) def test_call_kwargs(self) -> None: # call() function with keyword args newenv = os.environ.copy() newenv["FRUIT"] = "banana" rc = subprocess.call([sys.executable, "-c", 'import sys, os;' 'sys.exit(os.getenv("FRUIT")=="banana")'], env=newenv) self.assertEqual(rc, 1) def test_invalid_args(self) -> None: # Popen() called with invalid arguments should raise TypeError # but Popen.__del__ should not complain (issue #12085) with support.captured_stderr() as s: self.assertRaises(TypeError, subprocess.Popen, invalid_arg_name=1) argcount = subprocess.Popen.__init__.__code__.co_argcount too_many_args = [0] * (argcount + 1) self.assertRaises(TypeError, subprocess.Popen, *too_many_args) self.assertEqual(s.getvalue(), '') def test_stdin_none(self) -> None: # .stdin is None when not redirected p = subprocess.Popen([sys.executable, "-c", 'print("banana")'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) p.wait() self.assertEqual(p.stdin, None) def test_stdout_none(self) -> None: # .stdout is None when not redirected p = subprocess.Popen([sys.executable, "-c", 'print(" this bit of output is from a ' 'test of stdout in a different ' 'process ...")'], stdin=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdin.close) self.addCleanup(p.stderr.close) p.wait() self.assertEqual(p.stdout, None) def test_stderr_none(self) -> None: # .stderr is None when not redirected p = subprocess.Popen([sys.executable, "-c", 'print("banana")'], stdin=subprocess.PIPE, stdout=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stdin.close) p.wait() self.assertEqual(p.stderr, None) def 
test_executable_with_cwd(self) -> None: python_dir = os.path.dirname(os.path.realpath(sys.executable)) p = subprocess.Popen(["somethingyoudonthave", "-c", "import sys; sys.exit(47)"], executable=sys.executable, cwd=python_dir) p.wait() self.assertEqual(p.returncode, 47) @unittest.skipIf(sysconfig.is_python_build(), "need an installed Python. See #7774") def test_executable_without_cwd(self) -> None: # For a normal installation, it should work without 'cwd' # argument. For test runs in the build directory, see #7774. p = subprocess.Popen(["somethingyoudonthave", "-c", "import sys; sys.exit(47)"], executable=sys.executable) p.wait() self.assertEqual(p.returncode, 47) def test_stdin_pipe(self) -> None: # stdin redirection p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.exit(sys.stdin.read() == "pear")'], stdin=subprocess.PIPE) p.stdin.write(b"pear") p.stdin.close() p.wait() self.assertEqual(p.returncode, 1) def test_stdin_filedes(self) -> None: # stdin is set to open file descriptor tf = tempfile.TemporaryFile() self.addCleanup(tf.close) d = tf.fileno() os.write(d, b"pear") os.lseek(d, 0, 0) p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.exit(sys.stdin.read() == "pear")'], stdin=d) p.wait() self.assertEqual(p.returncode, 1) def test_stdin_fileobj(self) -> None: # stdin is set to open file object tf = tempfile.TemporaryFile() self.addCleanup(tf.close) tf.write(b"pear") tf.seek(0) p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.exit(sys.stdin.read() == "pear")'], stdin=tf) p.wait() self.assertEqual(p.returncode, 1) def test_stdout_pipe(self) -> None: # stdout redirection p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stdout.write("orange")'], stdout=subprocess.PIPE) self.addCleanup(p.stdout.close) self.assertEqual(p.stdout.read(), b"orange") def test_stdout_filedes(self) -> None: # stdout is set to open file descriptor tf = tempfile.TemporaryFile() self.addCleanup(tf.close) d = tf.fileno() p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stdout.write("orange")'], stdout=d) p.wait() os.lseek(d, 0, 0) self.assertEqual(os.read(d, 1024), b"orange") def test_stdout_fileobj(self) -> None: # stdout is set to open file object tf = tempfile.TemporaryFile() self.addCleanup(tf.close) p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stdout.write("orange")'], stdout=tf) p.wait() tf.seek(0) self.assertEqual(tf.read(), b"orange") def test_stderr_pipe(self) -> None: # stderr redirection p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stderr.write("strawberry")'], stderr=subprocess.PIPE) self.addCleanup(p.stderr.close) self.assertStderrEqual(p.stderr.read(), b"strawberry") def test_stderr_filedes(self) -> None: # stderr is set to open file descriptor tf = tempfile.TemporaryFile() self.addCleanup(tf.close) d = tf.fileno() p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stderr.write("strawberry")'], stderr=d) p.wait() os.lseek(d, 0, 0) self.assertStderrEqual(os.read(d, 1024), b"strawberry") def test_stderr_fileobj(self) -> None: # stderr is set to open file object tf = tempfile.TemporaryFile() self.addCleanup(tf.close) p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stderr.write("strawberry")'], stderr=tf) p.wait() tf.seek(0) self.assertStderrEqual(tf.read(), b"strawberry") def test_stdout_stderr_pipe(self) -> None: # capture stdout and stderr to the same pipe p = subprocess.Popen([sys.executable, "-c", 'import sys;' 'sys.stdout.write("apple");' 'sys.stdout.flush();' 'sys.stderr.write("orange")'], 
stdout=subprocess.PIPE, stderr=subprocess.STDOUT) self.addCleanup(p.stdout.close) self.assertStderrEqual(p.stdout.read(), b"appleorange") def test_stdout_stderr_file(self) -> None: # capture stdout and stderr to the same open file tf = tempfile.TemporaryFile() self.addCleanup(tf.close) p = subprocess.Popen([sys.executable, "-c", 'import sys;' 'sys.stdout.write("apple");' 'sys.stdout.flush();' 'sys.stderr.write("orange")'], stdout=tf, stderr=tf) p.wait() tf.seek(0) self.assertStderrEqual(tf.read(), b"appleorange") def test_stdout_filedes_of_stdout(self) -> None: # stdout is set to 1 (#1531862). cmd = r"import sys, os; sys.exit(os.write(sys.stdout.fileno(), b'.\n'))" rc = subprocess.call([sys.executable, "-c", cmd], stdout=1) self.assertEqual(rc, 2) def test_cwd(self) -> None: tmpdir = tempfile.gettempdir() # We cannot use os.path.realpath to canonicalize the path, # since it doesn't expand Tru64 {memb} strings. See bug 1063571. cwd = os.getcwd() os.chdir(tmpdir) tmpdir = os.getcwd() os.chdir(cwd) p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' 'sys.stdout.write(os.getcwd())'], stdout=subprocess.PIPE, cwd=tmpdir) self.addCleanup(p.stdout.close) normcase = os.path.normcase self.assertEqual(normcase(p.stdout.read().decode("utf-8")), normcase(tmpdir)) def test_env(self) -> None: newenv = os.environ.copy() newenv["FRUIT"] = "orange" with subprocess.Popen([sys.executable, "-c", 'import sys,os;' 'sys.stdout.write(os.getenv("FRUIT"))'], stdout=subprocess.PIPE, env=newenv) as p: stdout, stderr = p.communicate() self.assertEqual(stdout, b"orange") # Windows requires at least the SYSTEMROOT environment variable to start # Python @unittest.skipIf(sys.platform == 'win32', 'cannot test an empty env on Windows') @unittest.skipIf(sysconfig.get_config_var('Py_ENABLE_SHARED') is not None, 'the python library cannot be loaded ' 'with an empty environment') def test_empty_env(self) -> None: with subprocess.Popen([sys.executable, "-c", 'import os; ' 'print(list(os.environ.keys()))'], stdout=subprocess.PIPE, env={}) as p: stdout, stderr = p.communicate() self.assertIn(stdout.strip(), [b"[]", # Mac OS X adds __CF_USER_TEXT_ENCODING variable to an empty # environment b"['__CF_USER_TEXT_ENCODING']"]) def test_communicate_stdin(self) -> None: p = subprocess.Popen([sys.executable, "-c", 'import sys;' 'sys.exit(sys.stdin.read() == "pear")'], stdin=subprocess.PIPE) p.communicate(b"pear") self.assertEqual(p.returncode, 1) def test_communicate_stdout(self) -> None: p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stdout.write("pineapple")'], stdout=subprocess.PIPE) (stdout, stderr) = p.communicate() self.assertEqual(stdout, b"pineapple") self.assertEqual(stderr, None) def test_communicate_stderr(self) -> None: p = subprocess.Popen([sys.executable, "-c", 'import sys; sys.stderr.write("pineapple")'], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() self.assertEqual(stdout, None) self.assertStderrEqual(stderr, b"pineapple") def test_communicate(self) -> None: p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' 'sys.stderr.write("pineapple");' 'sys.stdout.write(sys.stdin.read())'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) self.addCleanup(p.stdin.close) (stdout, stderr) = p.communicate(b"banana") self.assertEqual(stdout, b"banana") self.assertStderrEqual(stderr, b"pineapple") # Test for the fd leak reported in http://bugs.python.org/issue2791. 
def test_communicate_pipe_fd_leak(self) -> None: for stdin_pipe in (False, True): for stdout_pipe in (False, True): for stderr_pipe in (False, True): options = {} # type: Dict[str, Any] if stdin_pipe: options['stdin'] = subprocess.PIPE if stdout_pipe: options['stdout'] = subprocess.PIPE if stderr_pipe: options['stderr'] = subprocess.PIPE if not options: continue p = subprocess.Popen([sys.executable, "-c", "pass"], **options) p.communicate() if p.stdin is not None: self.assertTrue(p.stdin.closed) if p.stdout is not None: self.assertTrue(p.stdout.closed) if p.stderr is not None: self.assertTrue(p.stderr.closed) def test_communicate_returns(self) -> None: # communicate() should return None if no redirection is active p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(47)"]) (stdout, stderr) = p.communicate() self.assertEqual(stdout, None) self.assertEqual(stderr, None) def test_communicate_pipe_buf(self) -> None: # communicate() with writes larger than pipe_buf # This test will probably deadlock rather than fail, if # communicate() does not work properly. x, y = os.pipe() if mswindows: pipe_buf = 512 else: pipe_buf = os.fpathconf(x, "PC_PIPE_BUF") os.close(x) os.close(y) p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' 'sys.stdout.write(sys.stdin.read(47));' 'sys.stderr.write("xyz"*%d);' 'sys.stdout.write(sys.stdin.read())' % pipe_buf], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) self.addCleanup(p.stdin.close) string_to_write = b"abc"*pipe_buf (stdout, stderr) = p.communicate(string_to_write) self.assertEqual(stdout, string_to_write) def test_writes_before_communicate(self) -> None: # stdin.write before communicate() p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' 'sys.stdout.write(sys.stdin.read())'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) self.addCleanup(p.stdin.close) p.stdin.write(b"banana") (stdout, stderr) = p.communicate(b"split") self.assertEqual(stdout, b"bananasplit") self.assertStderrEqual(stderr, b"") def test_universal_newlines(self) -> None: p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' + SETBINARY + 'sys.stdout.write(sys.stdin.readline());' 'sys.stdout.flush();' 'sys.stdout.write("line2\\n");' 'sys.stdout.flush();' 'sys.stdout.write(sys.stdin.read());' 'sys.stdout.flush();' 'sys.stdout.write("line4\\n");' 'sys.stdout.flush();' 'sys.stdout.write("line5\\r\\n");' 'sys.stdout.flush();' 'sys.stdout.write("line6\\r");' 'sys.stdout.flush();' 'sys.stdout.write("\\nline7");' 'sys.stdout.flush();' 'sys.stdout.write("\\nline8");'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=1) p.stdin.write("line1\n") self.assertEqual(p.stdout.readline(), "line1\n") p.stdin.write("line3\n") p.stdin.close() self.addCleanup(p.stdout.close) self.assertEqual(p.stdout.readline(), "line2\n") self.assertEqual(p.stdout.read(6), "line3\n") self.assertEqual(p.stdout.read(), "line4\nline5\nline6\nline7\nline8") def test_universal_newlines_communicate(self) -> None: # universal newlines through communicate() p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' + SETBINARY + 'sys.stdout.write("line2\\n");' 'sys.stdout.flush();' 'sys.stdout.write("line4\\n");' 'sys.stdout.flush();' 'sys.stdout.write("line5\\r\\n");' 'sys.stdout.flush();' 'sys.stdout.write("line6\\r");' 'sys.stdout.flush();' 'sys.stdout.write("\\nline7");' 'sys.stdout.flush();' 
'sys.stdout.write("\\nline8");'], stderr=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=1) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) # BUG: can't give a non-empty stdin because it breaks both the # select- and poll-based communicate() implementations. (stdout, stderr) = p.communicate() self.assertEqual(stdout, "line2\nline4\nline5\nline6\nline7\nline8") def test_universal_newlines_communicate_stdin(self) -> None: # universal newlines through communicate(), with only stdin p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' + SETBINARY + '''\nif True: s = sys.stdin.readline() assert s == "line1\\n", repr(s) s = sys.stdin.read() assert s == "line3\\n", repr(s) '''], stdin=subprocess.PIPE, universal_newlines=1) (stdout, stderr) = p.communicate("line1\nline3\n") self.assertEqual(p.returncode, 0) def test_no_leaking(self) -> None: # Make sure we leak no resources if not mswindows: max_handles = 1026 # too much for most UNIX systems else: max_handles = 2050 # too much for (at least some) Windows setups handles = [] # type: List[int] tmpdir = tempfile.mkdtemp() try: for i in range(max_handles): try: tmpfile = os.path.join(tmpdir, support.TESTFN) handles.append(os.open(tmpfile, os.O_WRONLY|os.O_CREAT)) except OSError as e: if e.errno != errno.EMFILE: raise break else: self.skipTest("failed to reach the file descriptor limit " "(tried %d)" % max_handles) # Close a couple of them (should be enough for a subprocess) for i in range(10): os.close(handles.pop()) # Loop creating some subprocesses. If one of them leaks some fds, # the next loop iteration will fail by reaching the max fd limit. for i in range(15): p = subprocess.Popen([sys.executable, "-c", "import sys;" "sys.stdout.write(sys.stdin.read())"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) data = p.communicate(b"lime")[0] self.assertEqual(data, b"lime") finally: for h in handles: os.close(h) shutil.rmtree(tmpdir) def test_list2cmdline(self) -> None: self.assertEqual(subprocess.list2cmdline(['a b c', 'd', 'e']), '"a b c" d e') self.assertEqual(subprocess.list2cmdline(['ab"c', '\\', 'd']), 'ab\\"c \\ d') self.assertEqual(subprocess.list2cmdline(['ab"c', ' \\', 'd']), 'ab\\"c " \\\\" d') self.assertEqual(subprocess.list2cmdline(['a\\\\\\b', 'de fg', 'h']), 'a\\\\\\b "de fg" h') self.assertEqual(subprocess.list2cmdline(['a\\"b', 'c', 'd']), 'a\\\\\\"b c d') self.assertEqual(subprocess.list2cmdline(['a\\\\b c', 'd', 'e']), '"a\\\\b c" d e') self.assertEqual(subprocess.list2cmdline(['a\\\\b\\ c', 'd', 'e']), '"a\\\\b\\ c" d e') self.assertEqual(subprocess.list2cmdline(['ab', '']), 'ab ""') def test_poll(self) -> None: p = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(1)"]) count = 0 while p.poll() is None: time.sleep(0.1) count += 1 # We expect that the poll loop probably went around about 10 times, # but, based on system scheduling we can't control, it's possible # poll() never returned None. It "should be" very rare that it # didn't go around at least twice. self.assertGreaterEqual(count, 2) # Subsequent invocations should just return the returncode self.assertEqual(p.poll(), 0) def test_wait(self) -> None: p = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(2)"]) self.assertEqual(p.wait(), 0) # Subsequent invocations should just return the returncode self.assertEqual(p.wait(), 0) def test_invalid_bufsize(self) -> None: # an invalid type of the bufsize argument should raise # TypeError. 
with self.assertRaises(TypeError): subprocess.Popen([sys.executable, "-c", "pass"], cast(Any, "orange")) def test_bufsize_is_none(self) -> None: # bufsize=None should be the same as bufsize=0. p = subprocess.Popen([sys.executable, "-c", "pass"], None) self.assertEqual(p.wait(), 0) # Again with keyword arg p = subprocess.Popen([sys.executable, "-c", "pass"], bufsize=None) self.assertEqual(p.wait(), 0) def test_leaking_fds_on_error(self) -> None: # see bug #5179: Popen leaks file descriptors to PIPEs if # the child fails to execute; this will eventually exhaust # the maximum number of open fds. 1024 seems a very common # value for that limit, but Windows has 2048, so we loop # 1024 times (each call leaked two fds). for i in range(1024): # Windows raises IOError. Others raise OSError. with self.assertRaises(EnvironmentError) as c: subprocess.Popen(['nonexisting_i_hope'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) # ignore errors that indicate the command was not found if c.exception.errno not in (errno.ENOENT, errno.EACCES): raise c.exception def test_issue8780(self) -> None: # Ensure that stdout is inherited from the parent # if stdout=PIPE is not used code = ';'.join([ 'import subprocess, sys', 'retcode = subprocess.call(' "[sys.executable, '-c', 'print(\"Hello World!\")'])", 'assert retcode == 0']) output = subprocess.check_output([sys.executable, '-c', code]) self.assertTrue(output.startswith(b'Hello World!'), ascii(output)) def test_handles_closed_on_exception(self) -> None: # If CreateProcess exits with an error, ensure the # duplicate output handles are released ifhandle, ifname = mkstemp() ofhandle, ofname = mkstemp() efhandle, efname = mkstemp() try: subprocess.Popen (["*"], stdin=ifhandle, stdout=ofhandle, stderr=efhandle) except OSError: os.close(ifhandle) os.remove(ifname) os.close(ofhandle) os.remove(ofname) os.close(efhandle) os.remove(efname) self.assertFalse(os.path.exists(ifname)) self.assertFalse(os.path.exists(ofname)) self.assertFalse(os.path.exists(efname)) def test_communicate_epipe(self) -> None: # Issue 10963: communicate() should hide EPIPE p = subprocess.Popen([sys.executable, "-c", 'pass'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) self.addCleanup(p.stdin.close) p.communicate(b"x" * 2**20) def test_communicate_epipe_only_stdin(self) -> None: # Issue 10963: communicate() should hide EPIPE p = subprocess.Popen([sys.executable, "-c", 'pass'], stdin=subprocess.PIPE) self.addCleanup(p.stdin.close) time.sleep(2) p.communicate(b"x" * 2**20) @unittest.skipUnless(hasattr(signal, 'SIGALRM'), "Requires signal.SIGALRM") def test_communicate_eintr(self) -> None: # Issue #12493: communicate() should handle EINTR def handler(signum, frame): pass old_handler = signal.signal(signal.SIGALRM, handler) self.addCleanup(signal.signal, signal.SIGALRM, old_handler) # the process is running for 2 seconds args = [sys.executable, "-c", 'import time; time.sleep(2)'] for stream in ('stdout', 'stderr'): kw = {stream: subprocess.PIPE} # type: Dict[str, Any] with subprocess.Popen(args, **kw) as process: signal.alarm(1) # communicate() will be interrupted by SIGALRM process.communicate() # context manager class _SuppressCoreFiles(object): """Try to prevent core files from being created.""" old_limit = None # type: Tuple[int, int] def __enter__(self) -> None: """Try to save previous ulimit, then set it to (0, 0).""" if resource is not None: try: self.old_limit = resource.getrlimit(resource.RLIMIT_CORE) 
resource.setrlimit(resource.RLIMIT_CORE, (0, 0)) except (ValueError, resource.error): pass if sys.platform == 'darwin': # Check if the 'Crash Reporter' on OSX was configured # in 'Developer' mode and warn that it will get triggered # when it is. # # This assumes that this context manager is used in tests # that might trigger the next manager. value = subprocess.Popen(['/usr/bin/defaults', 'read', 'com.apple.CrashReporter', 'DialogType'], stdout=subprocess.PIPE).communicate()[0] if value.strip() == b'developer': print("this tests triggers the Crash Reporter, " "that is intentional", end='') sys.stdout.flush() def __exit__(self, *args: Any) -> None: """Return core file behavior to default.""" if self.old_limit is None: return if resource is not None: try: resource.setrlimit(resource.RLIMIT_CORE, self.old_limit) except (ValueError, resource.error): pass @unittest.skipIf(mswindows, "POSIX specific tests") class POSIXProcessTestCase(BaseTestCase): def test_exceptions(self) -> None: nonexistent_dir = "/_this/pa.th/does/not/exist" try: os.chdir(nonexistent_dir) except OSError as e: # This avoids hard coding the errno value or the OS perror() # string and instead capture the exception that we want to see # below for comparison. desired_exception = e desired_exception.strerror += ': ' + repr(sys.executable) else: self.fail("chdir to nonexistant directory %s succeeded." % nonexistent_dir) # Error in the child re-raised in the parent. try: p = subprocess.Popen([sys.executable, "-c", ""], cwd=nonexistent_dir) except OSError as e: # Test that the child process chdir failure actually makes # it up to the parent process as the correct exception. self.assertEqual(desired_exception.errno, e.errno) self.assertEqual(desired_exception.strerror, e.strerror) else: self.fail("Expected OSError: %s" % desired_exception) def test_restore_signals(self) -> None: # Code coverage for both values of restore_signals to make sure it # at least does not blow up. # A test for behavior would be complex. Contributions welcome. subprocess.call([sys.executable, "-c", ""], restore_signals=True) subprocess.call([sys.executable, "-c", ""], restore_signals=False) def test_start_new_session(self) -> None: # For code coverage of calling setsid(). We don't care if we get an # EPERM error from it depending on the test execution environment, that # still indicates that it was called. try: output = subprocess.check_output( [sys.executable, "-c", "import os; print(os.getpgid(os.getpid()))"], start_new_session=True) except OSError as e: if e.errno != errno.EPERM: raise else: parent_pgid = os.getpgid(os.getpid()) child_pgid = int(output) self.assertNotEqual(parent_pgid, child_pgid) def test_run_abort(self) -> None: # returncode handles signal termination with _SuppressCoreFiles(): p = subprocess.Popen([sys.executable, "-c", 'import os; os.abort()']) p.wait() self.assertEqual(-p.returncode, signal.SIGABRT) def test_preexec(self) -> None: # DISCLAIMER: Setting environment variables is *not* a good use # of a preexec_fn. This is merely a test. 
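        # The supported way to hand the child an extra environment variable is
        # the env argument; a sketch (not what this test is measuring):
        #
        #     env = dict(os.environ, FRUIT="apple")
        #     subprocess.Popen([sys.executable, "-c", "..."], env=env)
        #
        # preexec_fn is used below purely to exercise that code path.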
p = subprocess.Popen([sys.executable, "-c", 'import sys,os;' 'sys.stdout.write(os.getenv("FRUIT"))'], stdout=subprocess.PIPE, preexec_fn=lambda: os.putenv("FRUIT", "apple")) self.addCleanup(p.stdout.close) self.assertEqual(p.stdout.read(), b"apple") def test_preexec_exception(self) -> None: def raise_it(): raise ValueError("What if two swallows carried a coconut?") try: p = subprocess.Popen([sys.executable, "-c", ""], preexec_fn=raise_it) except RuntimeError as e: self.assertTrue( subprocess._posixsubprocess, "Expected a ValueError from the preexec_fn") except ValueError as e2: self.assertIn("coconut", e2.args[0]) else: self.fail("Exception raised by preexec_fn did not make it " "to the parent process.") def test_preexec_gc_module_failure(self) -> None: # This tests the code that disables garbage collection if the child # process will execute any Python. def raise_runtime_error(): raise RuntimeError("this shouldn't escape") enabled = gc.isenabled() orig_gc_disable = gc.disable orig_gc_isenabled = gc.isenabled try: gc.disable() self.assertFalse(gc.isenabled()) subprocess.call([sys.executable, '-c', ''], preexec_fn=lambda: None) self.assertFalse(gc.isenabled(), "Popen enabled gc when it shouldn't.") gc.enable() self.assertTrue(gc.isenabled()) subprocess.call([sys.executable, '-c', ''], preexec_fn=lambda: None) self.assertTrue(gc.isenabled(), "Popen left gc disabled.") setattr(gc, 'disable', raise_runtime_error) self.assertRaises(RuntimeError, subprocess.Popen, [sys.executable, '-c', ''], preexec_fn=lambda: None) del gc.isenabled # force an AttributeError self.assertRaises(AttributeError, subprocess.Popen, [sys.executable, '-c', ''], preexec_fn=lambda: None) finally: setattr(gc, 'disable', orig_gc_disable) setattr(gc, 'isenabled', orig_gc_isenabled) if not enabled: gc.disable() def test_args_string(self) -> None: # args is a string fd, fname = mkstemp() # reopen in text mode with open(fd, "w", errors=cast(Any, "surrogateescape")) as fobj: # see #260 fobj.write("#!/bin/sh\n") fobj.write("exec '%s' -c 'import sys; sys.exit(47)'\n" % sys.executable) os.chmod(fname, 0o700) p = subprocess.Popen(fname) p.wait() os.remove(fname) self.assertEqual(p.returncode, 47) def test_invalid_args(self) -> None: # invalid arguments should raise ValueError self.assertRaises(ValueError, subprocess.call, [sys.executable, "-c", "import sys; sys.exit(47)"], startupinfo=47) self.assertRaises(ValueError, subprocess.call, [sys.executable, "-c", "import sys; sys.exit(47)"], creationflags=47) def test_shell_sequence(self) -> None: # Run command through the shell (sequence) newenv = os.environ.copy() newenv["FRUIT"] = "apple" p = subprocess.Popen(["echo $FRUIT"], shell=1, stdout=subprocess.PIPE, env=newenv) self.addCleanup(p.stdout.close) self.assertEqual(p.stdout.read().strip(b" \t\r\n\f"), b"apple") def test_shell_string(self) -> None: # Run command through the shell (string) newenv = os.environ.copy() newenv["FRUIT"] = "apple" p = subprocess.Popen("echo $FRUIT", shell=1, stdout=subprocess.PIPE, env=newenv) self.addCleanup(p.stdout.close) self.assertEqual(p.stdout.read().strip(b" \t\r\n\f"), b"apple") def test_call_string(self) -> None: # call() function with string argument on UNIX fd, fname = mkstemp() # reopen in text mode with open(fd, "w", errors=cast(Any, "surrogateescape")) as fobj: # see #260 fobj.write("#!/bin/sh\n") fobj.write("exec '%s' -c 'import sys; sys.exit(47)'\n" % sys.executable) os.chmod(fname, 0o700) rc = subprocess.call(fname) os.remove(fname) self.assertEqual(rc, 47) def test_specific_shell(self) -> 
None: # Issue #9265: Incorrect name passed as arg[0]. shells = [] # type: List[str] for prefix in ['/bin', '/usr/bin/', '/usr/local/bin']: for name in ['bash', 'ksh']: sh = os.path.join(prefix, name) if os.path.isfile(sh): shells.append(sh) if not shells: # Will probably work for any shell but csh. self.skipTest("bash or ksh required for this test") sh = '/bin/sh' if os.path.isfile(sh) and not os.path.islink(sh): # Test will fail if /bin/sh is a symlink to csh. shells.append(sh) for sh in shells: p = subprocess.Popen("echo $0", executable=sh, shell=True, stdout=subprocess.PIPE) self.addCleanup(p.stdout.close) self.assertEqual(p.stdout.read().strip(), bytes(sh, 'ascii')) def _kill_process(self, method: str, *args: Any) -> subprocess.Popen: # Do not inherit file handles from the parent. # It should fix failures on some platforms. p = subprocess.Popen([sys.executable, "-c", """if 1: import sys, time sys.stdout.write('x\\n') sys.stdout.flush() time.sleep(30) """], close_fds=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # Wait for the interpreter to be completely initialized before # sending any signal. p.stdout.read(1) getattr(p, method)(*args) return p def test_send_signal(self) -> None: p = self._kill_process('send_signal', signal.SIGINT) _, stderr = p.communicate() self.assertIn(b'KeyboardInterrupt', stderr) self.assertNotEqual(p.wait(), 0) def test_kill(self) -> None: p = self._kill_process('kill') _, stderr = p.communicate() self.assertStderrEqual(stderr, b'') self.assertEqual(p.wait(), -signal.SIGKILL) def test_terminate(self) -> None: p = self._kill_process('terminate') _, stderr = p.communicate() self.assertStderrEqual(stderr, b'') self.assertEqual(p.wait(), -signal.SIGTERM) def check_close_std_fds(self, fds: Iterable[int]) -> None: # Issue #9905: test that subprocess pipes still work properly with # some standard fds closed stdin = 0 newfds = [] # type: List[int] for a in fds: b = os.dup(a) newfds.append(b) if a == 0: stdin = b try: for fd in fds: os.close(fd) out, err = subprocess.Popen([sys.executable, "-c", 'import sys;' 'sys.stdout.write("apple");' 'sys.stdout.flush();' 'sys.stderr.write("orange")'], stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() err = support.strip_python_stderr(err) self.assertEqual((out, err), (b'apple', b'orange')) finally: for b, a in zip(newfds, fds): os.dup2(b, a) for b in newfds: os.close(b) def test_close_fd_0(self) -> None: self.check_close_std_fds([0]) def test_close_fd_1(self) -> None: self.check_close_std_fds([1]) def test_close_fd_2(self) -> None: self.check_close_std_fds([2]) def test_close_fds_0_1(self) -> None: self.check_close_std_fds([0, 1]) def test_close_fds_0_2(self) -> None: self.check_close_std_fds([0, 2]) def test_close_fds_1_2(self) -> None: self.check_close_std_fds([1, 2]) def test_close_fds_0_1_2(self) -> None: # Issue #10806: test that subprocess pipes still work properly with # all standard fds closed. 
self.check_close_std_fds([0, 1, 2]) def test_remapping_std_fds(self) -> None: # open up some temporary files temps = [mkstemp() for i in range(3)] try: temp_fds = [fd for fd, fname in temps] # unlink the files -- we won't need to reopen them for fd, fname in temps: os.unlink(fname) # write some data to what will become stdin, and rewind os.write(temp_fds[1], b"STDIN") os.lseek(temp_fds[1], 0, 0) # move the standard file descriptors out of the way saved_fds = [os.dup(fd) for fd in range(3)] try: # duplicate the file objects over the standard fd's for fd, temp_fd in enumerate(temp_fds): os.dup2(temp_fd, fd) # now use those files in the "wrong" order, so that subprocess # has to rearrange them in the child p = subprocess.Popen([sys.executable, "-c", 'import sys; got = sys.stdin.read();' 'sys.stdout.write("got %s"%got); sys.stderr.write("err")'], stdin=temp_fds[1], stdout=temp_fds[2], stderr=temp_fds[0]) p.wait() finally: # restore the original fd's underneath sys.stdin, etc. for std, saved in enumerate(saved_fds): os.dup2(saved, std) os.close(saved) for fd in temp_fds: os.lseek(fd, 0, 0) out = os.read(temp_fds[2], 1024) err = support.strip_python_stderr(os.read(temp_fds[0], 1024)) self.assertEqual(out, b"got STDIN") self.assertEqual(err, b"err") finally: for fd in temp_fds: os.close(fd) def check_swap_fds(self, stdin_no: int, stdout_no: int, stderr_no: int) -> None: # open up some temporary files temps = [mkstemp() for i in range(3)] temp_fds = [fd for fd, fname in temps] try: # unlink the files -- we won't need to reopen them for fd, fname in temps: os.unlink(fname) # save a copy of the standard file descriptors saved_fds = [os.dup(fd) for fd in range(3)] try: # duplicate the temp files over the standard fd's 0, 1, 2 for fd, temp_fd in enumerate(temp_fds): os.dup2(temp_fd, fd) # write some data to what will become stdin, and rewind os.write(stdin_no, b"STDIN") os.lseek(stdin_no, 0, 0) # now use those files in the given order, so that subprocess # has to rearrange them in the child p = subprocess.Popen([sys.executable, "-c", 'import sys; got = sys.stdin.read();' 'sys.stdout.write("got %s"%got); sys.stderr.write("err")'], stdin=stdin_no, stdout=stdout_no, stderr=stderr_no) p.wait() for fd in temp_fds: os.lseek(fd, 0, 0) out = os.read(stdout_no, 1024) err = support.strip_python_stderr(os.read(stderr_no, 1024)) finally: for std, saved in enumerate(saved_fds): os.dup2(saved, std) os.close(saved) self.assertEqual(out, b"got STDIN") self.assertEqual(err, b"err") finally: for fd in temp_fds: os.close(fd) # When duping fds, if there arises a situation where one of the fds is # either 0, 1 or 2, it is possible that it is overwritten (#12607). # This tests all combinations of this. 
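    # A minimal sketch of that hazard (illustrative, not executed here): if the
    # caller passes stdin=2, stdout=0, stderr=1, a naive child setup such as
    #
    #     os.dup2(2, 0)   # stdin  <- old fd 2
    #     os.dup2(0, 1)   # stdout <- fd 0, which by now refers to the new stdin
    #     os.dup2(1, 2)   # stderr <- likewise already clobbered
    #
    # loses the original descriptors, so they have to be saved (dup'ed out of
    # the 0-2 range) before being remapped.  test_swap_fds() below drives
    # check_swap_fds() with every permutation of 0, 1 and 2.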
def test_swap_fds(self) -> None: self.check_swap_fds(0, 1, 2) self.check_swap_fds(0, 2, 1) self.check_swap_fds(1, 0, 2) self.check_swap_fds(1, 2, 0) self.check_swap_fds(2, 0, 1) self.check_swap_fds(2, 1, 0) def test_surrogates_error_message(self) -> None: def prepare() -> None: raise ValueError("surrogate:\uDCff") try: subprocess.call( [sys.executable, "-c", "pass"], preexec_fn=prepare) except ValueError as err: # Pure Python implementations keeps the message self.assertIsNone(subprocess._posixsubprocess) self.assertEqual(str(err), "surrogate:\uDCff") except RuntimeError as err2: # _posixsubprocess uses a default message self.assertIsNotNone(subprocess._posixsubprocess) self.assertEqual(str(err2), "Exception occurred in preexec_fn.") else: self.fail("Expected ValueError or RuntimeError") def test_undecodable_env(self) -> None: for key, value in (('test', 'abc\uDCFF'), ('test\uDCFF', '42')): # test str with surrogates script = "import os; print(ascii(os.getenv(%s)))" % repr(key) env = os.environ.copy() env[key] = value # Use C locale to get ascii for the locale encoding to force # surrogate-escaping of \xFF in the child process; otherwise it can # be decoded as-is if the default locale is latin-1. env['LC_ALL'] = 'C' stdout = subprocess.check_output( [sys.executable, "-c", script], env=env) stdout = stdout.rstrip(b'\n\r') self.assertEqual(stdout.decode('ascii'), ascii(value)) # test bytes keyb = key.encode("ascii", "surrogateescape") valueb = value.encode("ascii", "surrogateescape") script = "import os; print(ascii(os.getenvb(%s)))" % repr(keyb) envb = dict(os.environ.copy().items()) # type: Dict[Any, Any] envb[keyb] = valueb stdout = subprocess.check_output( [sys.executable, "-c", script], env=envb) stdout = stdout.rstrip(b'\n\r') self.assertEqual(stdout.decode('ascii'), ascii(valueb)) def test_bytes_program(self) -> None: abs_program = os.fsencode(sys.executable) path, programs = os.path.split(sys.executable) program = os.fsencode(programs) # absolute bytes path exitcode = subprocess.call([abs_program, "-c", "pass"]) self.assertEqual(exitcode, 0) # bytes program, unicode PATH env = os.environ.copy() env["PATH"] = path exitcode = subprocess.call([program, "-c", "pass"], env=env) self.assertEqual(exitcode, 0) # bytes program, bytes PATH envb = os.environb.copy() envb[b"PATH"] = os.fsencode(path) exitcode = subprocess.call([program, "-c", "pass"], env=envb) self.assertEqual(exitcode, 0) def test_pipe_cloexec(self) -> None: sleeper = support.findfile("input_reader.py", subdir="subprocessdata") fd_status = support.findfile("fd_status.py", subdir="subprocessdata") p1 = subprocess.Popen([sys.executable, sleeper], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=False) self.addCleanup(p1.communicate, b'') p2 = subprocess.Popen([sys.executable, fd_status], stdout=subprocess.PIPE, close_fds=False) output, error = p2.communicate() result_fds = set(map(int, output.split(b','))) unwanted_fds = set([p1.stdin.fileno(), p1.stdout.fileno(), p1.stderr.fileno()]) self.assertFalse(result_fds & unwanted_fds, "Expected no fds from %r to be open in child, " "found %r" % (unwanted_fds, result_fds & unwanted_fds)) def test_pipe_cloexec_real_tools(self) -> None: qcat = support.findfile("qcat.py", subdir="subprocessdata") qgrep = support.findfile("qgrep.py", subdir="subprocessdata") subdata = b'zxcvbn' data = subdata * 4 + b'\n' p1 = subprocess.Popen([sys.executable, qcat], stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=False) p2 = subprocess.Popen([sys.executable, qgrep, 
subdata], stdin=p1.stdout, stdout=subprocess.PIPE, close_fds=False) self.addCleanup(p1.wait) self.addCleanup(p2.wait) def kill_p1() -> None: #try: p1.terminate() #except ProcessLookupError: # pass def kill_p2() -> None: #try: p2.terminate() #except ProcessLookupError: # pass self.addCleanup(kill_p1) self.addCleanup(kill_p2) p1.stdin.write(data) p1.stdin.close() readfiles, ignored1, ignored2 = select.select([p2.stdout], [], [], 10) self.assertTrue(readfiles, "The child hung") self.assertEqual(p2.stdout.read(), data) p1.stdout.close() p2.stdout.close() def test_close_fds(self) -> None: fd_status = support.findfile("fd_status.py", subdir="subprocessdata") fds = os.pipe() self.addCleanup(os.close, fds[0]) self.addCleanup(os.close, fds[1]) open_fds = set([fds[0], fds[1]]) # add a bunch more fds for _ in range(9): fd = os.open("/dev/null", os.O_RDONLY) self.addCleanup(os.close, fd) open_fds.add(fd) p = subprocess.Popen([sys.executable, fd_status], stdout=subprocess.PIPE, close_fds=False) output, ignored = p.communicate() remaining_fds = set(map(int, output.split(b','))) self.assertEqual(remaining_fds & open_fds, open_fds, "Some fds were closed") p = subprocess.Popen([sys.executable, fd_status], stdout=subprocess.PIPE, close_fds=True) output, ignored = p.communicate() remaining_fds = set(map(int, output.split(b','))) self.assertFalse(remaining_fds & open_fds, "Some fds were left open") self.assertIn(1, remaining_fds, "Subprocess failed") # Keep some of the fd's we opened open in the subprocess. # This tests _posixsubprocess.c's proper handling of fds_to_keep. fds_to_keep = set(open_fds.pop() for _ in range(8)) p = subprocess.Popen([sys.executable, fd_status], stdout=subprocess.PIPE, close_fds=True, pass_fds=()) output, ignored = p.communicate() remaining_fds = set(map(int, output.split(b','))) self.assertFalse(remaining_fds & fds_to_keep & open_fds, "Some fds not in pass_fds were left open") self.assertIn(1, remaining_fds, "Subprocess failed") # Mac OS X Tiger (10.4) has a kernel bug: sometimes, the file # descriptor of a pipe closed in the parent process is valid in the # child process according to fstat(), but the mode of the file # descriptor is invalid, and read or write raise an error. @support.requires_mac_ver(10, 5) def test_pass_fds(self) -> None: fd_status = support.findfile("fd_status.py", subdir="subprocessdata") open_fds = set() # type: Set[int] for x in range(5): fds = os.pipe() self.addCleanup(os.close, fds[0]) self.addCleanup(os.close, fds[1]) open_fds.update([fds[0], fds[1]]) for fd in open_fds: p = subprocess.Popen([sys.executable, fd_status], stdout=subprocess.PIPE, close_fds=True, pass_fds=(fd, )) output, ignored = p.communicate() remaining_fds = set(map(int, output.split(b','))) to_be_closed = open_fds - {fd} self.assertIn(fd, remaining_fds, "fd to be passed not passed") self.assertFalse(remaining_fds & to_be_closed, "fd to be closed passed") # pass_fds overrides close_fds with a warning. 
with self.assertWarns(RuntimeWarning) as context: self.assertFalse(subprocess.call( [sys.executable, "-c", "import sys; sys.exit(0)"], close_fds=False, pass_fds=(fd, ))) self.assertIn('overriding close_fds', str(context.warning)) def test_stdout_stdin_are_single_inout_fd(self) -> None: with io.open(os.devnull, "r+") as inout: p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"], stdout=inout, stdin=inout) p.wait() def test_stdout_stderr_are_single_inout_fd(self) -> None: with io.open(os.devnull, "r+") as inout: p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"], stdout=inout, stderr=inout) p.wait() def test_stderr_stdin_are_single_inout_fd(self) -> None: with io.open(os.devnull, "r+") as inout: p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"], stderr=inout, stdin=inout) p.wait() def test_wait_when_sigchild_ignored(self) -> None: # NOTE: sigchild_ignore.py may not be an effective test on all OSes. sigchild_ignore = support.findfile("sigchild_ignore.py", subdir="subprocessdata") p = subprocess.Popen([sys.executable, sigchild_ignore], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = p.communicate() self.assertEqual(0, p.returncode, "sigchild_ignore.py exited" " non-zero with this error:\n%s" % stderr.decode('utf8')) def test_select_unbuffered(self) -> None: # Issue #11459: bufsize=0 should really set the pipes as # unbuffered (and therefore let select() work properly). select = support.import_module("select") p = subprocess.Popen([sys.executable, "-c", 'import sys;' 'sys.stdout.write("apple")'], stdout=subprocess.PIPE, bufsize=0) f = p.stdout self.addCleanup(f.close) try: self.assertEqual(f.read(4), b"appl") self.assertIn(f, select.select([f], [], [], 0.0)[0]) finally: p.wait() def test_zombie_fast_process_del(self) -> None: # Issue #12650: on Unix, if Popen.__del__() was called before the # process exited, it wouldn't be added to subprocess._active, and would # remain a zombie. # spawn a Popen, and delete its reference before it exits p = subprocess.Popen([sys.executable, "-c", 'import sys, time;' 'time.sleep(0.2)'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) ident = id(p) pid = p.pid del p # check that p is in the active processes list self.assertIn(ident, [id(o) for o in subprocess._active]) def test_leak_fast_process_del_killed(self) -> None: # Issue #12650: on Unix, if Popen.__del__() was called before the # process exited, and the process got killed by a signal, it would never # be removed from subprocess._active, which triggered a FD and memory # leak. 
# spawn a Popen, delete its reference and kill it p = subprocess.Popen([sys.executable, "-c", 'import time;' 'time.sleep(3)'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) ident = id(p) pid = p.pid del p os.kill(pid, signal.SIGKILL) # check that p is in the active processes list self.assertIn(ident, [id(o) for o in subprocess._active]) # let some time for the process to exit, and create a new Popen: this # should trigger the wait() of p time.sleep(0.2) with self.assertRaises(EnvironmentError) as c: with subprocess.Popen(['nonexisting_i_hope'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc: pass # p should have been wait()ed on, and removed from the _active list self.assertRaises(OSError, os.waitpid, pid, 0) self.assertNotIn(ident, [id(o) for o in subprocess._active]) @unittest.skipUnless(mswindows, "Windows specific tests") class Win32ProcessTestCase(BaseTestCase): def test_startupinfo(self) -> None: # startupinfo argument # We uses hardcoded constants, because we do not want to # depend on win32all. STARTF_USESHOWWINDOW = 1 SW_MAXIMIZE = 3 startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags = STARTF_USESHOWWINDOW startupinfo.wShowWindow = SW_MAXIMIZE # Since Python is a console process, it won't be affected # by wShowWindow, but the argument should be silently # ignored subprocess.call([sys.executable, "-c", "import sys; sys.exit(0)"], startupinfo=startupinfo) def test_creationflags(self) -> None: # creationflags argument CREATE_NEW_CONSOLE = 16 sys.stderr.write(" a DOS box should flash briefly ...\n") subprocess.call(sys.executable + ' -c "import time; time.sleep(0.25)"', creationflags=CREATE_NEW_CONSOLE) def test_invalid_args(self) -> None: # invalid arguments should raise ValueError self.assertRaises(ValueError, subprocess.call, [sys.executable, "-c", "import sys; sys.exit(47)"], preexec_fn=lambda: 1) self.assertRaises(ValueError, subprocess.call, [sys.executable, "-c", "import sys; sys.exit(47)"], stdout=subprocess.PIPE, close_fds=True) def test_close_fds(self) -> None: # close file descriptors rc = subprocess.call([sys.executable, "-c", "import sys; sys.exit(47)"], close_fds=True) self.assertEqual(rc, 47) def test_shell_sequence(self) -> None: # Run command through the shell (sequence) newenv = os.environ.copy() newenv["FRUIT"] = "physalis" p = subprocess.Popen(["set"], shell=1, stdout=subprocess.PIPE, env=newenv) self.addCleanup(p.stdout.close) self.assertIn(b"physalis", p.stdout.read()) def test_shell_string(self) -> None: # Run command through the shell (string) newenv = os.environ.copy() newenv["FRUIT"] = "physalis" p = subprocess.Popen("set", shell=1, stdout=subprocess.PIPE, env=newenv) self.addCleanup(p.stdout.close) self.assertIn(b"physalis", p.stdout.read()) def test_call_string(self) -> None: # call() function with string argument on Windows rc = subprocess.call(sys.executable + ' -c "import sys; sys.exit(47)"') self.assertEqual(rc, 47) def _kill_process(self, method: str, *args: Any) -> None: # Some win32 buildbot raises EOFError if stdin is inherited p = subprocess.Popen([sys.executable, "-c", """if 1: import sys, time sys.stdout.write('x\\n') sys.stdout.flush() time.sleep(30) """], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(p.stdout.close) self.addCleanup(p.stderr.close) self.addCleanup(p.stdin.close) # Wait for the interpreter to be completely initialized before # sending any signal. 
p.stdout.read(1) getattr(p, method)(*args) _, stderr = p.communicate() self.assertStderrEqual(stderr, b'') returncode = p.wait() self.assertNotEqual(returncode, 0) def test_send_signal(self) -> None: self._kill_process('send_signal', signal.SIGTERM) def test_kill(self) -> None: self._kill_process('kill') def test_terminate(self) -> None: self._kill_process('terminate') # The module says: # "NB This only works (and is only relevant) for UNIX." # # Actually, getoutput should work on any platform with an os.popen, but # I'll take the comment as given, and skip this suite. @unittest.skipUnless(os.name == 'posix', "only relevant for UNIX") class CommandTests(unittest.TestCase): def test_getoutput(self) -> None: self.assertEqual(subprocess.getoutput('echo xyzzy'), 'xyzzy') self.assertEqual(subprocess.getstatusoutput('echo xyzzy'), (0, 'xyzzy')) # we use mkdtemp in the next line to create an empty directory # under our exclusive control; from that, we can invent a pathname # that we _know_ won't exist. This is guaranteed to fail. dir = None # type: str try: dir = tempfile.mkdtemp() name = os.path.join(dir, "foo") status, output = subprocess.getstatusoutput('cat ' + name) self.assertNotEqual(status, 0) finally: if dir is not None: os.rmdir(dir) @unittest.skipUnless(getattr(subprocess, '_has_poll', False), "poll system call not supported") class ProcessTestCaseNoPoll(ProcessTestCase): def setUp(self) -> None: subprocess._has_poll = False ProcessTestCase.setUp(self) def tearDown(self) -> None: subprocess._has_poll = True ProcessTestCase.tearDown(self) #@unittest.skipUnless(getattr(subprocess, '_posixsubprocess', False), # "_posixsubprocess extension module not found.") #class ProcessTestCasePOSIXPurePython(ProcessTestCase, POSIXProcessTestCase): # @classmethod # def setUpClass(cls): # global subprocess # assert subprocess._posixsubprocess # # Reimport subprocess while forcing _posixsubprocess to not exist. # with support.check_warnings(('.*_posixsubprocess .* not being used.*', # RuntimeWarning)): # subprocess = support.import_fresh_module( # 'subprocess', blocked=['_posixsubprocess']) # assert not subprocess._posixsubprocess # # @classmethod # def tearDownClass(cls): # global subprocess # # Reimport subprocess as it should be, restoring order to the universe#. # subprocess = support.import_fresh_module('subprocess') # assert subprocess._posixsubprocess class HelperFunctionTests(unittest.TestCase): @unittest.skipIf(mswindows, "errno and EINTR make no sense on windows") def test_eintr_retry_call(self) -> None: record_calls = [] # type: List[Any] def fake_os_func(*args: Any) -> tuple: record_calls.append(args) if len(record_calls) == 2: raise OSError(errno.EINTR, "fake interrupted system call") return tuple(reversed(args)) self.assertEqual((999, 256), subprocess._eintr_retry_call(fake_os_func, 256, 999)) self.assertEqual([(256, 999)], record_calls) # This time there will be an EINTR so it will loop once. 
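# fake_os_func raises EINTR on its second invocation only, so the retried
# call below is recorded twice: once for the interrupted attempt and once
# for the successful retry.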
self.assertEqual((666,), subprocess._eintr_retry_call(fake_os_func, 666)) self.assertEqual([(256, 999), (666,), (666,)], record_calls) @unittest.skipUnless(mswindows, "Windows-specific tests") class CommandsWithSpaces (BaseTestCase): def setUp(self) -> None: super().setUp() f, fname = mkstemp(".py", "te st") self.fname = fname.lower () os.write(f, b"import sys;" b"sys.stdout.write('%d %s' % (len(sys.argv), [a.lower () for a in sys.argv]))" ) os.close(f) def tearDown(self) -> None: os.remove(self.fname) super().tearDown() def with_spaces(self, *args: Any, **kwargs: Any) -> None: kwargs['stdout'] = subprocess.PIPE p = subprocess.Popen(*args, **kwargs) self.addCleanup(p.stdout.close) self.assertEqual( p.stdout.read ().decode("mbcs"), "2 [%r, 'ab cd']" % self.fname ) def test_shell_string_with_spaces(self) -> None: # call() function with string argument with spaces on Windows self.with_spaces('"%s" "%s" "%s"' % (sys.executable, self.fname, "ab cd"), shell=1) def test_shell_sequence_with_spaces(self) -> None: # call() function with sequence argument with spaces on Windows self.with_spaces([sys.executable, self.fname, "ab cd"], shell=1) def test_noshell_string_with_spaces(self) -> None: # call() function with string argument with spaces on Windows self.with_spaces('"%s" "%s" "%s"' % (sys.executable, self.fname, "ab cd")) def test_noshell_sequence_with_spaces(self) -> None: # call() function with sequence argument with spaces on Windows self.with_spaces([sys.executable, self.fname, "ab cd"]) class ContextManagerTests(BaseTestCase): def test_pipe(self) -> None: with subprocess.Popen([sys.executable, "-c", "import sys;" "sys.stdout.write('stdout');" "sys.stderr.write('stderr');"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc: self.assertEqual(proc.stdout.read(), b"stdout") self.assertStderrEqual(proc.stderr.read(), b"stderr") self.assertTrue(proc.stdout.closed) self.assertTrue(proc.stderr.closed) def test_returncode(self) -> None: with subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(100)"]) as proc: pass # __exit__ calls wait(), so the returncode should be set self.assertEqual(proc.returncode, 100) def test_communicate_stdin(self) -> None: with subprocess.Popen([sys.executable, "-c", "import sys;" "sys.exit(sys.stdin.read() == 'context')"], stdin=subprocess.PIPE) as proc: proc.communicate(b"context") self.assertEqual(proc.returncode, 1) def test_invalid_args(self) -> None: with self.assertRaises(EnvironmentError) as c: with subprocess.Popen(['nonexisting_i_hope'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc: pass if c.exception.errno != errno.ENOENT: # ignore "no such file" raise c.exception def test_main(): unit_tests = (ProcessTestCase, POSIXProcessTestCase, Win32ProcessTestCase, #ProcessTestCasePOSIXPurePython, CommandTests, ProcessTestCaseNoPoll, HelperFunctionTests, CommandsWithSpaces, ContextManagerTests, ) support.run_unittest(*unit_tests) support.reap_children() if __name__ == "__main__": unittest.main() mypy-0.560/test-data/stdlib-samples/3.2/test/test_tempfile.py0000644€tŠÔÚ€2›s®0000011153513215007205030205 0ustar jukkaDROPBOX\Domain Users00000000000000# tempfile.py unit tests. 
import tempfile import os import signal import sys import re import warnings import unittest from test import support from typing import Any, AnyStr, List, Dict, IO if hasattr(os, 'stat'): import stat has_stat = 1 else: has_stat = 0 has_textmode = (tempfile._text_openflags != tempfile._bin_openflags) has_spawnl = hasattr(os, 'spawnl') # TEST_FILES may need to be tweaked for systems depending on the maximum # number of files that can be opened at one time (see ulimit -n) if sys.platform in ('openbsd3', 'openbsd4'): TEST_FILES = 48 else: TEST_FILES = 100 # This is organized as one test for each chunk of code in tempfile.py, # in order of their appearance in the file. Testing which requires # threads is not done here. # Common functionality. class TC(unittest.TestCase): str_check = re.compile(r"[a-zA-Z0-9_-]{6}$") def setUp(self) -> None: self._warnings_manager = support.check_warnings() self._warnings_manager.__enter__() warnings.filterwarnings("ignore", category=RuntimeWarning, message="mktemp", module=__name__) def tearDown(self) -> None: self._warnings_manager.__exit__(None, None, None) def failOnException(self, what: str, ei: tuple = None) -> None: if ei is None: ei = sys.exc_info() self.fail("%s raised %s: %s" % (what, ei[0], ei[1])) def nameCheck(self, name: str, dir: str, pre: str, suf: str) -> None: (ndir, nbase) = os.path.split(name) npre = nbase[:len(pre)] nsuf = nbase[len(nbase)-len(suf):] # check for equality of the absolute paths! self.assertEqual(os.path.abspath(ndir), os.path.abspath(dir), "file '%s' not in directory '%s'" % (name, dir)) self.assertEqual(npre, pre, "file '%s' does not begin with '%s'" % (nbase, pre)) self.assertEqual(nsuf, suf, "file '%s' does not end with '%s'" % (nbase, suf)) nbase = nbase[len(pre):len(nbase)-len(suf)] self.assertTrue(self.str_check.match(nbase), "random string '%s' does not match /^[a-zA-Z0-9_-]{6}$/" % nbase) test_classes = [] # type: List[type] class test_exports(TC): def test_exports(self) -> None: # There are no surprising symbols in the tempfile module dict = tempfile.__dict__ expected = { "NamedTemporaryFile" : 1, "TemporaryFile" : 1, "mkstemp" : 1, "mkdtemp" : 1, "mktemp" : 1, "TMP_MAX" : 1, "gettempprefix" : 1, "gettempdir" : 1, "tempdir" : 1, "template" : 1, "SpooledTemporaryFile" : 1, "TemporaryDirectory" : 1, } unexp = [] # type: List[str] for key in dict: if key[0] != '_' and key not in expected: unexp.append(key) self.assertTrue(len(unexp) == 0, "unexpected keys: %s" % unexp) test_classes.append(test_exports) class test__RandomNameSequence(TC): """Test the internal iterator object _RandomNameSequence.""" def setUp(self) -> None: self.r = tempfile._RandomNameSequence() super().setUp() def test_get_six_char_str(self) -> None: # _RandomNameSequence returns a six-character string s = next(self.r) self.nameCheck(s, '', '', '') def test_many(self) -> None: # _RandomNameSequence returns no duplicate strings (stochastic) dict = {} # type: Dict[str, int] r = self.r for i in range(TEST_FILES): s = next(r) self.nameCheck(s, '', '', '') self.assertNotIn(s, dict) dict[s] = 1 def supports_iter(self) -> None: # _RandomNameSequence supports the iterator protocol i = 0 r = self.r try: for s in r: i += 1 if i == 20: break except: self.failOnException("iteration") @unittest.skipUnless(hasattr(os, 'fork'), "os.fork is required for this test") def test_process_awareness(self) -> None: # ensure that the random source differs between # child and parent. 
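# The child writes the next name from its copy of the sequence to a pipe;
# the parent reads it back and draws its own next name, and the two values
# must differ once the processes have forked.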
read_fd, write_fd = os.pipe() pid = None # type: int try: pid = os.fork() if not pid: os.close(read_fd) os.write(write_fd, next(self.r).encode("ascii")) os.close(write_fd) # bypass the normal exit handlers- leave those to # the parent. os._exit(0) parent_value = next(self.r) child_value = os.read(read_fd, len(parent_value)).decode("ascii") finally: if pid: # best effort to ensure the process can't bleed out # via any bugs above try: os.kill(pid, signal.SIGKILL) except EnvironmentError: pass os.close(read_fd) os.close(write_fd) self.assertNotEqual(child_value, parent_value) test_classes.append(test__RandomNameSequence) class test__candidate_tempdir_list(TC): """Test the internal function _candidate_tempdir_list.""" def test_nonempty_list(self) -> None: # _candidate_tempdir_list returns a nonempty list of strings cand = tempfile._candidate_tempdir_list() self.assertFalse(len(cand) == 0) for c in cand: self.assertIsInstance(c, str) def test_wanted_dirs(self) -> None: # _candidate_tempdir_list contains the expected directories # Make sure the interesting environment variables are all set. with support.EnvironmentVarGuard() as env: for envname in 'TMPDIR', 'TEMP', 'TMP': dirname = os.getenv(envname) if not dirname: env[envname] = os.path.abspath(envname) cand = tempfile._candidate_tempdir_list() for envname in 'TMPDIR', 'TEMP', 'TMP': dirname = os.getenv(envname) if not dirname: raise ValueError self.assertIn(dirname, cand) try: dirname = os.getcwd() except (AttributeError, os.error): dirname = os.curdir self.assertIn(dirname, cand) # Not practical to try to verify the presence of OS-specific # paths in this list. test_classes.append(test__candidate_tempdir_list) # We test _get_default_tempdir by testing gettempdir. class test__get_candidate_names(TC): """Test the internal function _get_candidate_names.""" def test_retval(self) -> None: # _get_candidate_names returns a _RandomNameSequence object obj = tempfile._get_candidate_names() self.assertIsInstance(obj, tempfile._RandomNameSequence) def test_same_thing(self) -> None: # _get_candidate_names always returns the same object a = tempfile._get_candidate_names() b = tempfile._get_candidate_names() self.assertTrue(a is b) test_classes.append(test__get_candidate_names) class test__mkstemp_inner(TC): """Test the internal function _mkstemp_inner.""" class mkstemped: _bflags = tempfile._bin_openflags _tflags = tempfile._text_openflags def __init__(self, dir: str, pre: str, suf: str, bin: int) -> None: if bin: flags = self._bflags else: flags = self._tflags (self.fd, self.name) = tempfile._mkstemp_inner(dir, pre, suf, flags) self._close = os.close self._unlink = os.unlink def write(self, str: bytes) -> None: os.write(self.fd, str) def __del__(self) -> None: self._close(self.fd) self._unlink(self.name) def do_create(self, dir: str = None, pre: str = "", suf: str= "", bin: int = 1) -> mkstemped: if dir is None: dir = tempfile.gettempdir() try: file = test__mkstemp_inner.mkstemped(dir, pre, suf, bin) # see #259 except: self.failOnException("_mkstemp_inner") self.nameCheck(file.name, dir, pre, suf) return file def test_basic(self) -> None: # _mkstemp_inner can create files self.do_create().write(b"blat") self.do_create(pre="a").write(b"blat") self.do_create(suf="b").write(b"blat") self.do_create(pre="a", suf="b").write(b"blat") self.do_create(pre="aa", suf=".txt").write(b"blat") def test_basic_many(self) -> None: # _mkstemp_inner can create many files (stochastic) extant = list(range(TEST_FILES)) # type: List[Any] for i in extant: extant[i] = 
self.do_create(pre="aa") def test_choose_directory(self) -> None: # _mkstemp_inner can create files in a user-selected directory dir = tempfile.mkdtemp() try: self.do_create(dir=dir).write(b"blat") finally: os.rmdir(dir) def test_file_mode(self) -> None: # _mkstemp_inner creates files with the proper mode if not has_stat: return # ugh, can't use SkipTest. file = self.do_create() mode = stat.S_IMODE(os.stat(file.name).st_mode) expected = 0o600 if sys.platform in ('win32', 'os2emx'): # There's no distinction among 'user', 'group' and 'world'; # replicate the 'user' bits. user = expected >> 6 expected = user * (1 + 8 + 64) self.assertEqual(mode, expected) def test_noinherit(self) -> None: # _mkstemp_inner file handles are not inherited by child processes if not has_spawnl: return # ugh, can't use SkipTest. if support.verbose: v="v" else: v="q" file = self.do_create() fd = "%d" % file.fd try: me = __file__ # type: str except NameError: me = sys.argv[0] # We have to exec something, so that FD_CLOEXEC will take # effect. The core of this test is therefore in # tf_inherit_check.py, which see. tester = os.path.join(os.path.dirname(os.path.abspath(me)), "tf_inherit_check.py") # On Windows a spawn* /path/ with embedded spaces shouldn't be quoted, # but an arg with embedded spaces should be decorated with double # quotes on each end if sys.platform in ('win32',): decorated = '"%s"' % sys.executable tester = '"%s"' % tester else: decorated = sys.executable retval = os.spawnl(os.P_WAIT, sys.executable, decorated, tester, v, fd) self.assertFalse(retval < 0, "child process caught fatal signal %d" % -retval) self.assertFalse(retval > 0, "child process reports failure %d"%retval) def test_textmode(self) -> None: # _mkstemp_inner can create files in text mode if not has_textmode: return # ugh, can't use SkipTest. # A text file is truncated at the first Ctrl+Z byte f = self.do_create(bin=0) f.write(b"blat\x1a") f.write(b"extra\n") os.lseek(f.fd, 0, os.SEEK_SET) self.assertEqual(os.read(f.fd, 20), b"blat") test_classes.append(test__mkstemp_inner) class test_gettempprefix(TC): """Test gettempprefix().""" def test_sane_template(self) -> None: # gettempprefix returns a nonempty prefix string p = tempfile.gettempprefix() self.assertIsInstance(p, str) self.assertTrue(len(p) > 0) def test_usable_template(self) -> None: # gettempprefix returns a usable prefix string # Create a temp directory, avoiding use of the prefix. # Then attempt to create a file whose name is # prefix + 'xxxxxx.xxx' in that directory. p = tempfile.gettempprefix() + "xxxxxx.xxx" d = tempfile.mkdtemp(prefix="") try: p = os.path.join(d, p) try: fd = os.open(p, os.O_RDWR | os.O_CREAT) except: self.failOnException("os.open") os.close(fd) os.unlink(p) finally: os.rmdir(d) test_classes.append(test_gettempprefix) class test_gettempdir(TC): """Test gettempdir().""" def test_directory_exists(self) -> None: # gettempdir returns a directory which exists dir = tempfile.gettempdir() self.assertTrue(os.path.isabs(dir) or dir == os.curdir, "%s is not an absolute path" % dir) self.assertTrue(os.path.isdir(dir), "%s is not a directory" % dir) def test_directory_writable(self) -> None: # gettempdir returns a directory writable by the user # sneaky: just instantiate a NamedTemporaryFile, which # defaults to writing into the directory returned by # gettempdir. 
try: file = tempfile.NamedTemporaryFile() file.write(b"blat") file.close() except: self.failOnException("create file in %s" % tempfile.gettempdir()) def test_same_thing(self) -> None: # gettempdir always returns the same object a = tempfile.gettempdir() b = tempfile.gettempdir() self.assertTrue(a is b) test_classes.append(test_gettempdir) class test_mkstemp(TC): """Test mkstemp().""" def do_create(self, dir: str = None, pre: str = "", suf: str = "") -> None: if dir is None: dir = tempfile.gettempdir() try: (fd, name) = tempfile.mkstemp(dir=dir, prefix=pre, suffix=suf) (ndir, nbase) = os.path.split(name) adir = os.path.abspath(dir) self.assertEqual(adir, ndir, "Directory '%s' incorrectly returned as '%s'" % (adir, ndir)) except: self.failOnException("mkstemp") try: self.nameCheck(name, dir, pre, suf) finally: os.close(fd) os.unlink(name) def test_basic(self) -> None: # mkstemp can create files self.do_create() self.do_create(pre="a") self.do_create(suf="b") self.do_create(pre="a", suf="b") self.do_create(pre="aa", suf=".txt") self.do_create(dir=".") def test_choose_directory(self) -> None: # mkstemp can create directories in a user-selected directory dir = tempfile.mkdtemp() try: self.do_create(dir=dir) finally: os.rmdir(dir) test_classes.append(test_mkstemp) class test_mkdtemp(TC): """Test mkdtemp().""" def do_create(self, dir: str = None, pre: str = "", suf: str = "") -> str: if dir is None: dir = tempfile.gettempdir() try: name = tempfile.mkdtemp(dir=dir, prefix=pre, suffix=suf) except: self.failOnException("mkdtemp") try: self.nameCheck(name, dir, pre, suf) return name except: os.rmdir(name) raise def test_basic(self) -> None: # mkdtemp can create directories os.rmdir(self.do_create()) os.rmdir(self.do_create(pre="a")) os.rmdir(self.do_create(suf="b")) os.rmdir(self.do_create(pre="a", suf="b")) os.rmdir(self.do_create(pre="aa", suf=".txt")) def test_basic_many(self) -> None: # mkdtemp can create many directories (stochastic) extant = list(range(TEST_FILES)) # type: List[Any] try: for i in extant: extant[i] = self.do_create(pre="aa") finally: for i in extant: if(isinstance(i, str)): os.rmdir(i) def test_choose_directory(self) -> None: # mkdtemp can create directories in a user-selected directory dir = tempfile.mkdtemp() try: os.rmdir(self.do_create(dir=dir)) finally: os.rmdir(dir) def test_mode(self) -> None: # mkdtemp creates directories with the proper mode if not has_stat: return # ugh, can't use SkipTest. dir = self.do_create() try: mode = stat.S_IMODE(os.stat(dir).st_mode) mode &= 0o777 # Mask off sticky bits inherited from /tmp expected = 0o700 if sys.platform in ('win32', 'os2emx'): # There's no distinction among 'user', 'group' and 'world'; # replicate the 'user' bits. user = expected >> 6 expected = user * (1 + 8 + 64) self.assertEqual(mode, expected) finally: os.rmdir(dir) test_classes.append(test_mkdtemp) class test_mktemp(TC): """Test mktemp().""" # For safety, all use of mktemp must occur in a private directory. # We must also suppress the RuntimeWarning it generates. def setUp(self) -> None: self.dir = tempfile.mkdtemp() super().setUp() def tearDown(self) -> None: if self.dir: os.rmdir(self.dir) self.dir = None super().tearDown() class mktemped: def _unlink(self, path: str) -> None: os.unlink(path) _bflags = tempfile._bin_openflags def __init__(self, dir: str, pre: str, suf: str) -> None: self.name = tempfile.mktemp(dir=dir, prefix=pre, suffix=suf) # Create the file. This will raise an exception if it's # mysteriously appeared in the meanwhile. 
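# tempfile._bin_openflags includes os.O_CREAT | os.O_EXCL, so this os.open()
# call fails if the freshly chosen name already exists.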
os.close(os.open(self.name, self._bflags, 0o600)) def __del__(self) -> None: self._unlink(self.name) def do_create(self, pre: str = "", suf: str = "") -> mktemped: try: file = test_mktemp.mktemped(self.dir, pre, suf) # see #259 except: self.failOnException("mktemp") self.nameCheck(file.name, self.dir, pre, suf) return file def test_basic(self) -> None: # mktemp can choose usable file names self.do_create() self.do_create(pre="a") self.do_create(suf="b") self.do_create(pre="a", suf="b") self.do_create(pre="aa", suf=".txt") def test_many(self) -> None: # mktemp can choose many usable file names (stochastic) extant = list(range(TEST_FILES)) # type: List[Any] for i in extant: extant[i] = self.do_create(pre="aa") ## def test_warning(self): ## # mktemp issues a warning when used ## warnings.filterwarnings("error", ## category=RuntimeWarning, ## message="mktemp") ## self.assertRaises(RuntimeWarning, ## tempfile.mktemp, dir=self.dir) test_classes.append(test_mktemp) # We test _TemporaryFileWrapper by testing NamedTemporaryFile. class test_NamedTemporaryFile(TC): """Test NamedTemporaryFile().""" def do_create(self, dir: str = None, pre: str = "", suf: str = "", delete: bool = True) -> IO[Any]: if dir is None: dir = tempfile.gettempdir() try: file = tempfile.NamedTemporaryFile(dir=dir, prefix=pre, suffix=suf, delete=delete) except: self.failOnException("NamedTemporaryFile") self.nameCheck(file.name, dir, pre, suf) return file def test_basic(self) -> None: # NamedTemporaryFile can create files self.do_create() self.do_create(pre="a") self.do_create(suf="b") self.do_create(pre="a", suf="b") self.do_create(pre="aa", suf=".txt") def test_creates_named(self) -> None: # NamedTemporaryFile creates files with names f = tempfile.NamedTemporaryFile() self.assertTrue(os.path.exists(f.name), "NamedTemporaryFile %s does not exist" % f.name) def test_del_on_close(self) -> None: # A NamedTemporaryFile is deleted when closed dir = tempfile.mkdtemp() try: f = tempfile.NamedTemporaryFile(dir=dir) f.write(b'blat') f.close() self.assertFalse(os.path.exists(f.name), "NamedTemporaryFile %s exists after close" % f.name) finally: os.rmdir(dir) def test_dis_del_on_close(self) -> None: # Tests that delete-on-close can be disabled dir = tempfile.mkdtemp() tmp = None # type: str try: f = tempfile.NamedTemporaryFile(dir=dir, delete=False) tmp = f.name f.write(b'blat') f.close() self.assertTrue(os.path.exists(f.name), "NamedTemporaryFile %s missing after close" % f.name) finally: if tmp is not None: os.unlink(tmp) os.rmdir(dir) def test_multiple_close(self) -> None: # A NamedTemporaryFile can be closed many times without error f = tempfile.NamedTemporaryFile() f.write(b'abc\n') f.close() try: f.close() f.close() except: self.failOnException("close") def test_context_manager(self) -> None: # A NamedTemporaryFile can be used as a context manager with tempfile.NamedTemporaryFile() as f: self.assertTrue(os.path.exists(f.name)) self.assertFalse(os.path.exists(f.name)) def use_closed(): with f: pass self.assertRaises(ValueError, use_closed) # How to test the mode and bufsize parameters? 
test_classes.append(test_NamedTemporaryFile) class test_SpooledTemporaryFile(TC): """Test SpooledTemporaryFile().""" def do_create(self, max_size: int = 0, dir: str = None, pre: str = "", suf: str = "") -> tempfile.SpooledTemporaryFile: if dir is None: dir = tempfile.gettempdir() try: file = tempfile.SpooledTemporaryFile(max_size=max_size, dir=dir, prefix=pre, suffix=suf) except: self.failOnException("SpooledTemporaryFile") return file def test_basic(self) -> None: # SpooledTemporaryFile can create files f = self.do_create() self.assertFalse(f._rolled) f = self.do_create(max_size=100, pre="a", suf=".txt") self.assertFalse(f._rolled) def test_del_on_close(self) -> None: # A SpooledTemporaryFile is deleted when closed dir = tempfile.mkdtemp() try: f = tempfile.SpooledTemporaryFile(max_size=10, dir=dir) self.assertFalse(f._rolled) f.write(b'blat ' * 5) self.assertTrue(f._rolled) filename = f.name f.close() self.assertFalse(isinstance(filename, str) and os.path.exists(filename), "SpooledTemporaryFile %s exists after close" % filename) finally: os.rmdir(dir) def test_rewrite_small(self) -> None: # A SpooledTemporaryFile can be written to multiple within the max_size f = self.do_create(max_size=30) self.assertFalse(f._rolled) for i in range(5): f.seek(0, 0) f.write(b'x' * 20) self.assertFalse(f._rolled) def test_write_sequential(self) -> None: # A SpooledTemporaryFile should hold exactly max_size bytes, and roll # over afterward f = self.do_create(max_size=30) self.assertFalse(f._rolled) f.write(b'x' * 20) self.assertFalse(f._rolled) f.write(b'x' * 10) self.assertFalse(f._rolled) f.write(b'x') self.assertTrue(f._rolled) def test_writelines(self) -> None: # Verify writelines with a SpooledTemporaryFile f = self.do_create() f.writelines([b'x', b'y', b'z']) f.seek(0) buf = f.read() self.assertEqual(buf, b'xyz') def test_writelines_sequential(self) -> None: # A SpooledTemporaryFile should hold exactly max_size bytes, and roll # over afterward f = self.do_create(max_size=35) f.writelines([b'x' * 20, b'x' * 10, b'x' * 5]) self.assertFalse(f._rolled) f.write(b'x') self.assertTrue(f._rolled) def test_sparse(self) -> None: # A SpooledTemporaryFile that is written late in the file will extend # when that occurs f = self.do_create(max_size=30) self.assertFalse(f._rolled) f.seek(100, 0) self.assertFalse(f._rolled) f.write(b'x') self.assertTrue(f._rolled) def test_fileno(self) -> None: # A SpooledTemporaryFile should roll over to a real file on fileno() f = self.do_create(max_size=30) self.assertFalse(f._rolled) self.assertTrue(f.fileno() > 0) self.assertTrue(f._rolled) def test_multiple_close_before_rollover(self) -> None: # A SpooledTemporaryFile can be closed many times without error f = tempfile.SpooledTemporaryFile() f.write(b'abc\n') self.assertFalse(f._rolled) f.close() try: f.close() f.close() except: self.failOnException("close") def test_multiple_close_after_rollover(self) -> None: # A SpooledTemporaryFile can be closed many times without error f = tempfile.SpooledTemporaryFile(max_size=1) f.write(b'abc\n') self.assertTrue(f._rolled) f.close() try: f.close() f.close() except: self.failOnException("close") def test_bound_methods(self) -> None: # It should be OK to steal a bound method from a SpooledTemporaryFile # and use it independently; when the file rolls over, those bound # methods should continue to function f = self.do_create(max_size=30) read = f.read write = f.write seek = f.seek write(b"a" * 35) write(b"b" * 35) seek(0, 0) self.assertEqual(read(70), b'a'*35 + b'b'*35) def 
test_text_mode(self) -> None: # Creating a SpooledTemporaryFile with a text mode should produce # a file object reading and writing (Unicode) text strings. f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10) f.write("abc\n") f.seek(0) self.assertEqual(f.read(), "abc\n") f.write("def\n") f.seek(0) self.assertEqual(f.read(), "abc\ndef\n") f.write("xyzzy\n") f.seek(0) self.assertEqual(f.read(), "abc\ndef\nxyzzy\n") # Check that Ctrl+Z doesn't truncate the file f.write("foo\x1abar\n") f.seek(0) self.assertEqual(f.read(), "abc\ndef\nxyzzy\nfoo\x1abar\n") def test_text_newline_and_encoding(self) -> None: f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10, newline='', encoding='utf-8') f.write("\u039B\r\n") f.seek(0) self.assertEqual(f.read(), "\u039B\r\n") self.assertFalse(f._rolled) f.write("\u039B" * 20 + "\r\n") f.seek(0) self.assertEqual(f.read(), "\u039B\r\n" + ("\u039B" * 20) + "\r\n") self.assertTrue(f._rolled) def test_context_manager_before_rollover(self) -> None: # A SpooledTemporaryFile can be used as a context manager with tempfile.SpooledTemporaryFile(max_size=1) as f: self.assertFalse(f._rolled) self.assertFalse(f.closed) self.assertTrue(f.closed) def use_closed(): with f: pass self.assertRaises(ValueError, use_closed) def test_context_manager_during_rollover(self) -> None: # A SpooledTemporaryFile can be used as a context manager with tempfile.SpooledTemporaryFile(max_size=1) as f: self.assertFalse(f._rolled) f.write(b'abc\n') f.flush() self.assertTrue(f._rolled) self.assertFalse(f.closed) self.assertTrue(f.closed) def use_closed(): with f: pass self.assertRaises(ValueError, use_closed) def test_context_manager_after_rollover(self) -> None: # A SpooledTemporaryFile can be used as a context manager f = tempfile.SpooledTemporaryFile(max_size=1) f.write(b'abc\n') f.flush() self.assertTrue(f._rolled) with f: self.assertFalse(f.closed) self.assertTrue(f.closed) def use_closed(): with f: pass self.assertRaises(ValueError, use_closed) test_classes.append(test_SpooledTemporaryFile) class test_TemporaryFile(TC): """Test TemporaryFile().""" def test_basic(self) -> None: # TemporaryFile can create files # No point in testing the name params - the file has no name. try: tempfile.TemporaryFile() except: self.failOnException("TemporaryFile") def test_has_no_name(self) -> None: # TemporaryFile creates files with no names (on this system) dir = tempfile.mkdtemp() f = tempfile.TemporaryFile(dir=dir) f.write(b'blat') # Sneaky: because this file has no name, it should not prevent # us from removing the directory it was created in. try: os.rmdir(dir) except: ei = sys.exc_info() # cleanup f.close() os.rmdir(dir) self.failOnException("rmdir", ei) def test_multiple_close(self) -> None: # A TemporaryFile can be closed many times without error f = tempfile.TemporaryFile() f.write(b'abc\n') f.close() try: f.close() f.close() except: self.failOnException("close") # How to test the mode and bufsize parameters? 
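# (test_mode_and_encoding below covers the mode/encoding half of that
# question by round-tripping data through TemporaryFile in binary and text
# modes.)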
def test_mode_and_encoding(self) -> None: def roundtrip(input: AnyStr, *args: Any, **kwargs: Any) -> None: with tempfile.TemporaryFile(*args, **kwargs) as fileobj: fileobj.write(input) fileobj.seek(0) self.assertEqual(input, fileobj.read()) roundtrip(b"1234", "w+b") roundtrip("abdc\n", "w+") roundtrip("\u039B", "w+", encoding="utf-16") roundtrip("foo\r\n", "w+", newline="") if tempfile.NamedTemporaryFile is not tempfile.TemporaryFile: test_classes.append(test_TemporaryFile) # Helper for test_del_on_shutdown class NulledModules: def __init__(self, *modules: Any) -> None: self.refs = [mod.__dict__ for mod in modules] self.contents = [ref.copy() for ref in self.refs] def __enter__(self) -> None: for d in self.refs: for key in d: d[key] = None def __exit__(self, *exc_info: Any) -> None: for d, c in zip(self.refs, self.contents): d.clear() d.update(c) class test_TemporaryDirectory(TC): """Test TemporaryDirectory().""" def do_create(self, dir: str = None, pre: str = "", suf: str = "", recurse: int = 1) -> tempfile.TemporaryDirectory: if dir is None: dir = tempfile.gettempdir() try: tmp = tempfile.TemporaryDirectory(dir=dir, prefix=pre, suffix=suf) except: self.failOnException("TemporaryDirectory") self.nameCheck(tmp.name, dir, pre, suf) # Create a subdirectory and some files if recurse: self.do_create(tmp.name, pre, suf, recurse-1) with open(os.path.join(tmp.name, "test.txt"), "wb") as f: f.write(b"Hello world!") return tmp def test_mkdtemp_failure(self) -> None: # Check no additional exception if mkdtemp fails # Previously would raise AttributeError instead # (noted as part of Issue #10188) with tempfile.TemporaryDirectory() as nonexistent: pass with self.assertRaises(os.error): tempfile.TemporaryDirectory(dir=nonexistent) def test_explicit_cleanup(self) -> None: # A TemporaryDirectory is deleted when cleaned up dir = tempfile.mkdtemp() try: d = self.do_create(dir=dir) self.assertTrue(os.path.exists(d.name), "TemporaryDirectory %s does not exist" % d.name) d.cleanup() self.assertFalse(os.path.exists(d.name), "TemporaryDirectory %s exists after cleanup" % d.name) finally: os.rmdir(dir) @support.skip_unless_symlink def test_cleanup_with_symlink_to_a_directory(self) -> None: # cleanup() should not follow symlinks to directories (issue #12464) d1 = self.do_create() d2 = self.do_create() # Symlink d1/foo -> d2 os.symlink(d2.name, os.path.join(d1.name, "foo")) # This call to cleanup() should not follow the "foo" symlink d1.cleanup() self.assertFalse(os.path.exists(d1.name), "TemporaryDirectory %s exists after cleanup" % d1.name) self.assertTrue(os.path.exists(d2.name), "Directory pointed to by a symlink was deleted") self.assertEqual(os.listdir(d2.name), ['test.txt'], "Contents of the directory pointed to by a symlink " "were deleted") d2.cleanup() @support.cpython_only def test_del_on_collection(self) -> None: # A TemporaryDirectory is deleted when garbage collected dir = tempfile.mkdtemp() try: d = self.do_create(dir=dir) name = d.name del d # Rely on refcounting to invoke __del__ self.assertFalse(os.path.exists(name), "TemporaryDirectory %s exists after __del__" % name) finally: os.rmdir(dir) @unittest.expectedFailure # See issue #10188 def test_del_on_shutdown(self) -> None: # A TemporaryDirectory may be cleaned up during shutdown # Make sure it works with the relevant modules nulled out with self.do_create() as dir: d = self.do_create(dir=dir) # Mimic the nulling out of modules that # occurs during system shutdown modules = [os, os.path] if has_stat: modules.append(stat) # Currently broken, so 
suppress the warning # that is otherwise emitted on stdout with support.captured_stderr() as err: with NulledModules(*modules): d.cleanup() # Currently broken, so stop spurious exception by # indicating the object has already been closed d._closed = True # And this assert will fail, as expected by the # unittest decorator... self.assertFalse(os.path.exists(d.name), "TemporaryDirectory %s exists after cleanup" % d.name) def test_warnings_on_cleanup(self) -> None: # Two kinds of warning on shutdown # Issue 10888: may write to stderr if modules are nulled out # ResourceWarning will be triggered by __del__ with self.do_create() as dir: if os.sep != '\\': # Embed a backslash in order to make sure string escaping # in the displayed error message is dealt with correctly suffix = '\\check_backslash_handling' else: suffix = '' d = self.do_create(dir=dir, suf=suffix) #Check for the Issue 10888 message modules = [os, os.path] if has_stat: modules.append(stat) with support.captured_stderr() as err: with NulledModules(*modules): d.cleanup() message = err.getvalue().replace('\\\\', '\\') self.assertIn("while cleaning up", message) self.assertIn(d.name, message) # Check for the resource warning with support.check_warnings(('Implicitly', ResourceWarning), quiet=False): warnings.filterwarnings("always", category=ResourceWarning) d.__del__() self.assertFalse(os.path.exists(d.name), "TemporaryDirectory %s exists after __del__" % d.name) def test_multiple_close(self) -> None: # Can be cleaned-up many times without error d = self.do_create() d.cleanup() try: d.cleanup() d.cleanup() except: self.failOnException("cleanup") def test_context_manager(self) -> None: # Can be used as a context manager d = self.do_create() with d as name: self.assertTrue(os.path.exists(name)) self.assertEqual(name, d.name) self.assertFalse(os.path.exists(name)) test_classes.append(test_TemporaryDirectory) def test_main() -> None: support.run_unittest(*test_classes) if __name__ == "__main__": test_main() mypy-0.560/test-data/stdlib-samples/3.2/test/test_textwrap.py0000644€tŠÔÚ€2›s®0000005463013215007205030260 0ustar jukkaDROPBOX\Domain Users00000000000000# # Test suite for the textwrap module. # # Original tests written by Greg Ward . # Converted to PyUnit by Peter Hansen . # Currently maintained by Greg Ward. 
# # $Id$ # import unittest from test import support from typing import Any, List, Sequence from textwrap import TextWrapper, wrap, fill, dedent class BaseTestCase(unittest.TestCase): '''Parent class with utility methods for textwrap tests.''' wrapper = None # type: TextWrapper def show(self, textin: Sequence[str]) -> str: if isinstance(textin, list): results = [] # type: List[str] for i in range(len(textin)): results.append(" %d: %r" % (i, textin[i])) result = '\n'.join(results) elif isinstance(textin, str): result = " %s\n" % repr(textin) return result def check(self, result: Sequence[str], expect: Sequence[str]) -> None: self.assertEqual(result, expect, 'expected:\n%s\nbut got:\n%s' % ( self.show(expect), self.show(result))) def check_wrap(self, text: str, width: int, expect: Sequence[str], **kwargs: Any) -> None: result = wrap(text, width, **kwargs) self.check(result, expect) def check_split(self, text: str, expect: Sequence[str]) -> None: result = self.wrapper._split(text) self.assertEqual(result, expect, "\nexpected %r\n" "but got %r" % (expect, result)) class WrapTestCase(BaseTestCase): def setUp(self) -> None: self.wrapper = TextWrapper(width=45) def test_simple(self) -> None: # Simple case: just words, spaces, and a bit of punctuation text = "Hello there, how are you this fine day? I'm glad to hear it!" self.check_wrap(text, 12, ["Hello there,", "how are you", "this fine", "day? I'm", "glad to hear", "it!"]) self.check_wrap(text, 42, ["Hello there, how are you this fine day?", "I'm glad to hear it!"]) self.check_wrap(text, 80, [text]) def test_whitespace(self) -> None: # Whitespace munging and end-of-sentence detection text = """\ This is a paragraph that already has line breaks. But some of its lines are much longer than the others, so it needs to be wrapped. Some lines are \ttabbed too. What a mess! """ expect = ["This is a paragraph that already has line", "breaks. But some of its lines are much", "longer than the others, so it needs to be", "wrapped. Some lines are tabbed too. What a", "mess!"] wrapper = TextWrapper(45, fix_sentence_endings=True) result = wrapper.wrap(text) self.check(result, expect) results = wrapper.fill(text) self.check(results, '\n'.join(expect)) def test_fix_sentence_endings(self) -> None: wrapper = TextWrapper(60, fix_sentence_endings=True) # SF #847346: ensure that fix_sentence_endings=True does the # right thing even on input short enough that it doesn't need to # be wrapped. text = "A short line. Note the single space." expect = ["A short line. Note the single space."] self.check(wrapper.wrap(text), expect) # Test some of the hairy end cases that _fix_sentence_endings() # is supposed to handle (the easy stuff is tested in # test_whitespace() above). text = "Well, Doctor? What do you think?" expect = ["Well, Doctor? What do you think?"] self.check(wrapper.wrap(text), expect) text = "Well, Doctor?\nWhat do you think?" self.check(wrapper.wrap(text), expect) text = 'I say, chaps! Anyone for "tennis?"\nHmmph!' expect = ['I say, chaps! Anyone for "tennis?" Hmmph!'] self.check(wrapper.wrap(text), expect) wrapper.width = 20 expect = ['I say, chaps!', 'Anyone for "tennis?"', 'Hmmph!'] self.check(wrapper.wrap(text), expect) text = 'And she said, "Go to hell!"\nCan you believe that?' expect = ['And she said, "Go to', 'hell!" Can you', 'believe that?'] self.check(wrapper.wrap(text), expect) wrapper.width = 60 expect = ['And she said, "Go to hell!" Can you believe that?'] self.check(wrapper.wrap(text), expect) text = 'File stdio.h is nice.' 
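# The embedded period in "stdio.h" must not be treated as sentence-ending
# punctuation, so no second space is inserted after it.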
expect = ['File stdio.h is nice.'] self.check(wrapper.wrap(text), expect) def test_wrap_short(self) -> None: # Wrapping to make short lines longer text = "This is a\nshort paragraph." self.check_wrap(text, 20, ["This is a short", "paragraph."]) self.check_wrap(text, 40, ["This is a short paragraph."]) def test_wrap_short_1line(self) -> None: # Test endcases text = "This is a short line." self.check_wrap(text, 30, ["This is a short line."]) self.check_wrap(text, 30, ["(1) This is a short line."], initial_indent="(1) ") def test_hyphenated(self) -> None: # Test breaking hyphenated words text = ("this-is-a-useful-feature-for-" "reformatting-posts-from-tim-peters'ly") self.check_wrap(text, 40, ["this-is-a-useful-feature-for-", "reformatting-posts-from-tim-peters'ly"]) self.check_wrap(text, 41, ["this-is-a-useful-feature-for-", "reformatting-posts-from-tim-peters'ly"]) self.check_wrap(text, 42, ["this-is-a-useful-feature-for-reformatting-", "posts-from-tim-peters'ly"]) def test_hyphenated_numbers(self) -> None: # Test that hyphenated numbers (eg. dates) are not broken like words. text = ("Python 1.0.0 was released on 1994-01-26. Python 1.0.1 was\n" "released on 1994-02-15.") self.check_wrap(text, 30, ['Python 1.0.0 was released on', '1994-01-26. Python 1.0.1 was', 'released on 1994-02-15.']) self.check_wrap(text, 40, ['Python 1.0.0 was released on 1994-01-26.', 'Python 1.0.1 was released on 1994-02-15.']) text = "I do all my shopping at 7-11." self.check_wrap(text, 25, ["I do all my shopping at", "7-11."]) self.check_wrap(text, 27, ["I do all my shopping at", "7-11."]) self.check_wrap(text, 29, ["I do all my shopping at 7-11."]) def test_em_dash(self) -> None: # Test text with em-dashes text = "Em-dashes should be written -- thus." self.check_wrap(text, 25, ["Em-dashes should be", "written -- thus."]) # Probe the boundaries of the properly written em-dash, # ie. " -- ". self.check_wrap(text, 29, ["Em-dashes should be written", "-- thus."]) expect = ["Em-dashes should be written --", "thus."] self.check_wrap(text, 30, expect) self.check_wrap(text, 35, expect) self.check_wrap(text, 36, ["Em-dashes should be written -- thus."]) # The improperly written em-dash is handled too, because # it's adjacent to non-whitespace on both sides. text = "You can also do--this or even---this." expect = ["You can also do", "--this or even", "---this."] self.check_wrap(text, 15, expect) self.check_wrap(text, 16, expect) expect = ["You can also do--", "this or even---", "this."] self.check_wrap(text, 17, expect) self.check_wrap(text, 19, expect) expect = ["You can also do--this or even", "---this."] self.check_wrap(text, 29, expect) self.check_wrap(text, 31, expect) expect = ["You can also do--this or even---", "this."] self.check_wrap(text, 32, expect) self.check_wrap(text, 35, expect) # All of the above behaviour could be deduced by probing the # _split() method. text = "Here's an -- em-dash and--here's another---and another!" expect = ["Here's", " ", "an", " ", "--", " ", "em-", "dash", " ", "and", "--", "here's", " ", "another", "---", "and", " ", "another!"] self.check_split(text, expect) text = "and then--bam!--he was gone" expect = ["and", " ", "then", "--", "bam!", "--", "he", " ", "was", " ", "gone"] self.check_split(text, expect) def test_unix_options (self) -> None: # Test that Unix-style command-line options are wrapped correctly. # Both Optik (OptionParser) and Docutils rely on this behaviour! text = "You should use the -n option, or --dry-run in its long form." 
self.check_wrap(text, 20, ["You should use the", "-n option, or --dry-", "run in its long", "form."]) self.check_wrap(text, 21, ["You should use the -n", "option, or --dry-run", "in its long form."]) expect = ["You should use the -n option, or", "--dry-run in its long form."] self.check_wrap(text, 32, expect) self.check_wrap(text, 34, expect) self.check_wrap(text, 35, expect) self.check_wrap(text, 38, expect) expect = ["You should use the -n option, or --dry-", "run in its long form."] self.check_wrap(text, 39, expect) self.check_wrap(text, 41, expect) expect = ["You should use the -n option, or --dry-run", "in its long form."] self.check_wrap(text, 42, expect) # Again, all of the above can be deduced from _split(). text = "the -n option, or --dry-run or --dryrun" expect = ["the", " ", "-n", " ", "option,", " ", "or", " ", "--dry-", "run", " ", "or", " ", "--dryrun"] self.check_split(text, expect) def test_funky_hyphens (self) -> None: # Screwy edge cases cooked up by David Goodger. All reported # in SF bug #596434. self.check_split("what the--hey!", ["what", " ", "the", "--", "hey!"]) self.check_split("what the--", ["what", " ", "the--"]) self.check_split("what the--.", ["what", " ", "the--."]) self.check_split("--text--.", ["--text--."]) # When I first read bug #596434, this is what I thought David # was talking about. I was wrong; these have always worked # fine. The real problem is tested in test_funky_parens() # below... self.check_split("--option", ["--option"]) self.check_split("--option-opt", ["--option-", "opt"]) self.check_split("foo --option-opt bar", ["foo", " ", "--option-", "opt", " ", "bar"]) def test_punct_hyphens(self) -> None: # Oh bother, SF #965425 found another problem with hyphens -- # hyphenated words in single quotes weren't handled correctly. # In fact, the bug is that *any* punctuation around a hyphenated # word was handled incorrectly, except for a leading "--", which # was special-cased for Optik and Docutils. So test a variety # of styles of punctuation around a hyphenated word. # (Actually this is based on an Optik bug report, #813077). self.check_split("the 'wibble-wobble' widget", ['the', ' ', "'wibble-", "wobble'", ' ', 'widget']) self.check_split('the "wibble-wobble" widget', ['the', ' ', '"wibble-', 'wobble"', ' ', 'widget']) self.check_split("the (wibble-wobble) widget", ['the', ' ', "(wibble-", "wobble)", ' ', 'widget']) self.check_split("the ['wibble-wobble'] widget", ['the', ' ', "['wibble-", "wobble']", ' ', 'widget']) def test_funky_parens (self) -> None: # Second part of SF bug #596434: long option strings inside # parentheses. self.check_split("foo (--option) bar", ["foo", " ", "(--option)", " ", "bar"]) # Related stuff -- make sure parens work in simpler contexts. self.check_split("foo (bar) baz", ["foo", " ", "(bar)", " ", "baz"]) self.check_split("blah (ding dong), wubba", ["blah", " ", "(ding", " ", "dong),", " ", "wubba"]) def test_initial_whitespace(self) -> None: # SF bug #622849 reported inconsistent handling of leading # whitespace; let's test that a bit, shall we? text = " This is a sentence with leading whitespace." self.check_wrap(text, 50, [" This is a sentence with leading whitespace."]) self.check_wrap(text, 30, [" This is a sentence with", "leading whitespace."]) def test_no_drop_whitespace(self) -> None: # SF patch #1581073 text = " This is a sentence with much whitespace." 
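# With drop_whitespace=False the runs of spaces become chunks of their own
# and survive wrapping, which is why the expected lines below keep their
# leading and trailing blanks.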
self.check_wrap(text, 10, [" This is a", " ", "sentence ", "with ", "much white", "space."], drop_whitespace=False) def test_split(self) -> None: # Ensure that the standard _split() method works as advertised # in the comments text = "Hello there -- you goof-ball, use the -b option!" result = self.wrapper._split(text) self.check(result, ["Hello", " ", "there", " ", "--", " ", "you", " ", "goof-", "ball,", " ", "use", " ", "the", " ", "-b", " ", "option!"]) def test_break_on_hyphens(self) -> None: # Ensure that the break_on_hyphens attributes work text = "yaba daba-doo" self.check_wrap(text, 10, ["yaba daba-", "doo"], break_on_hyphens=True) self.check_wrap(text, 10, ["yaba", "daba-doo"], break_on_hyphens=False) def test_bad_width(self) -> None: # Ensure that width <= 0 is caught. text = "Whatever, it doesn't matter." self.assertRaises(ValueError, wrap, text, 0) self.assertRaises(ValueError, wrap, text, -1) def test_no_split_at_umlaut(self) -> None: text = "Die Empf\xe4nger-Auswahl" self.check_wrap(text, 13, ["Die", "Empf\xe4nger-", "Auswahl"]) def test_umlaut_followed_by_dash(self) -> None: text = "aa \xe4\xe4-\xe4\xe4" self.check_wrap(text, 7, ["aa \xe4\xe4-", "\xe4\xe4"]) class LongWordTestCase (BaseTestCase): def setUp(self) -> None: self.wrapper = TextWrapper() self.text = '''\ Did you say "supercalifragilisticexpialidocious?" How *do* you spell that odd word, anyways? ''' def test_break_long(self) -> None: # Wrap text with long words and lots of punctuation self.check_wrap(self.text, 30, ['Did you say "supercalifragilis', 'ticexpialidocious?" How *do*', 'you spell that odd word,', 'anyways?']) self.check_wrap(self.text, 50, ['Did you say "supercalifragilisticexpialidocious?"', 'How *do* you spell that odd word, anyways?']) # SF bug 797650. Prevent an infinite loop by making sure that at # least one character gets split off on every pass. self.check_wrap('-'*10+'hello', 10, ['----------', ' h', ' e', ' l', ' l', ' o'], subsequent_indent = ' '*15) # bug 1146. Prevent a long word to be wrongly wrapped when the # preceding word is exactly one character shorter than the width self.check_wrap(self.text, 12, ['Did you say ', '"supercalifr', 'agilisticexp', 'ialidocious?', '" How *do*', 'you spell', 'that odd', 'word,', 'anyways?']) def test_nobreak_long(self) -> None: # Test with break_long_words disabled self.wrapper.break_long_words = False self.wrapper.width = 30 expect = ['Did you say', '"supercalifragilisticexpialidocious?"', 'How *do* you spell that odd', 'word, anyways?' ] result = self.wrapper.wrap(self.text) self.check(result, expect) # Same thing with kwargs passed to standalone wrap() function. 
result = wrap(self.text, width=30, break_long_words=0) self.check(result, expect) class IndentTestCases(BaseTestCase): # called before each test method def setUp(self) -> None: self.text = '''\ This paragraph will be filled, first without any indentation, and then with some (including a hanging indent).''' def test_fill(self) -> None: # Test the fill() method expect = '''\ This paragraph will be filled, first without any indentation, and then with some (including a hanging indent).''' result = fill(self.text, 40) self.check(result, expect) def test_initial_indent(self) -> None: # Test initial_indent parameter expect = [" This paragraph will be filled,", "first without any indentation, and then", "with some (including a hanging indent)."] result = wrap(self.text, 40, initial_indent=" ") self.check(result, expect) expects = "\n".join(expect) results = fill(self.text, 40, initial_indent=" ") self.check(results, expects) def test_subsequent_indent(self) -> None: # Test subsequent_indent parameter expect = '''\ * This paragraph will be filled, first without any indentation, and then with some (including a hanging indent).''' result = fill(self.text, 40, initial_indent=" * ", subsequent_indent=" ") self.check(result, expect) # Despite the similar names, DedentTestCase is *not* the inverse # of IndentTestCase! class DedentTestCase(unittest.TestCase): def assertUnchanged(self, text: str) -> None: """assert that dedent() has no effect on 'text'""" self.assertEqual(text, dedent(text)) def test_dedent_nomargin(self) -> None: # No lines indented. text = "Hello there.\nHow are you?\nOh good, I'm glad." self.assertUnchanged(text) # Similar, with a blank line. text = "Hello there.\n\nBoo!" self.assertUnchanged(text) # Some lines indented, but overall margin is still zero. text = "Hello there.\n This is indented." self.assertUnchanged(text) # Again, add a blank line. text = "Hello there.\n\n Boo!\n" self.assertUnchanged(text) def test_dedent_even(self) -> None: # All lines indented by two spaces. text = " Hello there.\n How are ya?\n Oh good." expect = "Hello there.\nHow are ya?\nOh good." self.assertEqual(expect, dedent(text)) # Same, with blank lines. text = " Hello there.\n\n How are ya?\n Oh good.\n" expect = "Hello there.\n\nHow are ya?\nOh good.\n" self.assertEqual(expect, dedent(text)) # Now indent one of the blank lines. text = " Hello there.\n \n How are ya?\n Oh good.\n" expect = "Hello there.\n\nHow are ya?\nOh good.\n" self.assertEqual(expect, dedent(text)) def test_dedent_uneven(self) -> None: # Lines indented unevenly. text = '''\ def foo(): while 1: return foo ''' expect = '''\ def foo(): while 1: return foo ''' self.assertEqual(expect, dedent(text)) # Uneven indentation with a blank line. text = " Foo\n Bar\n\n Baz\n" expect = "Foo\n Bar\n\n Baz\n" self.assertEqual(expect, dedent(text)) # Uneven indentation with a whitespace-only line. text = " Foo\n Bar\n \n Baz\n" expect = "Foo\n Bar\n\n Baz\n" self.assertEqual(expect, dedent(text)) # dedent() should not mangle internal tabs def test_dedent_preserve_internal_tabs(self) -> None: text = " hello\tthere\n how are\tyou?" expect = "hello\tthere\nhow are\tyou?" self.assertEqual(expect, dedent(text)) # make sure that it preserves tabs when it's not making any # changes at all self.assertEqual(expect, dedent(expect)) # dedent() should not mangle tabs in the margin (i.e. # tabs and spaces both count as margin, but are *not* # considered equivalent) def test_dedent_preserve_margin_tabs(self) -> None: text = " hello there\n\thow are you?" 
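# Four spaces on one line and a tab on the other share no common leading
# whitespace, so dedent() has nothing it can strip uniformly and must leave
# the text unchanged.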
self.assertUnchanged(text) # same effect even if we have 8 spaces text = " hello there\n\thow are you?" self.assertUnchanged(text) # dedent() only removes whitespace that can be uniformly removed! text = "\thello there\n\thow are you?" expect = "hello there\nhow are you?" self.assertEqual(expect, dedent(text)) text = " \thello there\n \thow are you?" self.assertEqual(expect, dedent(text)) text = " \t hello there\n \t how are you?" self.assertEqual(expect, dedent(text)) text = " \thello there\n \t how are you?" expect = "hello there\n how are you?" self.assertEqual(expect, dedent(text)) def test_main() -> None: support.run_unittest(WrapTestCase, LongWordTestCase, IndentTestCases, DedentTestCase) if __name__ == '__main__': test_main() mypy-0.560/test-data/stdlib-samples/3.2/test/tf_inherit_check.py0000644€tŠÔÚ€2›s®0000000110213215007205030615 0ustar jukkaDROPBOX\Domain Users00000000000000# Helper script for test_tempfile.py. argv[2] is the number of a file # descriptor which should _not_ be open. Check this by attempting to # write to it -- if we succeed, something is wrong. import sys import os verbose = (sys.argv[1] == 'v') try: fd = int(sys.argv[2]) try: os.write(fd, b"blat") except os.error: # Success -- could not write to fd. sys.exit(0) else: if verbose: sys.stderr.write("fd %d is open in child" % fd) sys.exit(1) except Exception: if verbose: raise sys.exit(1) mypy-0.560/test-data/stdlib-samples/3.2/textwrap.py0000644€tŠÔÚ€2›s®0000003733213215007205026242 0ustar jukkaDROPBOX\Domain Users00000000000000"""Text wrapping and filling. """ # Copyright (C) 1999-2001 Gregory P. Ward. # Copyright (C) 2002, 2003 Python Software Foundation. # Written by Greg Ward import string, re from typing import Dict, List, Any __all__ = ['TextWrapper', 'wrap', 'fill', 'dedent'] # Hardcode the recognized whitespace characters to the US-ASCII # whitespace characters. The main reason for doing this is that in # ISO-8859-1, 0xa0 is non-breaking whitespace, so in certain locales # that character winds up in string.whitespace. Respecting # string.whitespace in those cases would 1) make textwrap treat 0xa0 the # same as any other whitespace char, which is clearly wrong (it's a # *non-breaking* space), 2) possibly cause problems with Unicode, # since 0xa0 is not in range(128). _whitespace = '\t\n\x0b\x0c\r ' class TextWrapper: """ Object for wrapping/filling text. The public interface consists of the wrap() and fill() methods; the other methods are just there for subclasses to override in order to tweak the default behaviour. If you want to completely replace the main wrapping algorithm, you'll probably have to override _wrap_chunks(). Several instance attributes control various aspects of wrapping: width (default: 70) the maximum width of wrapped lines (unless break_long_words is false) initial_indent (default: "") string that will be prepended to the first line of wrapped output. Counts towards the line's width. subsequent_indent (default: "") string that will be prepended to all lines save the first of wrapped output; also counts towards each line's width. expand_tabs (default: true) Expand tabs in input text to spaces before further processing. Each tab will become 1 .. 8 spaces, depending on its position in its line. If false, each tab is treated as a single character. replace_whitespace (default: true) Replace all whitespace characters in the input text by spaces after tab expansion. Note that if expand_tabs is false and replace_whitespace is true, every tab will be converted to a single space! 
fix_sentence_endings (default: false) Ensure that sentence-ending punctuation is always followed by two spaces. Off by default because the algorithm is (unavoidably) imperfect. break_long_words (default: true) Break words longer than 'width'. If false, those words will not be broken, and some lines might be longer than 'width'. break_on_hyphens (default: true) Allow breaking hyphenated words. If true, wrapping will occur preferably on whitespaces and right after hyphens part of compound words. drop_whitespace (default: true) Drop leading and trailing whitespace from lines. """ unicode_whitespace_trans = {} # type: Dict[int, int] uspace = ord(' ') for x in _whitespace: unicode_whitespace_trans[ord(x)] = uspace # This funky little regex is just the trick for splitting # text up into word-wrappable chunks. E.g. # "Hello there -- you goof-ball, use the -b option!" # splits into # Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option! # (after stripping out empty strings). wordsep_re = re.compile( r'(\s+|' # any whitespace r'[^\s\w]*\w+[^0-9\W]-(?=\w+[^0-9\W])|' # hyphenated words r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash # This less funky little regex just split on recognized spaces. E.g. # "Hello there -- you goof-ball, use the -b option!" # splits into # Hello/ /there/ /--/ /you/ /goof-ball,/ /use/ /the/ /-b/ /option!/ wordsep_simple_re = re.compile(r'(\s+)') # XXX this is not locale- or charset-aware -- string.lowercase # is US-ASCII only (and therefore English-only) sentence_end_re = re.compile(r'[a-z]' # lowercase letter r'[\.\!\?]' # sentence-ending punct. r'[\"\']?' # optional end-of-quote r'\Z') # end of chunk def __init__(self, width: int = 70, initial_indent: str = "", subsequent_indent: str = "", expand_tabs: bool = True, replace_whitespace: bool = True, fix_sentence_endings: bool = False, break_long_words: bool = True, drop_whitespace: bool = True, break_on_hyphens: bool = True) -> None: self.width = width self.initial_indent = initial_indent self.subsequent_indent = subsequent_indent self.expand_tabs = expand_tabs self.replace_whitespace = replace_whitespace self.fix_sentence_endings = fix_sentence_endings self.break_long_words = break_long_words self.drop_whitespace = drop_whitespace self.break_on_hyphens = break_on_hyphens # -- Private methods ----------------------------------------------- # (possibly useful for subclasses to override) def _munge_whitespace(self, text: str) -> str: """_munge_whitespace(text : string) -> string Munge whitespace in text: expand tabs and convert all other whitespace characters to spaces. Eg. " foo\tbar\n\nbaz" becomes " foo bar baz". """ if self.expand_tabs: text = text.expandtabs() if self.replace_whitespace: text = text.translate(self.unicode_whitespace_trans) return text def _split(self, text: str) -> List[str]: """_split(text : string) -> [string] Split the text to wrap into indivisible chunks. Chunks are not quite the same as words; see _wrap_chunks() for full details. As an example, the text Look, goof-ball -- use the -b option! breaks into the following chunks: 'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ', 'use', ' ', 'the', ' ', '-b', ' ', 'option!' if break_on_hyphens is True, or in: 'Look,', ' ', 'goof-ball', ' ', '--', ' ', 'use', ' ', 'the', ' ', '-b', ' ', option!' otherwise. 
""" if self.break_on_hyphens is True: chunks = self.wordsep_re.split(text) else: chunks = self.wordsep_simple_re.split(text) chunks = [c for c in chunks if c] return chunks def _fix_sentence_endings(self, chunks: List[str]) -> None: """_fix_sentence_endings(chunks : [string]) Correct for sentence endings buried in 'chunks'. Eg. when the original text contains "... foo.\nBar ...", munge_whitespace() and split() will convert that to [..., "foo.", " ", "Bar", ...] which has one too few spaces; this method simply changes the one space to two. """ i = 0 patsearch = self.sentence_end_re.search while i < len(chunks)-1: if chunks[i+1] == " " and patsearch(chunks[i]): chunks[i+1] = " " i += 2 else: i += 1 def _handle_long_word(self, reversed_chunks: List[str], cur_line: List[str], cur_len: int, width: int) -> None: """_handle_long_word(chunks : [string], cur_line : [string], cur_len : int, width : int) Handle a chunk of text (most likely a word, not whitespace) that is too long to fit in any line. """ # Figure out when indent is larger than the specified width, and make # sure at least one character is stripped off on every pass if width < 1: space_left = 1 else: space_left = width - cur_len # If we're allowed to break long words, then do so: put as much # of the next chunk onto the current line as will fit. if self.break_long_words: cur_line.append(reversed_chunks[-1][:space_left]) reversed_chunks[-1] = reversed_chunks[-1][space_left:] # Otherwise, we have to preserve the long word intact. Only add # it to the current line if there's nothing already there -- # that minimizes how much we violate the width constraint. elif not cur_line: cur_line.append(reversed_chunks.pop()) # If we're not allowed to break long words, and there's already # text on the current line, do nothing. Next time through the # main loop of _wrap_chunks(), we'll wind up here again, but # cur_len will be zero, so the next line will be entirely # devoted to the long word that we can't handle right now. def _wrap_chunks(self, chunks: List[str]) -> List[str]: """_wrap_chunks(chunks : [string]) -> [string] Wrap a sequence of text chunks and return a list of lines of length 'self.width' or less. (If 'break_long_words' is false, some lines may be longer than this.) Chunks correspond roughly to words and the whitespace between them: each chunk is indivisible (modulo 'break_long_words'), but a line break can come between any two chunks. Chunks should not have internal whitespace; ie. a chunk is either all whitespace or a "word". Whitespace chunks will be removed from the beginning and end of lines, but apart from that whitespace is preserved. """ lines = [] # type: List[str] if self.width <= 0: raise ValueError("invalid width %r (must be > 0)" % self.width) # Arrange in reverse order so items can be efficiently popped # from a stack of chucks. chunks.reverse() while chunks: # Start the list of chunks that will make up the current line. # cur_len is just the length of all the chunks in cur_line. cur_line = [] # type: List[str] cur_len = 0 # Figure out which static string will prefix this line. if lines: indent = self.subsequent_indent else: indent = self.initial_indent # Maximum width for this line. width = self.width - len(indent) # First chunk on line is whitespace -- drop it, unless this # is the very beginning of the text (ie. no lines started yet). if self.drop_whitespace and chunks[-1].strip() == '' and lines: del chunks[-1] while chunks: l = len(chunks[-1]) # Can at least squeeze this chunk onto the current line. 
if cur_len + l <= width: cur_line.append(chunks.pop()) cur_len += l # Nope, this line is full. else: break # The current line is full, and the next chunk is too big to # fit on *any* line (not just this one). if chunks and len(chunks[-1]) > width: self._handle_long_word(chunks, cur_line, cur_len, width) # If the last chunk on this line is all whitespace, drop it. if self.drop_whitespace and cur_line and cur_line[-1].strip() == '': del cur_line[-1] # Convert current line back to a string and store it in list # of all lines (return value). if cur_line: lines.append(indent + ''.join(cur_line)) return lines # -- Public interface ---------------------------------------------- def wrap(self, text: str) -> List[str]: """wrap(text : string) -> [string] Reformat the single paragraph in 'text' so it fits in lines of no more than 'self.width' columns, and return a list of wrapped lines. Tabs in 'text' are expanded with string.expandtabs(), and all other whitespace characters (including newline) are converted to space. """ text = self._munge_whitespace(text) chunks = self._split(text) if self.fix_sentence_endings: self._fix_sentence_endings(chunks) return self._wrap_chunks(chunks) def fill(self, text: str) -> str: """fill(text : string) -> string Reformat the single paragraph in 'text' to fit in lines of no more than 'self.width' columns, and return a new string containing the entire wrapped paragraph. """ return "\n".join(self.wrap(text)) # -- Convenience interface --------------------------------------------- def wrap(text: str, width: int = 70, **kwargs: Any) -> List[str]: """Wrap a single paragraph of text, returning a list of wrapped lines. Reformat the single paragraph in 'text' so it fits in lines of no more than 'width' columns, and return a list of wrapped lines. By default, tabs in 'text' are expanded with string.expandtabs(), and all other whitespace characters (including newline) are converted to space. See TextWrapper class for available keyword args to customize wrapping behaviour. """ w = TextWrapper(width=width, **kwargs) return w.wrap(text) def fill(text: str, width: int = 70, **kwargs: Any) -> str: """Fill a single paragraph of text, returning a new string. Reformat the single paragraph in 'text' to fit in lines of no more than 'width' columns, and return a new string containing the entire wrapped paragraph. As with wrap(), tabs are expanded and other whitespace characters converted to space. See TextWrapper class for available keyword args to customize wrapping behaviour. """ w = TextWrapper(width=width, **kwargs) return w.fill(text) # -- Loosely related functionality ------------------------------------- _whitespace_only_re = re.compile('^[ \t]+$', re.MULTILINE) _leading_whitespace_re = re.compile('(^[ \t]*)(?:[^ \t\n])', re.MULTILINE) def dedent(text: str) -> str: """Remove any common leading whitespace from every line in `text`. This can be used to make triple-quoted strings line up with the left edge of the display, while still presenting them in the source code in indented form. Note that tabs and spaces are both treated as whitespace, but they are not equal: the lines " hello" and "\thello" are considered to have no common leading whitespace. (This behaviour is new in Python 2.5; older versions of this module incorrectly expanded tabs before searching for common leading whitespace.) """ # Look for the longest leading string of spaces and tabs common to # all lines. 
margin = None # type: str text = _whitespace_only_re.sub('', text) indents = _leading_whitespace_re.findall(text) for indent in indents: if margin is None: margin = indent # Current line more deeply indented than previous winner: # no change (previous winner is still on top). elif indent.startswith(margin): pass # Current line consistent with and no deeper than previous winner: # it's the new winner. elif margin.startswith(indent): margin = indent # Current line and previous winner have no common whitespace: # there is no margin. else: margin = "" break # sanity check (testing/debugging only) if 0 and margin: for line in text.split("\n"): assert not line or line.startswith(margin), \ "line = %r, margin = %r" % (line, margin) if margin: text = re.sub(r'(?m)^' + margin, '', text) return text if __name__ == "__main__": #print dedent("\tfoo\n\tbar") #print dedent(" \thello there\n \t how are you?") print(dedent("Hello there.\n This is indented.")) mypy-0.560/test-data/unit/0000755€tŠÔÚ€2›s®0000000000013215007244021537 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/unit/check-abstract.test0000644€tŠÔÚ€2›s®0000005124513215007205025322 0ustar jukkaDROPBOX\Domain Users00000000000000-- Type checker test cases for abstract classes. -- Subtyping with abstract classes -- ------------------------------- [case testAbstractClassSubclasses] from abc import abstractmethod, ABCMeta i = None # type: I j = None # type: J a = None # type: A b = None # type: B c = None # type: C j = c # E: Incompatible types in assignment (expression has type "C", variable has type "J") a = i # E: Incompatible types in assignment (expression has type "I", variable has type "A") a = j # E: Incompatible types in assignment (expression has type "J", variable has type "A") b = i # E: Incompatible types in assignment (expression has type "I", variable has type "B") i = a i = b i = c j = a j = b a = b class I(metaclass=ABCMeta): @abstractmethod def f(self): pass class J(metaclass=ABCMeta): @abstractmethod def g(self): pass class A(I, J): pass class B(A): pass class C(I): pass [case testAbstractClassSubtypingViaExtension] from abc import abstractmethod, ABCMeta i = None # type: I j = None # type: J a = None # type: A o = None # type: object j = i # E: Incompatible types in assignment (expression has type "I", variable has type "J") a = i # E: Incompatible types in assignment (expression has type "I", variable has type "A") a = j # E: Incompatible types in assignment (expression has type "J", variable has type "A") i = o # E: Incompatible types in assignment (expression has type "object", variable has type "I") j = o # E: Incompatible types in assignment (expression has type "object", variable has type "J") i = a j = a i = j o = i o = j class I(metaclass=ABCMeta): @abstractmethod def f(self): pass class J(I): pass class A(J): pass [case testInheritingAbstractClassInSubclass] from abc import abstractmethod, ABCMeta i = None # type: I a = None # type: A b = None # type: B i = a # E: Incompatible types in assignment (expression has type "A", variable has type "I") b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = b i = b class I(metaclass=ABCMeta): @abstractmethod def f(self): pass class A: pass class B(A, I): pass -- Abstract class objects -- ---------------------- [case testAbstractClassAsTypeObject] from abc import abstractmethod, ABCMeta o = None # type: object t = None # type: type o = I t = I class I(metaclass=ABCMeta): @abstractmethod def f(self): pass [case 
testAbstractClassInCasts] from typing import cast from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): @abstractmethod def f(self): pass class A(I): pass class B: pass i, a, b = None, None, None # type: (I, A, B) o = None # type: object a = cast(I, o) # E: Incompatible types in assignment (expression has type "I", variable has type "A") b = cast(B, i) # Ok; a subclass of B might inherit I i = cast(I, b) # Ok; a subclass of B might inherit I i = cast(I, o) i = cast(I, a) [case testInstantiatingClassThatImplementsAbstractMethod] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): @abstractmethod def f(self): pass class B(A): def f(self): pass B() [out] [case testInstantiatingAbstractClass] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): pass class B(metaclass=ABCMeta): @abstractmethod def f(self): pass A() # OK B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'f' [out] [case testInstantiatingClassWithInheritedAbstractMethod] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): @abstractmethod def f(self): pass @abstractmethod def g(self): pass class B(A): pass B() # E: Cannot instantiate abstract class 'B' with abstract attributes 'f' and 'g' [out] [case testInstantiationAbstractsInTypeForFunctions] from typing import Type from abc import abstractmethod class A: @abstractmethod def m(self) -> None: pass class B(A): pass class C(B): def m(self) -> None: pass def f(cls: Type[A]) -> A: return cls() # OK def g() -> A: return A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'm' f(A) # E: Only concrete class can be given where "Type[A]" is expected f(B) # E: Only concrete class can be given where "Type[A]" is expected f(C) # OK x: Type[B] f(x) # OK [out] [case testInstantiationAbstractsInTypeForAliases] from typing import Type from abc import abstractmethod class A: @abstractmethod def m(self) -> None: pass class B(A): pass class C(B): def m(self) -> None: pass def f(cls: Type[A]) -> A: return cls() # OK Alias = A GoodAlias = C Alias() # E: Cannot instantiate abstract class 'A' with abstract attribute 'm' GoodAlias() f(Alias) # E: Only concrete class can be given where "Type[A]" is expected f(GoodAlias) [out] [case testInstantiationAbstractsInTypeForVariables] from typing import Type from abc import abstractmethod class A: @abstractmethod def m(self) -> None: pass class B(A): pass class C(B): def m(self) -> None: pass var: Type[A] var() var = A # E: Can only assign concrete classes to a variable of type "Type[A]" var = B # E: Can only assign concrete classes to a variable of type "Type[A]" var = C # OK var_old = None # type: Type[A] # Old syntax for variable annotations var_old() var_old = A # E: Can only assign concrete classes to a variable of type "Type[A]" var_old = B # E: Can only assign concrete classes to a variable of type "Type[A]" var_old = C # OK [out] [case testInstantiationAbstractsInTypeForClassMethods] from typing import Type from abc import abstractmethod class Logger: @staticmethod def log(a: Type[C]): pass class C: @classmethod def action(cls) -> None: cls() #OK for classmethods Logger.log(cls) #OK for classmethods @abstractmethod def m(self) -> None: pass [builtins fixtures/classmethod.pyi] [out] [case testInstantiatingClassWithInheritedAbstractMethodAndSuppression] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): @abstractmethod def a(self): pass @abstractmethod def b(self): pass @abstractmethod 
def c(self): pass @abstractmethod def d(self): pass @abstractmethod def e(self): pass @abstractmethod def f(self): pass @abstractmethod def g(self): pass @abstractmethod def h(self): pass @abstractmethod def i(self): pass @abstractmethod def j(self): pass a = A() # E: Cannot instantiate abstract class 'A' with abstract attributes 'a', 'b', ... and 'j' (7 methods suppressed) [out] -- Implementing abstract methods -- ----------------------------- [case testImplementingAbstractMethod] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): @abstractmethod def f(self, x: int) -> int: pass @abstractmethod def g(self, x: int) -> int: pass class B(A): def f(self, x: str) -> int: \ # E: Argument 1 of "f" incompatible with supertype "A" pass def g(self, x: int) -> int: pass [out] [case testImplementingAbstractMethodWithMultipleBaseClasses] from abc import abstractmethod, ABCMeta import typing class I(metaclass=ABCMeta): @abstractmethod def f(self, x: int) -> int: pass class J(metaclass=ABCMeta): @abstractmethod def g(self, x: str) -> str: pass class A(I, J): def f(self, x: str) -> int: pass \ # E: Argument 1 of "f" incompatible with supertype "I" def g(self, x: str) -> int: pass \ # E: Return type of "g" incompatible with supertype "J" def h(self) -> int: pass # Not related to any base class [out] [case testImplementingAbstractMethodWithExtension] from abc import abstractmethod, ABCMeta import typing class J(metaclass=ABCMeta): @abstractmethod def f(self, x: int) -> int: pass class I(J): pass class A(I): def f(self, x: str) -> int: pass \ # E: Argument 1 of "f" incompatible with supertype "J" [out] [case testInvalidOverridingAbstractMethod] from abc import abstractmethod, ABCMeta import typing class J(metaclass=ABCMeta): @abstractmethod def f(self, x: 'J') -> None: pass class I(J): @abstractmethod def f(self, x: 'I') -> None: pass # E: Argument 1 of "f" incompatible with supertype "J" [out] [case testAbstractClassCoAndContraVariance] from abc import abstractmethod, ABCMeta import typing class I(metaclass=ABCMeta): @abstractmethod def f(self, a: A) -> 'I': pass @abstractmethod def g(self, a: A) -> 'I': pass @abstractmethod def h(self, a: 'I') -> A: pass class A(I): def h(self, a: 'A') -> 'I': # Fail pass def f(self, a: 'I') -> 'I': pass def g(self, a: 'A') -> 'A': pass [out] main:11: error: Argument 1 of "h" incompatible with supertype "I" main:11: error: Return type of "h" incompatible with supertype "I" -- Accessing abstract members -- -------------------------- [case testAccessingAbstractMethod] from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): @abstractmethod def f(self, a: int) -> str: pass i, a, b = None, None, None # type: (I, int, str) a = i.f(a) # E: Incompatible types in assignment (expression has type "str", variable has type "int") b = i.f(b) # E: Argument 1 to "f" of "I" has incompatible type "str"; expected "int" i.g() # E: "I" has no attribute "g" b = i.f(a) [case testAccessingInheritedAbstractMethod] from abc import abstractmethod, ABCMeta class J(metaclass=ABCMeta): @abstractmethod def f(self, a: int) -> str: pass class I(J): pass i, a, b = None, None, None # type: (I, int, str) a = i.f(1) # E: Incompatible types in assignment (expression has type "str", variable has type "int") b = i.f(1) -- Any (dynamic) types -- ------------------- [case testAbstractClassWithAllDynamicTypes] from abc import abstractmethod, ABCMeta import typing class I(metaclass=ABCMeta): @abstractmethod def f(self, x): pass @abstractmethod def g(self, x): pass class 
A(I): def f(self, x): pass def g(self, x, y) -> None: pass \ # E: Signature of "g" incompatible with supertype "I" [out] [case testAbstractClassWithAllDynamicTypes2] from abc import abstractmethod, ABCMeta import typing class I(metaclass=ABCMeta): @abstractmethod def f(self, x): pass @abstractmethod def g(self, x): pass class A(I): def f(self, x): pass def g(self, x, y): pass [out] [case testAbstractClassWithImplementationUsingDynamicTypes] from abc import abstractmethod, ABCMeta import typing class I(metaclass=ABCMeta): @abstractmethod def f(self, x: int) -> None: pass @abstractmethod def g(self, x: int) -> None: pass class A(I): def f(self, x): pass def g(self, x, y): pass [out] -- Special cases -- ------------- [case testMultipleAbstractBases] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass class B(metaclass=ABCMeta): @abstractmethod def g(self) -> None: pass class C(A, B): @abstractmethod def h(self) -> None: pass [case testMemberAccessWithMultipleAbstractBaseClasses] from abc import abstractmethod, ABCMeta class A(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass class B(metaclass=ABCMeta): @abstractmethod def g(self) -> None: pass class C(A, B): pass x = None # type: C x.f() x.g() x.f(x) # E: Too many arguments for "f" of "A" x.g(x) # E: Too many arguments for "g" of "B" [case testInstantiatingAbstractClassWithMultipleBaseClasses] from abc import abstractmethod, ABCMeta class A(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass class B(metaclass=ABCMeta): @abstractmethod def g(self) -> None: pass class C(A, B): def f(self) -> None: pass class D(A, B): def g(self) -> None: pass class E(A, B): def f(self) -> None: pass def g(self) -> None: pass C() # E: Cannot instantiate abstract class 'C' with abstract attribute 'g' D() # E: Cannot instantiate abstract class 'D' with abstract attribute 'f' E() [case testInconsistentMro] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): pass class B(object, A): pass \ # E: Cannot determine consistent method resolution order (MRO) for "B" [case testOverloadedAbstractMethod] from foo import * [file foo.pyi] from abc import abstractmethod, ABCMeta from typing import overload class A(metaclass=ABCMeta): @abstractmethod @overload def f(self, x: int) -> int: pass @abstractmethod @overload def f(self, x: str) -> str: pass class B(A): @overload def f(self, x: int) -> int: pass @overload def f(self, x: str) -> str: pass A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'f' B() B().f(1) a = B() # type: A a.f(1) a.f('') a.f(B()) # E: No overload variant of "f" of "A" matches argument types [foo.B] [case testOverloadedAbstractMethodWithAlternativeDecoratorOrder] from foo import * [file foo.pyi] from abc import abstractmethod, ABCMeta from typing import overload class A(metaclass=ABCMeta): @overload @abstractmethod def f(self, x: int) -> int: pass @overload @abstractmethod def f(self, x: str) -> str: pass class B(A): @overload def f(self, x: int) -> int: pass @overload def f(self, x: str) -> str: pass A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'f' B() B().f(1) a = B() # type: A a.f(1) a.f('') a.f(B()) # E: No overload variant of "f" of "A" matches argument types [foo.B] [case testOverloadedAbstractMethodVariantMissingDecorator1] from foo import * [file foo.pyi] from abc import abstractmethod, ABCMeta from typing import overload class A(metaclass=ABCMeta): @abstractmethod \ # E: 
Overloaded method has both abstract and non-abstract variants @overload def f(self, x: int) -> int: pass @overload def f(self, x: str) -> str: pass [out] [case testOverloadedAbstractMethodVariantMissingDecorator1] from foo import * [file foo.pyi] from abc import abstractmethod, ABCMeta from typing import overload class A(metaclass=ABCMeta): @overload \ # E: Overloaded method has both abstract and non-abstract variants def f(self, x: int) -> int: pass @abstractmethod @overload def f(self, x: str) -> str: pass [out] [case testMultipleInheritanceAndAbstractMethod] import typing from abc import abstractmethod, ABCMeta class A: def f(self, x: str) -> None: pass class B(metaclass=ABCMeta): @abstractmethod def f(self, x: str) -> None: pass class C(A, B): pass [case testMultipleInheritanceAndAbstractMethod2] import typing from abc import abstractmethod, ABCMeta class A: def f(self, x: str) -> None: pass class B(metaclass=ABCMeta): @abstractmethod def f(self, x: int) -> None: pass class C(A, B): pass [out] main:8: error: Definition of "f" in base class "A" is incompatible with definition in base class "B" [case testCallAbstractMethodBeforeDefinition] import typing from abc import abstractmethod, ABCMeta class A(metaclass=ABCMeta): def f(self) -> None: self.g(1) # E: Argument 1 to "g" of "A" has incompatible type "int"; expected "str" @abstractmethod def g(self, x: str) -> None: pass [out] [case testAbstractOperatorMethods1] import typing from abc import abstractmethod, ABCMeta class A(metaclass=ABCMeta): @abstractmethod def __lt__(self, other: 'A') -> int: pass @abstractmethod def __gt__(self, other: 'A') -> int: pass [case testAbstractOperatorMethods2] import typing from abc import abstractmethod, ABCMeta class A(metaclass=ABCMeta): @abstractmethod def __radd__(self, other: 'C') -> str: pass # Error class B: @abstractmethod def __add__(self, other: 'A') -> int: pass class C: def __add__(self, other: int) -> B: pass [out] [case testAbstractClassWithAnyBase] from typing import Any from abc import abstractmethod, ABCMeta A: Any class D(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass class C(A, D): pass C() # A might implement 'f' -- Abstract properties -- ------------------- [case testReadOnlyAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass def f(a: A) -> None: a.x() # E: "int" not callable a.x = 1 # E: Property "x" defined in "A" is read-only [out] [case testReadOnlyAbstractPropertyForwardRef] from abc import abstractproperty, ABCMeta def f(a: A) -> None: a.x() # E: "int" not callable a.x = 1 # E: Property "x" defined in "A" is read-only class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass [out] [case testReadWriteAbstractProperty] from abc import abstractproperty, ABCMeta def f(a: A) -> None: a.x.y # E: "int" has no attribute "y" a.x = 1 class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass @x.setter def x(self, x: int) -> None: pass [out] [case testInstantiateClassWithReadOnlyAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass class B(A): pass b = B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'x' [case testInstantiateClassWithReadWriteAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass @x.setter def x(self, x: int) -> None: pass class B(A): pass b = B() # E: Cannot instantiate abstract 
class 'B' with abstract attribute 'x' [case testImplementAbstractPropertyViaProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass class B(A): @property def x(self) -> int: pass b = B() b.x() # E: "int" not callable [builtins fixtures/property.pyi] [case testImplementReradWriteAbstractPropertyViaProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass @x.setter def x(self, v: int) -> None: pass class B(A): @property def x(self) -> int: pass @x.setter def x(self, v: int) -> None: pass b = B() b.x.y # E: "int" has no attribute "y" [builtins fixtures/property.pyi] [case testImplementAbstractPropertyViaPropertyInvalidType] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass class B(A): @property def x(self) -> str: pass # E: Return type of "x" incompatible with supertype "A" b = B() b.x() # E: "str" not callable [builtins fixtures/property.pyi] [case testCantImplementAbstractPropertyViaInstanceVariable] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass class B(A): def __init__(self) -> None: self.x = 1 # E b = B() # E b.x.y # E [builtins fixtures/property.pyi] [out] main:7: error: Property "x" defined in "B" is read-only main:8: error: Cannot instantiate abstract class 'B' with abstract attribute 'x' main:9: error: "int" has no attribute "y" [case testSuperWithAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass class B(A): @property def x(self) -> int: return super().x.y # E: "int" has no attribute "y" [builtins fixtures/property.pyi] [out] [case testSuperWithReadWriteAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass @x.setter def x(self, v: int) -> None: pass class B(A): @property def x(self) -> int: return super().x.y # E @x.setter def x(self, v: int) -> None: super().x = '' # E [builtins fixtures/property.pyi] [out] main:10: error: "int" has no attribute "y" main:13: error: Invalid assignment target [case testOnlyImplementGetterOfReadWriteAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass @x.setter def x(self, v: int) -> None: pass class B(A): @property # E def x(self) -> int: pass b = B() b.x.y # E [builtins fixtures/property.pyi] [out] main:8: error: Read-only property cannot override read-write property main:11: error: "int" has no attribute "y" [case testDynamicallyTypedReadOnlyAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self): pass def f(a: A) -> None: a.x.y a.x = 1 # E: Property "x" defined in "A" is read-only [out] [case testDynamicallyTypedReadOnlyAbstractPropertyForwardRef] from abc import abstractproperty, ABCMeta def f(a: A) -> None: a.x.y a.x = 1 # E: Property "x" defined in "A" is read-only class A(metaclass=ABCMeta): @abstractproperty def x(self): pass [out] [case testDynamicallyTypedReadWriteAbstractProperty] from abc import abstractproperty, ABCMeta def f(a: A) -> None: a.x.y a.x = 1 class A(metaclass=ABCMeta): @abstractproperty def x(self): pass @x.setter def x(self, x): pass [out] mypy-0.560/test-data/unit/check-async-await.test0000644€tŠÔÚ€2›s®0000004621313215007205025736 0ustar jukkaDROPBOX\Domain 
Users00000000000000-- Tests for async def and await (PEP 492) -- --------------------------------------- [case testAsyncDefPass] async def f() -> int: pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncDefReturn] async def f() -> int: return 0 reveal_type(f()) # E: Revealed type is 'typing.Awaitable[builtins.int]' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncDefMissingReturn] # flags: --warn-no-return async def f() -> int: make_this_not_trivial = 1 [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:2: error: Missing return statement [case testAsyncDefReturnWithoutValue] async def f() -> int: make_this_not_trivial = 1 return [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:4: error: Return value expected [case testAwaitCoroutine] async def f() -> int: x = await f() reveal_type(x) # E: Revealed type is 'builtins.int*' return x [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] [case testAwaitDefaultContext] from typing import TypeVar T = TypeVar('T') async def f(x: T) -> T: y = await f(x) reveal_type(y) return y [typing fixtures/typing-full.pyi] [out] main:6: error: Revealed type is 'T`-1' [case testAwaitAnyContext] from typing import Any, TypeVar T = TypeVar('T') async def f(x: T) -> T: y = await f(x) # type: Any reveal_type(y) return y [typing fixtures/typing-full.pyi] [out] main:6: error: Revealed type is 'Any' [case testAwaitExplicitContext] from typing import TypeVar T = TypeVar('T') async def f(x: T) -> T: y = await f(x) # type: int reveal_type(y) return x [typing fixtures/typing-full.pyi] [out] main:5: error: Argument 1 to "f" has incompatible type "T"; expected "int" main:6: error: Revealed type is 'builtins.int' [case testAwaitGeneratorError] from typing import Any, Generator def g() -> Generator[int, None, str]: yield 0 return '' async def f() -> int: x = await g() return x [typing fixtures/typing-full.pyi] [out] main:7: error: Incompatible types in "await" (actual type "Generator[int, None, str]", expected type "Awaitable[Any]") [case testAwaitIteratorError] from typing import Any, Iterator def g() -> Iterator[Any]: yield async def f() -> int: x = await g() return x [typing fixtures/typing-full.pyi] [out] main:6: error: Incompatible types in "await" (actual type "Iterator[Any]", expected type "Awaitable[Any]") [case testAwaitArgumentError] def g() -> int: return 0 async def f() -> int: x = await g() return x [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:5: error: Incompatible types in "await" (actual type "int", expected type "Awaitable[Any]") [case testAwaitResultError] async def g() -> int: return 0 async def f() -> str: x = await g() # type: str return x [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAwaitReturnError] async def g() -> int: return 0 async def f() -> str: x = await g() return x [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:6: error: Incompatible return value type (got "int", expected "str") [case testAsyncFor] from typing import AsyncIterator class C(AsyncIterator[int]): async def __anext__(self) -> int: return 0 async def f() -> None: async for x in C(): reveal_type(x) # E: Revealed type is 'builtins.int*' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case 
testAsyncForError] from typing import AsyncIterator async def f() -> None: async for x in [1]: pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:4: error: AsyncIterable expected main:4: error: "List[int]" has no attribute "__aiter__" [case testAsyncForTypeComments] from typing import AsyncIterator, Union class C(AsyncIterator[int]): async def __anext__(self) -> int: return 0 async def f() -> None: async for x in C(): # type: str # E: Incompatible types in assignment (expression has type "int", variable has type "str") pass async for y in C(): # type: int pass async for z in C(): # type: Union[int, str] reveal_type(z) # E: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncForComprehension] # flags: --fast-parser --python-version 3.6 from typing import Generic, Iterable, TypeVar, AsyncIterator, Tuple T = TypeVar('T') class asyncify(Generic[T], AsyncIterator[T]): def __init__(self, iterable: Iterable[T]) -> None: self.iterable = iter(iterable) def __aiter__(self) -> AsyncIterator[T]: return self async def __anext__(self) -> T: try: return next(self.iterable) except StopIteration: raise StopAsyncIteration async def listcomp(obj: Iterable[int]): lst = [i async for i in asyncify(obj)] reveal_type(lst) # E: Revealed type is 'builtins.list[builtins.int*]' lst2 = [i async for i in asyncify(obj) for j in obj] reveal_type(lst2) # E: Revealed type is 'builtins.list[builtins.int*]' async def setcomp(obj: Iterable[int]): lst = {i async for i in asyncify(obj)} reveal_type(lst) # E: Revealed type is 'builtins.set[builtins.int*]' async def dictcomp(obj: Iterable[Tuple[int, str]]): lst = {i: j async for i, j in asyncify(obj)} reveal_type(lst) # E: Revealed type is 'builtins.dict[builtins.int*, builtins.str*]' async def generatorexp(obj: Iterable[int]): lst = (i async for i in asyncify(obj)) reveal_type(lst) # E: Revealed type is 'typing.AsyncIterator[builtins.int*]' lst2 = (i async for i in asyncify(obj) for i in obj) reveal_type(lst2) # E: Revealed type is 'typing.AsyncIterator[builtins.int*]' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncForComprehensionErrors] # flags: --fast-parser --python-version 3.6 from typing import Generic, Iterable, TypeVar, AsyncIterator, Tuple T = TypeVar('T') class asyncify(Generic[T], AsyncIterator[T]): def __init__(self, iterable: Iterable[T]) -> None: self.iterable = iter(iterable) def __aiter__(self) -> AsyncIterator[T]: return self async def __anext__(self) -> T: try: return next(self.iterable) except StopIteration: raise StopAsyncIteration async def wrong_iterable(obj: Iterable[int]): [i async for i in obj] [i for i in asyncify(obj)] {i: i async for i in obj} {i: i for i in asyncify(obj)} [out] main:18: error: AsyncIterable expected main:18: error: "Iterable[int]" has no attribute "__aiter__"; maybe "__iter__"? main:19: error: Iterable expected main:19: error: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? main:20: error: AsyncIterable expected main:20: error: "Iterable[int]" has no attribute "__aiter__"; maybe "__iter__"? main:21: error: Iterable expected main:21: error: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? 
[builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncWith] class C: async def __aenter__(self) -> int: pass async def __aexit__(self, x, y, z) -> None: pass async def f() -> None: async with C() as x: reveal_type(x) # E: Revealed type is 'builtins.int*' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncWithError] class C: def __enter__(self) -> int: pass def __exit__(self, x, y, z) -> None: pass async def f() -> None: async with C() as x: pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:6: error: "C" has no attribute "__aenter__"; maybe "__enter__"? main:6: error: "C" has no attribute "__aexit__"; maybe "__exit__"? [case testAsyncWithErrorBadAenter] class C: def __aenter__(self) -> int: pass async def __aexit__(self, x, y, z) -> None: pass async def f() -> None: async with C() as x: # E: Incompatible types in "async with" for "__aenter__" (actual type "int", expected type "Awaitable[Any]") pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncWithErrorBadAenter2] class C: def __aenter__(self) -> None: pass async def __aexit__(self, x, y, z) -> None: pass async def f() -> None: async with C() as x: # E: "None" has no attribute "__await__" pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncWithErrorBadAexit] class C: async def __aenter__(self) -> int: pass def __aexit__(self, x, y, z) -> int: pass async def f() -> None: async with C() as x: # E: Incompatible types in "async with" for "__aexit__" (actual type "int", expected type "Awaitable[Any]") pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncWithErrorBadAexit2] class C: async def __aenter__(self) -> int: pass def __aexit__(self, x, y, z) -> None: pass async def f() -> None: async with C() as x: # E: "None" has no attribute "__await__" pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncWithTypeComments] class C: async def __aenter__(self) -> int: pass async def __aexit__(self, x, y, z) -> None: pass async def f() -> None: async with C() as x: # type: int pass async with C() as y, C() as z: # type: str, int # E: Incompatible types in assignment (expression has type "int", variable has type "str") pass async with C() as a: # type: int, int # E: Invalid tuple literal type pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testNoYieldInAsyncDef] # flags: --python-version 3.5 async def f(): yield None # E: 'yield' in async function async def g(): yield # E: 'yield' in async function async def h(): x = yield # E: 'yield' in async function [builtins fixtures/async_await.pyi] [case testNoYieldFromInAsyncDef] async def f(): yield from [] async def g(): x = yield from [] [builtins fixtures/async_await.pyi] [out] main:3: error: 'yield from' in async function main:5: error: 'yield from' in async function [case testNoAsyncDefInPY2_python2] async def f(): # E: invalid syntax pass [case testYieldFromNoAwaitable] from typing import Any, Generator async def f() -> str: return '' def g() -> Generator[Any, None, str]: x = yield from f() return x [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] main:6: error: "yield from" can't be applied to "Awaitable[str]" [case testAwaitableSubclass] from typing import Any, AsyncIterator, Awaitable, Generator class A(Awaitable[int]): def __await__(self) -> Generator[Any, None, int]: yield return 0 class C: def 
__aenter__(self) -> A: return A() def __aexit__(self, *a) -> A: return A() class I(AsyncIterator[int]): def __aiter__(self) -> 'I': return self def __anext__(self) -> A: return A() async def main() -> None: x = await A() reveal_type(x) # E: Revealed type is 'builtins.int' async with C() as y: reveal_type(y) # E: Revealed type is 'builtins.int' async for z in I(): reveal_type(z) # E: Revealed type is 'builtins.int' [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testYieldTypeCheckInDecoratedCoroutine] from typing import Generator from types import coroutine @coroutine def f() -> Generator[int, str, int]: x = yield 0 x = yield '' # E: Incompatible types in "yield" (actual type "str", expected type "int") reveal_type(x) # E: Revealed type is 'builtins.str' if x: return 0 else: return '' # E: Incompatible return value type (got "str", expected "int") [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] -- Async generators (PEP 525), some test cases adapted from the PEP text -- --------------------------------------------------------------------- [case testAsyncGenerator] # flags: --python-version 3.6 from typing import AsyncGenerator, Generator async def f() -> int: return 42 async def g() -> AsyncGenerator[int, None]: value = await f() reveal_type(value) # E: Revealed type is 'builtins.int*' yield value yield 'not an int' # E: Incompatible types in "yield" (actual type "str", expected type "int") # return without a value is fine return reveal_type(g) # E: Revealed type is 'def () -> typing.AsyncGenerator[builtins.int, builtins.None]' reveal_type(g()) # E: Revealed type is 'typing.AsyncGenerator[builtins.int, builtins.None]' async def h() -> None: async for item in g(): reveal_type(item) # E: Revealed type is 'builtins.int*' async def wrong_return() -> Generator[int, None, None]: # E: The return type of an async generator function should be "AsyncGenerator" or one of its supertypes yield 3 [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testAsyncGeneratorReturnIterator] # flags: --python-version 3.6 from typing import AsyncIterator async def gen() -> AsyncIterator[int]: yield 3 yield 'not an int' # E: Incompatible types in "yield" (actual type "str", expected type "int") async def use_gen() -> None: async for item in gen(): reveal_type(item) # E: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testAsyncGeneratorManualIter] # flags: --python-version 3.6 from typing import AsyncGenerator async def genfunc() -> AsyncGenerator[int, None]: yield 1 yield 2 async def user() -> None: gen = genfunc() reveal_type(gen.__aiter__()) # E: Revealed type is 'typing.AsyncGenerator[builtins.int*, builtins.None]' reveal_type(await gen.__anext__()) # E: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testAsyncGeneratorAsend] # flags: --fast-parser --python-version 3.6 from typing import AsyncGenerator async def f() -> None: pass async def gen() -> AsyncGenerator[int, str]: await f() v = yield 42 reveal_type(v) # E: Revealed type is 'builtins.str' await f() async def h() -> None: g = gen() await g.asend(()) # E: Argument 1 to "asend" of "AsyncGenerator" has incompatible type "Tuple[]"; expected "str" reveal_type(await g.asend('hello')) # E: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testAsyncGeneratorAthrow] # flags: --fast-parser --python-version 3.6 from typing import AsyncGenerator 
async def gen() -> AsyncGenerator[str, int]: try: yield 'hello' except BaseException: yield 'world' async def h() -> None: g = gen() v = await g.asend(1) reveal_type(v) # E: Revealed type is 'builtins.str*' reveal_type(await g.athrow(BaseException)) # E: Revealed type is 'builtins.str*' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testAsyncGeneratorNoSyncIteration] # flags: --fast-parser --python-version 3.6 from typing import AsyncGenerator async def gen() -> AsyncGenerator[int, None]: for i in (1, 2, 3): yield i def h() -> None: for i in gen(): pass [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [out] main:9: error: Iterable expected main:9: error: "AsyncGenerator[int, None]" has no attribute "__iter__"; maybe "__aiter__"? [case testAsyncGeneratorNoYieldFrom] # flags: --fast-parser --python-version 3.6 from typing import AsyncGenerator async def f() -> AsyncGenerator[int, None]: pass async def gen() -> AsyncGenerator[int, None]: yield from f() # E: 'yield from' in async function [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testAsyncGeneratorNoReturnWithValue] # flags: --fast-parser --python-version 3.6 from typing import AsyncGenerator async def return_int() -> AsyncGenerator[int, None]: yield 1 return 42 # E: 'return' with value in async generator is not allowed async def return_none() -> AsyncGenerator[int, None]: yield 1 return None # E: 'return' with value in async generator is not allowed def f() -> None: return async def return_f() -> AsyncGenerator[int, None]: yield 1 return f() # E: 'return' with value in async generator is not allowed [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] -- The full matrix of coroutine compatibility -- ------------------------------------------ [case testFullCoroutineMatrix] from typing import Any, AsyncIterator, Awaitable, Generator, Iterator from types import coroutine # The various things you might try to use in `await` or `yield from`. def plain_generator() -> Generator[str, None, int]: yield 'a' return 1 async def plain_coroutine() -> int: return 1 @coroutine def decorated_generator() -> Generator[str, None, int]: yield 'a' return 1 @coroutine async def decorated_coroutine() -> int: return 1 class It(Iterator[str]): def __iter__(self) -> 'It': return self def __next__(self) -> str: return 'a' def other_iterator() -> It: return It() class Aw(Awaitable[int]): def __await__(self) -> Generator[str, Any, int]: yield 'a' return 1 def other_coroutine() -> Aw: return Aw() # The various contexts in which `await` or `yield from` might occur. 
def plain_host_generator() -> Generator[str, None, None]: yield 'a' x = 0 x = yield from plain_generator() x = yield from plain_coroutine() # E: "yield from" can't be applied to "Awaitable[int]" x = yield from decorated_generator() x = yield from decorated_coroutine() # E: "yield from" can't be applied to "AwaitableGenerator[Any, Any, int, Awaitable[int]]" x = yield from other_iterator() x = yield from other_coroutine() # E: "yield from" can't be applied to "Aw" async def plain_host_coroutine() -> None: x = 0 x = await plain_generator() # E: Incompatible types in "await" (actual type "Generator[str, None, int]", expected type "Awaitable[Any]") x = await plain_coroutine() x = await decorated_generator() x = await decorated_coroutine() x = await other_iterator() # E: Incompatible types in "await" (actual type "It", expected type "Awaitable[Any]") x = await other_coroutine() @coroutine def decorated_host_generator() -> Generator[str, None, None]: yield 'a' x = 0 x = yield from plain_generator() x = yield from plain_coroutine() x = yield from decorated_generator() x = yield from decorated_coroutine() x = yield from other_iterator() x = yield from other_coroutine() # E: "yield from" can't be applied to "Aw" @coroutine async def decorated_host_coroutine() -> None: x = 0 x = await plain_generator() # E: Incompatible types in "await" (actual type "Generator[str, None, int]", expected type "Awaitable[Any]") x = await plain_coroutine() x = await decorated_generator() x = await decorated_coroutine() x = await other_iterator() # E: Incompatible types in "await" (actual type "It", expected type "Awaitable[Any]") x = await other_coroutine() [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] mypy-0.560/test-data/unit/check-basic.test0000644€tŠÔÚ€2›s®0000001546113215007205024600 0ustar jukkaDROPBOX\Domain Users00000000000000[case testEmptyFile] [out] [case testAssignmentAndVarDef] a = None # type: A b = None # type: B a = a a = b # Fail class A: pass class B: pass [out] main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testConstructionAndAssignment] x = None # type: A x = A() x = B() class A: def __init__(self): pass class B: def __init__(self): pass [out] main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testInheritInitFromObject] x = None # type: A x = A() x = B() class A(object): pass class B(object): pass [out] main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testImplicitInheritInitFromObject] x = None # type: A o = None # type: object x = o # E: Incompatible types in assignment (expression has type "object", variable has type "A") x = A() o = x class A: pass class B: pass [out] [case testTooManyConstructorArgs] import typing object(object()) [out] main:2: error: Too many arguments for "object" [case testVarDefWithInit] import typing a = A() # type: A b = object() # type: A class A: pass [out] main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A") [case testInheritanceBasedSubtyping] import typing x = B() # type: A y = A() # type: B # Fail class A: pass class B(A): pass [out] main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testDeclaredVariableInParentheses] (x) = None # type: int x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") x = 1 -- Simple functions and calling -- 
---------------------------- [case testFunction] import typing def f(x: 'A') -> None: pass f(A()) f(B()) # Fail class A: pass class B: pass [out] main:4: error: Argument 1 to "f" has incompatible type "B"; expected "A" [case testNotCallable] import typing A()() class A: pass [out] main:2: error: "A" not callable [case testSubtypeArgument] import typing def f(x: 'A', y: 'B') -> None: pass f(B(), A()) # Fail f(B(), B()) class A: pass class B(A): pass [out] main:3: error: Argument 2 to "f" has incompatible type "A"; expected "B" [case testInvalidArgumentCount] import typing def f(x, y) -> None: pass f(object()) f(object(), object(), object()) [out] main:3: error: Too few arguments for "f" main:4: error: Too many arguments for "f" -- Locals -- ------ [case testLocalVariables] def f() -> None: x = None # type: A y = None # type: B x = x x = y # Fail class A: pass class B: pass [out] main:6: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testLocalVariableScope] def f() -> None: x = None # type: A x = A() def g() -> None: x = None # type: B x = A() # Fail class A: pass class B: pass [out] main:7: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testFunctionArguments] import typing def f(x: 'A', y: 'B') -> None: x = y # Fail x = x y = B() class A: pass class B: pass [out] main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testLocalVariableInitialization] import typing def f() -> None: a = A() # type: A b = B() # type: A # Fail class A: pass class B: pass [out] main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testVariableInitializationWithSubtype] import typing x = B() # type: A y = A() # type: B # Fail class A: pass class B(A): pass [out] main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") -- Misc -- ---- [case testInvalidReturn] import typing def f() -> 'A': return B() class A: pass class B: pass [out] main:3: error: Incompatible return value type (got "B", expected "A") [case testTopLevelContextAndInvalidReturn] import typing def f() -> 'A': return B() a = B() # type: A class A: pass class B: pass [out] main:3: error: Incompatible return value type (got "B", expected "A") main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testEmptyReturnInAnyTypedFunction] from typing import Any def f() -> Any: return [case testEmptyYieldInAnyTypedFunction] from typing import Any def f() -> Any: yield [case testModule__name__] import typing x = __name__ # type: str a = __name__ # type: A # E: Incompatible types in assignment (expression has type "str", variable has type "A") class A: pass [builtins fixtures/primitives.pyi] [case testModule__doc__] import typing x = __doc__ # type: str a = __doc__ # type: A # E: Incompatible types in assignment (expression has type "str", variable has type "A") class A: pass [builtins fixtures/primitives.pyi] [case testModule__file__] import typing x = __file__ # type: str a = __file__ # type: A # E: Incompatible types in assignment (expression has type "str", variable has type "A") class A: pass [builtins fixtures/primitives.pyi] [case test__package__] import typing x = __package__ # type: str a = __file__ # type: int # E: Incompatible types in assignment (expression has type "str", variable has type "int") -- Scoping and shadowing -- --------------------- [case 
testLocalVariableShadowing]
a = None # type: A
a = B() # Fail
a = A()
def f() -> None:
    a = None # type: B
    a = A() # Fail
    a = B()
a = B() # Fail
a = A()
class A: pass
class B: pass
[out]
main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
main:7: error: Incompatible types in assignment (expression has type "A", variable has type "B")
main:9: error: Incompatible types in assignment (expression has type "B", variable has type "A")

[case testGlobalDefinedInBlockWithType]
class A: pass
while A:
    a = None # type: A
    a = A()
    a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")

-- # type: signatures
-- ------------------

[case testFunctionSignatureAsComment]
def f(x): # type: (int) -> str
    return 1
f('')
[out]
main:2: error: Incompatible return value type (got "int", expected "str")
main:3: error: Argument 1 to "f" has incompatible type "str"; expected "int"

[case testMethodSignatureAsComment]
class A:
    def f(self, x):
        # type: (int) -> str
        self.f('') # Fail
        return 1
A().f('') # Fail
[out]
main:4: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
main:5: error: Incompatible return value type (got "int", expected "str")
main:6: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"

[case testTrailingCommaParsing-skip]
x = 1
x in 1,
if x in 1, :
    pass
[out]

[case testInitReturnTypeError]
class C:
    def __init__(self):
        # type: () -> int
        pass
[out]
main:2: error: The return type of "__init__" must be None

mypy-0.560/test-data/unit/check-bound.test

-- Enforcement of upper bounds
-- ---------------------------

[case testBoundOnGenericFunction]
from typing import TypeVar

class A: pass
class B(A): pass
class C(A): pass
class D: pass

T = TypeVar('T', bound=A)
U = TypeVar('U')

def f(x: T) -> T: pass
def g(x: U) -> U: return f(x) # Fail

f(A())
f(B())
f(D()) # Fail

b = B()
b = f(b)
b = f(C()) # Fail
[out]
main:12: error: Value of type variable "T" of "f" cannot be "U"
main:16: error: Value of type variable "T" of "f" cannot be "D"
main:20: error: Incompatible types in assignment (expression has type "C", variable has type "B")

[case testBoundOnGenericClass]
from typing import TypeVar, Generic
class A: pass
class B(A): pass
T = TypeVar('T', bound=A)
class G(Generic[T]):
    def __init__(self, x: T) -> None: pass
v = None # type: G[A]
w = None # type: G[B]
x = None # type: G[str] # E: Type argument "builtins.str" of "G" must be a subtype of "__main__.A"
y = G('a') # E: Value of type variable "T" of "G" cannot be "str"
z = G(A())
z = G(B())

[case testBoundVoid]
from typing import TypeVar, Generic
T = TypeVar('T', bound=int)
class C(Generic[T]):
    t = None # type: T
    def get(self) -> T:
        return self.t
c1 = None # type: C[None]
c1.get()
d = c1.get() # E: "get" of "C" does not return a value

[case testBoundAny]
from typing import TypeVar, Generic
T = TypeVar('T', bound=int)
class C(Generic[T]):
    def __init__(self, x: T) -> None: pass
def f(x: T) -> T: return x
def g(): pass
f(g())
C(g())
z = None # type: C

[case testBoundHigherOrderWithVoid]
from typing import TypeVar, Callable
class A: pass
T = TypeVar('T', bound=A)
def f(g: Callable[[], T]) -> T: return g()
def h() -> None: pass
f(h)
a = f(h) # E: "f" does not return a value

[case testBoundInheritance]
from typing import TypeVar, Generic
class A: pass
T = TypeVar('T')
TA = TypeVar('TA', bound=A)
class C(Generic[TA]): pass
class D0(C[TA], Generic[TA]): pass
class D1(C[T], Generic[T]): pass # E: Type argument "T`1" of "C" must be a subtype of "__main__.A"
class D2(C[A]): pass
class D3(C[str]): pass # E: Type argument "builtins.str" of "C" must be a subtype of "__main__.A"

-- Using information from upper bounds
-- -----------------------------------

[case testBoundGenericFunctions]
from typing import TypeVar
class A: pass
class B(A): pass

T = TypeVar('T')
TA = TypeVar('TA', bound=A)
TB = TypeVar('TB', bound=B)

def f(x: T) -> T:
    return x
def g(x: TA) -> TA:
    return f(x)
def h(x: TB) -> TB:
    return g(x)
def g2(x: TA) -> TA:
    return h(x) # Fail

def j(x: TA) -> A:
    return x
def k(x: TA) -> B:
    return x # Fail
[out]
main:16: error: Value of type variable "TB" of "h" cannot be "TA"
main:21: error: Incompatible return value type (got "TA", expected "B")

[case testBoundMethodUsage]
from typing import TypeVar
class A0:
    def foo(self) -> None: pass
class A(A0):
    def bar(self) -> None: pass
    a = 1
    @property
    def b(self) -> int:
        return self.a
class B(A):
    def baz(self) -> None: pass

T = TypeVar('T', bound=A)

def f(x: T) -> T:
    x.foo()
    x.bar()
    x.baz() # E: "T" has no attribute "baz"
    x.a
    x.b
    return x

b = f(B())
[builtins fixtures/property.pyi]
[out]

[case testBoundClassMethod]
from typing import TypeVar
class A0:
    @classmethod
    def foo(cls, x: int) -> int: pass
class A(A0): pass

T = TypeVar('T', bound=A)
def f(x: T) -> int:
    return x.foo(22)
[builtins fixtures/classmethod.pyi]

[case testBoundStaticMethod]
from typing import TypeVar
class A0:
    @staticmethod
    def foo(x: int) -> int: pass
class A(A0): pass

T = TypeVar('T', bound=A)
def f(x: T) -> int:
    return x.foo(22)
[builtins fixtures/staticmethod.pyi]

[case testBoundOnDecorator]
from typing import TypeVar, Callable, Any, cast
T = TypeVar('T', bound=Callable[..., Any])

def twice(f: T) -> T:
    def result(*args, **kwargs) -> Any:
        f(*args, **kwargs)
        return f(*args, **kwargs)
    return cast(T, result)

@twice
def foo(x: int) -> int:
    return x

a = 1
b = foo(a)
b = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
twice(a) # E: Value of type variable "T" of "twice" cannot be "int"
[builtins fixtures/args.pyi]

mypy-0.560/test-data/unit/check-callable.test

[case testCallableDef]
def f() -> None: pass

if callable(f):
    f()
else:
    f += 5
[builtins fixtures/callable.pyi]

[case testCallableLambda]
f = lambda: None

if callable(f):
    f()
else:
    f += 5
[builtins fixtures/callable.pyi]

[case testCallableNotCallable]
x = 5

if callable(x):
    x()
else:
    x += 5
[builtins fixtures/callable.pyi]

[case testUnion]
from typing import Callable, Union

x = 5 # type: Union[int, Callable[[], str]]

if callable(x):
    y = x() + 'test'
else:
    z = x + 6
[builtins fixtures/callable.pyi]

[case testUnionMultipleReturnTypes]
from typing import Callable, Union

x = 5 # type: Union[int, Callable[[], str], Callable[[], int]]

if callable(x):
    y = x() + 2 # E: Unsupported operand types for + (likely involving Union)
else:
    z = x + 6
[builtins fixtures/callable.pyi]

[case testUnionMultipleNonCallableTypes]
from typing import Callable, Union

x = 5 # type: Union[int, str, Callable[[], str]]

if callable(x):
    y = x() + 'test'
else:
    z = x + 6 # E: Unsupported operand types for + (likely involving Union)
[builtins fixtures/callable.pyi]

[case testCallableThenIsinstance]
from typing import Callable, Union

x = 5 # type: Union[int, str, Callable[[], str], Callable[[], int]]

if callable(x):
    y = x()
    if isinstance(y, int):
        b1 = y + 2
    else:
        b2 = y +
'test' else: if isinstance(x, int): b3 = x + 3 else: b4 = x + 'test2' [builtins fixtures/callable.pyi] [case testIsinstanceThenCallable] from typing import Callable, Union x = 5 # type: Union[int, str, Callable[[], str], Callable[[], int]] if isinstance(x, int): b1 = x + 1 else: if callable(x): y = x() if isinstance(y, int): b2 = y + 1 else: b3 = y + 'test' else: b4 = x + 'test2' [builtins fixtures/callable.pyi] [case testCallableWithDifferentArgTypes] from typing import Callable, Union x = 5 # type: Union[int, Callable[[], None], Callable[[int], None]] if callable(x): x() # E: Too few arguments [builtins fixtures/callable.pyi] [case testClassInitializer] from typing import Callable, Union class A: x = 5 a = A # type: Union[A, Callable[[], A]] if callable(a): a = a() a.x + 6 [builtins fixtures/callable.pyi] [case testCallableVariables] from typing import Union class A: x = 5 class B: x = int x = A() # type: Union[A, B] if callable(x.x): y = x.x() else: y = x.x + 5 [builtins fixtures/callable.pyi] [case testCallableAnd] from typing import Union, Callable x = 5 # type: Union[int, Callable[[], str]] if callable(x) and x() == 'test': x() else: x + 5 # E: Unsupported left operand type for + (some union) [builtins fixtures/callable.pyi] [case testCallableOr] from typing import Union, Callable x = 5 # type: Union[int, Callable[[], str]] if callable(x) or x() == 'test': # E: "int" not callable x() # E: "int" not callable else: x + 5 [builtins fixtures/callable.pyi] [case testCallableOrOtherType] from typing import Union, Callable x = 5 # type: Union[int, Callable[[], str]] if callable(x) or x == 2: pass else: pass [builtins fixtures/callable.pyi] [case testAnyCallable] from typing import Any x = 5 # type: Any if callable(x): reveal_type(x) # E: Revealed type is 'Any' else: reveal_type(x) # E: Revealed type is 'Any' [builtins fixtures/callable.pyi] [case testCallableCallableClasses] from typing import Union class A: pass class B: def __call__(self) -> None: pass a = A() # type: A b = B() # type: B c = A() # type: Union[A, B] if callable(a): 5 + 'test' if not callable(b): 5 + 'test' if callable(c): reveal_type(c) # E: Revealed type is '__main__.B' else: reveal_type(c) # E: Revealed type is '__main__.A' [builtins fixtures/callable.pyi] [case testCallableNestedUnions] from typing import Callable, Union T = Union[Union[int, Callable[[], int]], Union[str, Callable[[], str]]] def f(t: T) -> None: if callable(t): reveal_type(t()) # E: Revealed type is 'Union[builtins.int, builtins.str]' else: reveal_type(t) # E: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/callable.pyi] [case testCallableTypeVarEmpty] from typing import TypeVar T = TypeVar('T') def f(t: T) -> T: if callable(t): return 5 else: return t [builtins fixtures/callable.pyi] [case testCallableTypeVarUnion] from typing import Callable, TypeVar, Union T = TypeVar('T', int, Callable[[], int], Union[str, Callable[[], str]]) def f(t: T) -> None: if callable(t): reveal_type(t()) # E: Revealed type is 'builtins.int' # E: Revealed type is 'builtins.str' else: reveal_type(t) # E: Revealed type is 'builtins.int*' # E: Revealed type is 'builtins.str' [builtins fixtures/callable.pyi] [case testCallableTypeVarBound] from typing import TypeVar class A: def __call__(self) -> str: return 'hi' T = TypeVar('T', bound=A) def f(t: T) -> str: if callable(t): return t() else: return 5 [builtins fixtures/callable.pyi] [case testCallableTypeType] from typing import Type class A: pass T = Type[A] def f(t: T) -> A: if callable(t): return t() 
    else:
        return 5
[builtins fixtures/callable.pyi]

[case testCallableTypeUnion]
from abc import ABCMeta, abstractmethod
from typing import Type, Union

class A(metaclass=ABCMeta):
    @abstractmethod
    def f(self) -> None: pass

class B: pass

x = B # type: Union[Type[A], Type[B]]
if callable(x):
    # Abstract classes raise an error when called, but are indeed `callable`
    pass
else:
    'test' + 5
[builtins fixtures/callable.pyi]

[case testCallableUnionOfTypes]
from abc import ABCMeta, abstractmethod
from typing import Type, Union

class A(metaclass=ABCMeta):
    @abstractmethod
    def f(self) -> None: pass

class B: pass

x = B # type: Type[Union[A, B]]
if callable(x):
    # Abstract classes raise an error when called, but are indeed `callable`
    pass
else:
    'test' + 5
[builtins fixtures/callable.pyi]

mypy-0.560/test-data/unit/check-class-namedtuple.test

[case testNewNamedTupleOldPythonVersion]
# flags: --python-version 3.5
from typing import NamedTuple

class E(NamedTuple): # E: NamedTuple class syntax is only supported in Python 3.6
    pass

[case testNewNamedTupleNoUnderscoreFields]
# flags: --python-version 3.6
from typing import NamedTuple

class X(NamedTuple):
    x: int
    _y: int # E: NamedTuple field name cannot start with an underscore: _y
    _z: int # E: NamedTuple field name cannot start with an underscore: _z

[case testNewNamedTupleAccessingAttributes]
# flags: --python-version 3.6
from typing import NamedTuple

class X(NamedTuple):
    x: int
    y: str

x: X
x.x
x.y
x.z # E: "X" has no attribute "z"

[case testNewNamedTupleAttributesAreReadOnly]
# flags: --python-version 3.6
from typing import NamedTuple

class X(NamedTuple):
    x: int

x: X
x.x = 5 # E: Property "x" defined in "X" is read-only
x.y = 5 # E: "X" has no attribute "y"
class A(X): pass
a: A
a.x = 5 # E: Property "x" defined in "A" is read-only

[case testNewNamedTupleCreateWithPositionalArguments]
# flags: --python-version 3.6
from typing import NamedTuple

class X(NamedTuple):
    x: int
    y: str

x = X(1, '2')
x.x
x.z # E: "X" has no attribute "z"
x = X(1) # E: Too few arguments for "X"
x = X(1, '2', 3) # E: Too many arguments for "X"

[case testNewNamedTupleShouldBeSingleBase]
# flags: --python-version 3.6
from typing import NamedTuple

class A: ...
class X(NamedTuple, A): # E: NamedTuple should be a single base pass [case testCreateNewNamedTupleWithKeywordArguments] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: str x = X(x=1, y='x') x = X(1, y='x') x = X(x=1, z=1) # E: Unexpected keyword argument "z" for "X" x = X(y='x') # E: Missing positional argument "x" in call to "X" [case testNewNamedTupleCreateAndUseAsTuple] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: str x = X(1, 'x') a, b = x a, b, c = x # E: Need more than 2 values to unpack (3 expected) [case testNewNamedTupleWithItemTypes] # flags: --python-version 3.6 from typing import NamedTuple class N(NamedTuple): a: int b: str n = N(1, 'x') s: str = n.a # E: Incompatible types in assignment (expression has type "int", \ variable has type "str") i: int = n.b # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") x, y = n x = y # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testNewNamedTupleConstructorArgumentTypes] # flags: --python-version 3.6 from typing import NamedTuple class N(NamedTuple): a: int b: str n = N('x', 'x') # E: Argument 1 to "N" has incompatible type "str"; expected "int" n = N(1, b=2) # E: Argument 2 to "N" has incompatible type "int"; expected "str" N(1, 'x') N(b='x', a=1) [case testNewNamedTupleAsBaseClass] # flags: --python-version 3.6 from typing import NamedTuple class N(NamedTuple): a: int b: str class X(N): pass x = X(1, 2) # E: Argument 2 to "X" has incompatible type "int"; expected "str" s = '' i = 0 s = x.a # E: Incompatible types in assignment (expression has type "int", variable has type "str") i, s = x s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testNewNamedTupleSelfTypeWithNamedTupleAsBase] # flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): a: int b: str class B(A): def f(self, x: int) -> None: self.f(self.a) self.f(self.b) # E: Argument 1 to "f" of "B" has incompatible type "str"; expected "int" i = 0 s = '' i, s = self i, i = self # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") [out] [case testNewNamedTupleTypeReferenceToClassDerivedFrom] # flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): a: int b: str class B(A): def f(self, x: 'B') -> None: i = 0 s = '' self = x i, s = x i, s = x.a, x.b i, s = x.a, x.a # E: Incompatible types in assignment (expression has type "int", \ variable has type "str") i, i = self # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") [out] [case testNewNamedTupleSubtyping] # flags: --python-version 3.6 from typing import NamedTuple, Tuple class A(NamedTuple): a: int b: str class B(A): pass a = A(1, '') b = B(1, '') t: Tuple[int, str] b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "A") b = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "B") t = a t = (1, '') t = b a = b [case testNewNamedTupleSimpleTypeInference] # flags: --python-version 3.6 from typing import NamedTuple, Tuple class A(NamedTuple): a: int l = [A(1), A(2)] a = A(1) a = l[0] (i,) = l[0] i, i = l[0] # E: Need more than 1 value to unpack (2 expected) l = [A(1)] a = (1,) # E: 
Incompatible types in assignment (expression has type "Tuple[int]", \ variable has type "A") [builtins fixtures/list.pyi] [case testNewNamedTupleMissingClassAttribute] # flags: --python-version 3.6 from typing import NamedTuple class MyNamedTuple(NamedTuple): a: int b: str MyNamedTuple.x # E: "Type[MyNamedTuple]" has no attribute "x" [case testNewNamedTupleEmptyItems] # flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): ... [case testNewNamedTupleForwardRef] # flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): b: 'B' class B: ... a = A(B()) a = A(1) # E: Argument 1 to "A" has incompatible type "int"; expected "B" [case testNewNamedTupleProperty] # flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): a: int class B(A): @property def b(self) -> int: return self.a class C(B): pass B(1).b C(2).b [builtins fixtures/property.pyi] [case testNewNamedTupleAsDict] # flags: --python-version 3.6 from typing import NamedTuple, Any class X(NamedTuple): x: Any y: Any x: X reveal_type(x._asdict()) # E: Revealed type is 'builtins.dict[builtins.str, Any]' [builtins fixtures/dict.pyi] [case testNewNamedTupleReplaceTyped] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: str x: X reveal_type(x._replace()) # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]' x._replace(x=5) x._replace(y=5) # E: Argument 1 to "_replace" of "X" has incompatible type "int"; expected "str" [case testNewNamedTupleFields] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: str reveal_type(X._fields) # E: Revealed type is 'Tuple[builtins.str, builtins.str]' reveal_type(X._field_types) # E: Revealed type is 'builtins.dict[builtins.str, Any]' reveal_type(X._field_defaults) # E: Revealed type is 'builtins.dict[builtins.str, Any]' reveal_type(X.__annotations__) # E: Revealed type is 'builtins.dict[builtins.str, Any]' [builtins fixtures/dict.pyi] [case testNewNamedTupleUnit] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): pass x: X = X() x._replace() x._fields[0] # E: Tuple index out of range [case testNewNamedTupleJoinNamedTuple] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: str class Y(NamedTuple): x: int y: str reveal_type([X(3, 'b'), Y(1, 'a')]) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]' [builtins fixtures/list.pyi] [case testNewNamedTupleJoinTuple] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: str reveal_type([(3, 'b'), X(1, 'a')]) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]' reveal_type([X(1, 'a'), (3, 'b')]) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]' [builtins fixtures/list.pyi] [case testNewNamedTupleWithTooManyArguments] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y = z = 2 # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" def f(self): pass [case testNewNamedTupleWithInvalidItems2] # flags: --python-version 3.6 import typing class X(typing.NamedTuple): x: int y = 1 x.x: int z: str = 'z' aa: int [out] main:6: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" main:7: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" main:7: error: Type cannot be declared in 
assignment to non-self attribute main:7: error: "int" has no attribute "x" main:9: error: Non-default NamedTuple fields cannot follow default fields [builtins fixtures/list.pyi] [case testNewNamedTupleWithoutTypesSpecified] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y = 2 # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" [case testTypeUsingTypeCNamedTuple] # flags: --python-version 3.6 from typing import NamedTuple, Type class N(NamedTuple): x: int y: str def f(a: Type[N]): a() [builtins fixtures/list.pyi] [out] main:8: error: Unsupported type Type["N"] [case testNewNamedTupleWithDefaults] # flags: --fast-parser --python-version 3.6 from typing import List, NamedTuple, Optional class X(NamedTuple): x: int y: int = 2 reveal_type(X(1)) # E: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.X]' reveal_type(X(1, 2)) # E: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.X]' X(1, 'a') # E: Argument 2 to "X" has incompatible type "str"; expected "int" X(1, z=3) # E: Unexpected keyword argument "z" for "X" class HasNone(NamedTuple): x: int y: Optional[int] = None reveal_type(HasNone(1)) # E: Revealed type is 'Tuple[builtins.int, Union[builtins.int, builtins.None], fallback=__main__.HasNone]' class Parameterized(NamedTuple): x: int y: List[int] = [1] + [2] z: List[int] = [] reveal_type(Parameterized(1)) # E: Revealed type is 'Tuple[builtins.int, builtins.list[builtins.int], builtins.list[builtins.int], fallback=__main__.Parameterized]' Parameterized(1, ['not an int']) # E: List item 0 has incompatible type "str"; expected "int" class Default: pass class UserDefined(NamedTuple): x: Default = Default() reveal_type(UserDefined()) # E: Revealed type is 'Tuple[__main__.Default, fallback=__main__.UserDefined]' reveal_type(UserDefined(Default())) # E: Revealed type is 'Tuple[__main__.Default, fallback=__main__.UserDefined]' UserDefined(1) # E: Argument 1 to "UserDefined" has incompatible type "int"; expected "Default" [builtins fixtures/list.pyi] [case testNewNamedTupleWithDefaultsStrictOptional] # flags: --fast-parser --strict-optional --python-version 3.6 from typing import List, NamedTuple, Optional class HasNone(NamedTuple): x: int y: Optional[int] = None reveal_type(HasNone(1)) # E: Revealed type is 'Tuple[builtins.int, Union[builtins.int, builtins.None], fallback=__main__.HasNone]' HasNone(None) # E: Argument 1 to "HasNone" has incompatible type "None"; expected "int" HasNone(1, y=None) HasNone(1, y=2) class CannotBeNone(NamedTuple): x: int y: int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [builtins fixtures/list.pyi] [case testNewNamedTupleWrongType] # flags: --fast-parser --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int y: int = 'not an int' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testNewNamedTupleErrorInDefault] # flags: --fast-parser --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: int = 1 + '1' # E: Unsupported operand types for + ("int" and "str") [case testNewNamedTupleInheritance] # flags: --fast-parser --python-version 3.6 from typing import NamedTuple class X(NamedTuple): x: str y: int = 3 class Y(X): def method(self) -> str: self.y return self.x reveal_type(Y('a')) # E: Revealed type is 'Tuple[builtins.str, builtins.int, fallback=__main__.Y]' Y(y=1, x='1').method() class CallsBaseInit(X): def 
__init__(self, x: str) -> None: super().__init__(x) [case testNewNamedTupleWithMethods] from typing import NamedTuple class XMeth(NamedTuple): x: int def double(self) -> int: return self.x async def asyncdouble(self) -> int: return self.x class XRepr(NamedTuple): x: int y: int = 1 def __str__(self) -> str: return 'string' def __add__(self, other: XRepr) -> int: return 0 reveal_type(XMeth(1).double()) # E: Revealed type is 'builtins.int' reveal_type(XMeth(1).asyncdouble()) # E: Revealed type is 'typing.Awaitable[builtins.int]' reveal_type(XMeth(42).x) # E: Revealed type is 'builtins.int' reveal_type(XRepr(42).__str__()) # E: Revealed type is 'builtins.str' reveal_type(XRepr(1, 2).__add__(XRepr(3))) # E: Revealed type is 'builtins.int' [typing fixtures/typing-full.pyi] [case testNewNamedTupleOverloading] from typing import NamedTuple, overload class Overloader(NamedTuple): x: int @overload def method(self, y: str) -> str: pass @overload def method(self, y: int) -> int: pass def method(self, y): return y reveal_type(Overloader(1).method('string')) # E: Revealed type is 'builtins.str' reveal_type(Overloader(1).method(1)) # E: Revealed type is 'builtins.int' Overloader(1).method(('tuple',)) # E: No overload variant of "method" of "Overloader" matches argument types [Tuple[builtins.str]] [case testNewNamedTupleMethodInheritance] from typing import NamedTuple, TypeVar T = TypeVar('T') class Base(NamedTuple): x: int def copy(self: T) -> T: reveal_type(self) # E: Revealed type is 'T`-1' return self def good_override(self) -> int: reveal_type(self) # E: Revealed type is 'Tuple[builtins.int, fallback=__main__.Base]' reveal_type(self[0]) # E: Revealed type is 'builtins.int' self[0] = 3 # E: Unsupported target for indexed assignment reveal_type(self.x) # E: Revealed type is 'builtins.int' self.x = 3 # E: Property "x" defined in "Base" is read-only self[1] # E: Tuple index out of range reveal_type(self[T]) # E: Revealed type is 'builtins.int' return self.x def bad_override(self) -> int: return self.x class Child(Base): def new_method(self) -> int: reveal_type(self) # E: Revealed type is 'Tuple[builtins.int, fallback=__main__.Child]' reveal_type(self[0]) # E: Revealed type is 'builtins.int' self[0] = 3 # E: Unsupported target for indexed assignment reveal_type(self.x) # E: Revealed type is 'builtins.int' self.x = 3 # E: Property "x" defined in "Child" is read-only self[1] # E: Tuple index out of range return self.x def good_override(self) -> int: return 0 def bad_override(self) -> str: # E: Return type of "bad_override" incompatible with supertype "Base" return 'incompatible' def takes_base(base: Base) -> int: return base.x reveal_type(Base(1).copy()) # E: Revealed type is 'Tuple[builtins.int, fallback=__main__.Base]' reveal_type(Child(1).copy()) # E: Revealed type is 'Tuple[builtins.int, fallback=__main__.Child]' reveal_type(Base(1).good_override()) # E: Revealed type is 'builtins.int' reveal_type(Child(1).good_override()) # E: Revealed type is 'builtins.int' reveal_type(Base(1).bad_override()) # E: Revealed type is 'builtins.int' reveal_type(takes_base(Base(1))) # E: Revealed type is 'builtins.int' reveal_type(takes_base(Child(1))) # E: Revealed type is 'builtins.int' [builtins fixtures/tuple.pyi] [case testNewNamedTupleIllegalNames] from typing import Callable, NamedTuple class XMethBad(NamedTuple): x: int def _fields(self): # E: Cannot overwrite NamedTuple attribute "_fields" return 'no chance for this' class MagicalFields(NamedTuple): x: int def __slots__(self) -> None: pass # E: Cannot overwrite 
NamedTuple attribute "__slots__"
    def __new__(cls) -> None: pass # E: Cannot overwrite NamedTuple attribute "__new__"
    def _source(self) -> int: pass # E: Cannot overwrite NamedTuple attribute "_source"
    __annotations__ = {'x': float} # E: NamedTuple field name cannot start with an underscore: __annotations__ \
        # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" \
        # E: Cannot overwrite NamedTuple attribute "__annotations__"

class AnnotationsAsAMethod(NamedTuple):
    x: int
    # This fails at runtime because typing.py assumes that __annotations__ is a dictionary.
    def __annotations__(self) -> float: # E: Cannot overwrite NamedTuple attribute "__annotations__"
        return 1.0

class ReuseNames(NamedTuple):
    x: int
    def x(self) -> str: # E: Name 'x' already defined
        return ''

    def y(self) -> int:
        return 0
    y: str # E: Name 'y' already defined

class ReuseCallableNamed(NamedTuple):
    z: Callable[[ReuseNames], int]
    def z(self) -> int: # E: Name 'z' already defined
        return 0

[builtins fixtures/dict.pyi]

[case testNewNamedTupleDocString]
from typing import NamedTuple

class Documented(NamedTuple):
    """This is a docstring."""
    x: int

reveal_type(Documented.__doc__) # E: Revealed type is 'builtins.str'
reveal_type(Documented(1).x) # E: Revealed type is 'builtins.int'

class BadDoc(NamedTuple):
    x: int
    def __doc__(self) -> str:
        return ''

reveal_type(BadDoc(1).__doc__()) # E: Revealed type is 'builtins.str'

[case testNewNamedTupleClassMethod]
from typing import NamedTuple

class HasClassMethod(NamedTuple):
    x: str

    @classmethod
    def new(cls, f: str) -> 'HasClassMethod':
        reveal_type(cls) # E: Revealed type is 'def (x: builtins.str) -> Tuple[builtins.str, fallback=__main__.HasClassMethod]'
        reveal_type(HasClassMethod) # E: Revealed type is 'def (x: builtins.str) -> Tuple[builtins.str, fallback=__main__.HasClassMethod]'
        return cls(x=f)

[builtins fixtures/classmethod.pyi]

[case testNewNamedTupleStaticMethod]
from typing import NamedTuple

class HasStaticMethod(NamedTuple):
    x: str

    @staticmethod
    def new(f: str) -> 'HasStaticMethod':
        return HasStaticMethod(x=f)

[builtins fixtures/classmethod.pyi]

[case testNewNamedTupleProperty]
from typing import NamedTuple

class HasStaticMethod(NamedTuple):
    x: str

    @property
    def size(self) -> int:
        reveal_type(self) # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.HasStaticMethod]'
        return 4

[builtins fixtures/property.pyi]

mypy-0.560/test-data/unit/check-classes.test

-- Methods
-- -------

[case testMethodCall]
a = None # type: A
b = None # type: B

a.foo(B()) # Fail
a.bar(B(), A()) # Fail

a.foo(A())
b.bar(B(), A())

class A:
    def foo(self, x: 'A') -> None: pass
class B:
    def bar(self, x: 'B', y: A) -> None: pass
[out]
main:5: error: Argument 1 to "foo" of "A" has incompatible type "B"; expected "A"
main:6: error: "A" has no attribute "bar"

[case testMethodCallWithSubtype]
a = None # type: A

a.foo(A())
a.foo(B())
a.bar(A()) # Fail
a.bar(B())

class A:
    def foo(self, x: 'A') -> None: pass
    def bar(self, x: 'B') -> None: pass
class B(A): pass
[out]
main:5: error: Argument 1 to "bar" of "A" has incompatible type "A"; expected "B"

[case testInheritingMethod]
a = None # type: B
a.foo(A()) # Fail
a.foo(B())

class A:
    def foo(self, x: 'B') -> None: pass
class B(A): pass
[out]
main:3: error: Argument 1 to "foo" of "A" has incompatible type "A"; expected "B"

[case testMethodCallWithInvalidNumberOfArguments]
a = None # type: A
a.foo() # Fail
a.foo(object(), A()) # Fail

class A:
    def
foo(self, x: 'A') -> None: pass [out] main:3: error: Too few arguments for "foo" of "A" main:4: error: Too many arguments for "foo" of "A" main:4: error: Argument 1 to "foo" of "A" has incompatible type "object"; expected "A" [case testMethodBody] import typing class A: def f(self) -> None: a = object() # type: A # Fail [out] main:4: error: Incompatible types in assignment (expression has type "object", variable has type "A") [case testMethodArguments] import typing class A: def f(self, a: 'A', b: 'B') -> None: a = B() # Fail b = A() # Fail a = A() b = B() a = a a = b # Fail class B: pass [out] main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:5: error: Incompatible types in assignment (expression has type "A", variable has type "B") main:9: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testReturnFromMethod] import typing class A: def f(self) -> 'A': return B() # Fail return A() class B: pass [out] main:4: error: Incompatible return value type (got "B", expected "A") [case testSelfArgument] import typing class A: def f(self) -> None: o = self # type: B # Fail self.g() # Fail a = self # type: A self.f() class B: pass [out] main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") main:5: error: "A" has no attribute "g" [case testAssignToMethodViaInstance] import typing class A: def f(self): pass A().f = None # E: Cannot assign to a method -- Attributes -- ---------- [case testReferToInvalidAttribute] class A: def __init__(self): self.x = object() a = None # type: A a.y a.y = object() a.x a.x = object() [out] main:6: error: "A" has no attribute "y" main:7: error: "A" has no attribute "y" [case testArgumentTypeInference] class A: def __init__(self, aa: 'A', bb: 'B') -> None: self.a = aa self.b = bb class B: pass a = None # type: A b = None # type: B a.a = b # Fail a.b = a # Fail b.a # Fail a.a = a a.b = b [out] main:9: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:10: error: Incompatible types in assignment (expression has type "A", variable has type "B") main:11: error: "B" has no attribute "a" [case testExplicitAttributeInBody] a = None # type: A a.x = object() # Fail a.x = A() class A: x = None # type: A [out] main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A") [case testAttributeDefinedInNonInitMethod] import typing class A: def f(self) -> None: self.x = 1 self.y = '' self.x = 1 a = A() a.x = 1 a.y = '' a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") a.z = 0 # E: "A" has no attribute "z" [case testInheritanceAndAttributeAssignment] import typing class A: def f(self) -> None: self.x = 0 class B(A): def f(self) -> None: self.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [out] [case testAssignmentToAttributeInMultipleMethods] import typing class A: def f(self) -> None: self.x = 0 def g(self) -> None: self.x = '' # Fail def __init__(self) -> None: self.x = '' # Fail [out] main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:8: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testClassNamesDefinedOnSelUsedInClassBody] class A(object): def f(self): self.attr = 1 attr = 0 class B(object): attr = 0 def f(self): self.attr = 1 class C(object): attr = 0 def 
f(self): self.attr = 1 attr = 0 class D(object): def g(self): self.attr = 1 attr = 0 def f(self): self.attr = 1 [out] [case testClassNamesDefinedOnSelUsedInClassBodyReveal] class A(object): def f(self) -> None: self.attr = 1 reveal_type(attr) # E: Revealed type is 'builtins.int' class B(object): attr = 0 def f(self) -> None: reveal_type(self.attr) # E: Revealed type is 'builtins.int' [out] -- Method overriding -- ----------------- [case testMethodOverridingWithIdenticalSignature] import typing class A: def f(self, x: 'A') -> None: pass def g(self, x: 'B' , y: object) -> 'A': pass def h(self) -> None: pass class B(A): def f(self, x: A) -> None: pass def g(self, x: 'B' , y: object) -> A: pass def h(self) -> None: pass [out] [case testMethodOverridingWithCovariantType] import typing class A: def f(self, x: 'A', y: 'B') -> 'A': pass def g(self, x: 'A', y: 'B') -> 'A': pass class B(A): def f(self, x: A, y: 'B') -> 'B': pass def g(self, x: A, y: A) -> 'A': pass [out] [case testMethodOverridingWithIncompatibleTypes] import typing class A: def f(self, x: 'A', y: 'B') -> 'A': pass def g(self, x: 'A', y: 'B') -> 'A': pass def h(self, x: 'A', y: 'B') -> 'A': pass class B(A): def f(self, x: 'B', y: 'B') -> A: pass # Fail def g(self, x: A, y: A) -> A: pass def h(self, x: A, y: 'B') -> object: pass # Fail [out] main:7: error: Argument 1 of "f" incompatible with supertype "A" main:9: error: Return type of "h" incompatible with supertype "A" [case testMethodOverridingWithIncompatibleArgumentCount] import typing class A: def f(self, x: 'A') -> None: pass def g(self, x: 'A', y: 'B') -> 'A': pass class B(A): def f(self, x: A, y: A) -> None: pass # Fail def g(self, x: A) -> A: pass # Fail [out] main:6: error: Signature of "f" incompatible with supertype "A" main:7: error: Signature of "g" incompatible with supertype "A" [case testMethodOverridingAcrossDeepInheritanceHierarchy1] import typing class A: def f(self, x: 'B') -> None: pass class B(A): pass class C(B): # with gap in implementations def f(self, x: 'C') -> None: # Fail pass [out] main:6: error: Argument 1 of "f" incompatible with supertype "A" [case testMethodOverridingAcrossDeepInheritanceHierarchy2] import typing class A: def f(self) -> 'B': pass class B(A): def f(self) -> 'C': pass class C(B): # with multiple implementations def f(self) -> B: # Fail pass [out] main:7: error: Return type of "f" incompatible with supertype "B" [case testMethodOverridingWithVoidReturnValue] import typing class A: def f(self) -> None: pass def g(self) -> 'A': pass class B(A): def f(self) -> A: pass # Fail def g(self) -> None: pass [out] main:6: error: Return type of "f" incompatible with supertype "A" [case testOverride__new__WithDifferentSignature] class A: def __new__(cls, x: int) -> str: return '' class B(A): def __new__(cls) -> int: return 1 [case testInnerFunctionNotOverriding] class A: def f(self) -> int: pass class B(A): def g(self) -> None: def f(self) -> str: pass [case testOverride__init_subclass__WithDifferentSignature] class A: def __init_subclass__(cls, x: int) -> None: pass class B(A): def __init_subclass__(cls) -> None: pass [case testOverrideWithDecorator] from typing import Callable def int_to_none(f: Callable[..., int]) -> Callable[..., None]: ... def str_to_int(f: Callable[..., str]) -> Callable[..., int]: ... 
class A: def f(self) -> None: pass def g(self) -> str: pass def h(self) -> None: pass class B(A): @int_to_none def f(self) -> int: pass @str_to_int def g(self) -> str: pass # E: Signature of "g" incompatible with supertype "A" @int_to_none @str_to_int def h(self) -> str: pass [case testOverrideDecorated] from typing import Callable def str_to_int(f: Callable[..., str]) -> Callable[..., int]: ... class A: @str_to_int def f(self) -> str: pass @str_to_int def g(self) -> str: pass @str_to_int def h(self) -> str: pass class B(A): def f(self) -> int: pass def g(self) -> str: pass # E: Signature of "g" incompatible with supertype "A" @str_to_int def h(self) -> str: pass [case testOverrideWithDecoratorReturningAny] def dec(f): pass class A: def f(self) -> str: pass class B(A): @dec def f(self) -> int: pass [case testOverrideWithDecoratorReturningInstance] def dec(f) -> str: pass class A: def f(self) -> str: pass @dec def g(self) -> int: pass @dec def h(self) -> int: pass class B(A): @dec def f(self) -> int: pass # E: Signature of "f" incompatible with supertype "A" def g(self) -> int: pass # E: Signature of "g" incompatible with supertype "A" @dec def h(self) -> str: pass [case testOverrideStaticMethodWithStaticMethod] class A: @staticmethod def f(x: int, y: str) -> None: pass @staticmethod def g(x: int, y: str) -> None: pass class B(A): @staticmethod def f(x: int, y: str) -> None: pass @staticmethod def g(x: str, y: str) -> None: pass # E: Argument 1 of "g" incompatible with supertype "A" [builtins fixtures/classmethod.pyi] [case testOverrideClassMethodWithClassMethod] class A: @classmethod def f(cls, x: int, y: str) -> None: pass @classmethod def g(cls, x: int, y: str) -> None: pass class B(A): @classmethod def f(cls, x: int, y: str) -> None: pass @classmethod def g(cls, x: str, y: str) -> None: pass # E: Argument 1 of "g" incompatible with supertype "A" [builtins fixtures/classmethod.pyi] [case testOverrideClassMethodWithStaticMethod] class A: @classmethod def f(cls, x: int) -> None: pass @classmethod def g(cls, x: int) -> int: pass @classmethod def h(cls) -> int: pass class B(A): @staticmethod def f(x: int) -> None: pass @staticmethod def g(x: str) -> int: pass # E: Argument 1 of "g" incompatible with supertype "A" @staticmethod def h() -> int: pass [builtins fixtures/classmethod.pyi] [case testOverrideStaticMethodWithClassMethod] class A: @staticmethod def f(x: int) -> None: pass @staticmethod def g(x: str) -> int: pass @staticmethod def h() -> int: pass class B(A): @classmethod def f(cls, x: int) -> None: pass @classmethod def g(cls, x: int) -> int: pass # E: Argument 1 of "g" incompatible with supertype "A" @classmethod def h(cls) -> int: pass [builtins fixtures/classmethod.pyi] -- Constructors -- ------------ [case testTrivialConstructor] import typing a = A() # type: A b = A() # type: B # Fail class A: def __init__(self) -> None: pass class B: pass [out] main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testConstructor] import typing a = A(B()) # type: A aa = A(object()) # type: A # Fail b = A(B()) # type: B # Fail class A: def __init__(self, x: 'B') -> None: pass class B: pass [out] main:3: error: Argument 1 to "A" has incompatible type "object"; expected "B" main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testConstructorWithTwoArguments] import typing a = A(C(), B()) # type: A # Fail class A: def __init__(self, x: 'B', y: 'C') -> None: pass class B: pass class C(B): pass [out] 
main:2: error: Argument 2 to "A" has incompatible type "B"; expected "C" [case testInheritedConstructor] import typing b = B(C()) # type: B a = B(D()) # type: A # Fail class A: def __init__(self, x: 'C') -> None: pass class B(A): pass class C: pass class D: pass [out] main:3: error: Argument 1 to "B" has incompatible type "D"; expected "C" [case testOverridingWithIncompatibleConstructor] import typing A() # Fail B(C()) # Fail A(C()) B() class A: def __init__(self, x: 'C') -> None: pass class B(A): def __init__(self) -> None: pass class C: pass [out] main:2: error: Too few arguments for "A" main:3: error: Too many arguments for "B" [case testConstructorWithReturnValueType] import typing class A: def __init__(self) -> 'A': pass [out] main:3: error: The return type of "__init__" must be None [case testConstructorWithImplicitReturnValueType] import typing class A: def __init__(self, x: int): pass [out] main:3: error: The return type of "__init__" must be None [case testInitSubclassWithReturnValueType] import typing class A: def __init_subclass__(cls) -> 'A': pass [out] main:3: error: The return type of "__init_subclass__" must be None [case testInitSubclassWithImplicitReturnValueType] import typing class A: def __init_subclass__(cls, x: int=1): pass [out] main:3: error: The return type of "__init_subclass__" must be None [case testGlobalFunctionInitWithReturnType] import typing a = __init__() # type: A b = __init__() # type: B # Fail def __init__() -> 'A': pass class A: pass class B: pass [out] main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testAccessingInit] from typing import Any, cast class A: def __init__(self, a: 'A') -> None: pass a = None # type: A a.__init__(a) # E: Cannot access "__init__" directly (cast(Any, a)).__init__(a) [case testDeepInheritanceHierarchy] import typing d = C() # type: D # Fail d = B() # Fail d = A() # Fail d = D2() # Fail a = D() # type: A a = D2() b = D() # type: B b = D2() class A: pass class B(A): pass class C(B): pass class D(C): pass class D2(C): pass [out] main:2: error: Incompatible types in assignment (expression has type "C", variable has type "D") main:3: error: Incompatible types in assignment (expression has type "B", variable has type "D") main:4: error: Incompatible types in assignment (expression has type "A", variable has type "D") main:5: error: Incompatible types in assignment (expression has type "D2", variable has type "D") -- Attribute access in class body -- ------------------------------ [case testDataAttributeRefInClassBody] import typing class B: pass class A: x = B() y = x b = x # type: B b = x c = x # type: A # E: Incompatible types in assignment (expression has type "B", variable has type "A") c = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") [out] [case testMethodRefInClassBody] from typing import Callable class B: pass class A: def f(self) -> None: pass g = f h = f # type: Callable[[A], None] h = f g = h ff = f # type: Callable[[B], None] # E: Incompatible types in assignment (expression has type "Callable[[A], None]", variable has type "Callable[[B], None]") g = ff # E: Incompatible types in assignment (expression has type "Callable[[B], None]", variable has type "Callable[[A], None]") [out] -- Arbitrary statements in class body -- ---------------------------------- [case testStatementsInClassBody] import typing class B: pass class A: for x in [A()]: y = x y = B() # E: Incompatible types in assignment (expression has type "B", variable 
has type "A") x = A() y = A() x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [builtins fixtures/for.pyi] [out] -- Class attributes -- ---------------- [case testAccessMethodViaClass] import typing class A: def f(self) -> None: pass A.f(A()) A.f(object()) # E: Argument 1 to "f" of "A" has incompatible type "object"; expected "A" A.f() # E: Too few arguments for "f" of "A" A.f(None, None) # E: Too many arguments for "f" of "A" [case testAccessAttributeViaClass] import typing class B: pass class A: x = None # type: A a = A.x # type: A b = A.x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") [case testAccessingUndefinedAttributeViaClass] import typing class A: pass A.x # E: "Type[A]" has no attribute "x" [case testAccessingUndefinedAttributeViaClassWithOverloadedInit] from foo import * [file foo.pyi] from typing import overload class A: @overload def __init__(self): pass @overload def __init__(self, x): pass A.x # E: "Type[A]" has no attribute "x" [case testAccessMethodOfClassWithOverloadedInit] from foo import * [file foo.pyi] from typing import overload, Any class A: @overload def __init__(self) -> None: pass @overload def __init__(self, x: Any) -> None: pass def f(self) -> None: pass A.f(A()) A.f() # E: Too few arguments for "f" of "A" [case testAssignmentToClassDataAttribute] import typing class B: pass class A: x = None # type: B A.x = B() A.x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "B") [case testAssignmentToInferredClassDataAttribute] import typing class B: pass class A: x = B() A.x = B() A.x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") [case testInitMethodUnbound] class B: pass class A: def __init__(self, b: B) -> None: pass a = None # type: A b = None # type: B A.__init__(a, b) A.__init__(b, b) # E: Argument 1 to "__init__" of "A" has incompatible type "B"; expected "A" A.__init__(a, a) # E: Argument 2 to "__init__" of "A" has incompatible type "A"; expected "B" [case testAssignToMethodViaClass] import typing class A: def f(self): pass A.f = None # E: Cannot assign to a method [case testAssignToNestedClassViaClass] import typing class A: class B: pass A.B = None # E: Cannot assign to a type [case testAccessingClassAttributeWithTypeInferenceIssue] x = C.x # E: Cannot determine type of 'x' def f() -> int: return 1 class C: x = f() [builtins fixtures/list.pyi] [case testAccessingClassAttributeWithTypeInferenceIssue2] class C: x = [] x = C.x [builtins fixtures/list.pyi] [out] main:2: error: Need type annotation for variable [case testAccessingGenericClassAttribute] from typing import Generic, TypeVar T = TypeVar('T') class A(Generic[T]): x = None # type: T A.x # E: Access to generic instance variables via class is ambiguous A[int].x # E: Access to generic instance variables via class is ambiguous [case testAccessingNestedGenericClassAttribute] from typing import Generic, List, TypeVar, Union T = TypeVar('T') U = TypeVar('U') class A(Generic[T, U]): x = None # type: Union[T, List[U]] A.x # E: Access to generic instance variables via class is ambiguous A[int, int].x # E: Access to generic instance variables via class is ambiguous [builtins fixtures/list.pyi] -- Nested classes -- -------------- [case testClassWithinFunction] def f() -> None: class A: def g(self) -> None: pass a = None # type: A a.g() a.g(a) # E: Too many arguments for "g" of "A" [out] [case testConstructNestedClass] import 
typing class A: class B: pass b = B() b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") b = B(b) # E: Too many arguments for "B" [out] [case testConstructNestedClassWithCustomInit] import typing class A: def f(self) -> None: class B: def __init__(self, a: 'A') -> None: pass b = B(A()) b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") b = B() # E: Too few arguments for "B" [out] [case testDeclareVariableWithNestedClassType] def f() -> None: class A: pass a = None # type: A a = A() a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") [out] [case testExternalReferenceToClassWithinClass] class A: class B: pass b = None # type: A.B b = A.B() b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") b = A.B(b) # E: Too many arguments for "B" [case testAliasNestedClass] class Outer: class Inner: def make_int(self) -> int: return 1 reveal_type(Inner().make_int) # E: Revealed type is 'def () -> builtins.int' some_int = Inner().make_int() reveal_type(Outer.Inner.make_int) # E: Revealed type is 'def (self: __main__.Outer.Inner) -> builtins.int' reveal_type(Outer().some_int) # E: Revealed type is 'builtins.int' Bar = Outer.Inner reveal_type(Bar.make_int) # E: Revealed type is 'def (self: __main__.Outer.Inner) -> builtins.int' x = Bar() # type: Bar def produce() -> Bar: reveal_type(Bar().make_int) # E: Revealed type is 'def () -> builtins.int' return Bar() [case testInnerClassPropertyAccess] class Foo: class Meta: name = 'Bar' meta = Meta reveal_type(Foo.Meta) # E: Revealed type is 'def () -> __main__.Foo.Meta' reveal_type(Foo.meta) # E: Revealed type is 'def () -> __main__.Foo.Meta' reveal_type(Foo.Meta.name) # E: Revealed type is 'builtins.str' reveal_type(Foo.meta.name) # E: Revealed type is 'builtins.str' reveal_type(Foo().Meta) # E: Revealed type is 'def () -> __main__.Foo.Meta' reveal_type(Foo().meta) # E: Revealed type is 'def () -> __main__.Foo.Meta' reveal_type(Foo().meta.name) # E: Revealed type is 'builtins.str' reveal_type(Foo().Meta.name) # E: Revealed type is 'builtins.str' -- Declaring attribute type in method -- ---------------------------------- [case testDeclareAttributeTypeInInit] class A: def __init__(self): self.x = None # type: int a = None # type: A a.x = 1 a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testAccessAttributeDeclaredInInitBeforeDeclaration] a = None # type: A a.x = 1 a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") class A: def __init__(self): self.x = None # type: int -- Special cases -- ------------- [case testMultipleClassDefinition] import typing A() class A: pass class A: pass [out] main:4: error: Name 'A' already defined on line 3 [case testDocstringInClass] import typing class A: """Foo""" class B: 'x' y = B() [builtins fixtures/primitives.pyi] [case testErrorMessageInFunctionNestedWithinMethod] import typing class A: def f(self) -> None: def g() -> None: "" + 1 # E: Unsupported operand types for + ("str" and "int") "" + 1 # E: Unsupported operand types for + ("str" and "int") [out] -- Static methods -- -------------- [case testSimpleStaticMethod] import typing class A: @staticmethod def f(x: int) -> None: pass A.f(1) A().f(1) A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; 
expected "int" [builtins fixtures/staticmethod.pyi] [case testBuiltinStaticMethod] import typing int.from_bytes(b'', '') int.from_bytes('', '') # E: Argument 1 to "from_bytes" of "int" has incompatible type "str"; expected "bytes" [builtins fixtures/staticmethod.pyi] [case testAssignStaticMethodOnInstance] import typing class A: @staticmethod def f(x: int) -> None: pass A().f = A.f # E: Cannot assign to a method [builtins fixtures/staticmethod.pyi] -- Class methods -- ------------- [case testSimpleClassMethod] import typing class A: @classmethod def f(cls, x: int) -> None: pass A.f(1) A().f(1) A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [builtins fixtures/classmethod.pyi] [case testBuiltinClassMethod] import typing int.from_bytes(b'', '') int.from_bytes('', '') # E: Argument 1 to "from_bytes" of "int" has incompatible type "str"; expected "bytes" [builtins fixtures/classmethod.pyi] [case testAssignClassMethodOnClass] import typing class A: @classmethod def f(cls, x: int) -> None: pass A.f = A.f # E: Cannot assign to a method [builtins fixtures/classmethod.pyi] [case testAssignClassMethodOnInstance] import typing class A: @classmethod def f(cls, x: int) -> None: pass A().f = A.f # E: Cannot assign to a method [builtins fixtures/classmethod.pyi] [case testClassMethodCalledInClassMethod] import typing class C: @classmethod def foo(cls) -> None: pass @classmethod def bar(cls) -> None: cls() cls(1) # E: Too many arguments for "C" cls.bar() cls.bar(1) # E: Too many arguments for "bar" of "C" cls.bozo() # E: "Type[C]" has no attribute "bozo" [builtins fixtures/classmethod.pyi] [out] [case testClassMethodCalledOnClass] import typing class C: @classmethod def foo(cls) -> None: pass C.foo() C.foo(1) # E: Too many arguments for "foo" of "C" C.bozo() # E: "Type[C]" has no attribute "bozo" [builtins fixtures/classmethod.pyi] [case testClassMethodCalledOnInstance] import typing class C: @classmethod def foo(cls) -> None: pass C().foo() C().foo(1) # E: Too many arguments for "foo" of "C" C.bozo() # E: "Type[C]" has no attribute "bozo" [builtins fixtures/classmethod.pyi] [case testClassMethodMayCallAbstractMethod] from abc import abstractmethod import typing class C: @classmethod def foo(cls) -> None: cls().bar() @abstractmethod def bar(self) -> None: pass [builtins fixtures/classmethod.pyi] [case testClassMethodSubclassing] class A: @classmethod def f(cls) -> None: pass def g(self) -> None: pass class B(A): def f(self) -> None: pass # E: Signature of "f" incompatible with supertype "A" @classmethod def g(cls) -> None: pass class C(A): @staticmethod def f() -> None: pass [builtins fixtures/classmethod.pyi] -- Properties -- ---------- [case testAccessingReadOnlyProperty] import typing class A: @property def f(self) -> str: pass a = A() reveal_type(a.f) # E: Revealed type is 'builtins.str' [builtins fixtures/property.pyi] [case testAssigningToReadOnlyProperty] import typing class A: @property def f(self) -> str: pass A().f = '' # E: Property "f" defined in "A" is read-only [builtins fixtures/property.pyi] [case testPropertyGetterBody] import typing class A: @property def f(self) -> str: self.x = 1 self.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") return '' [builtins fixtures/property.pyi] [out] [case testDynamicallyTypedProperty] import typing class A: @property def f(self): pass a = A() a.f.xx a.f = '' # E: Property "f" defined in "A" 
is read-only [builtins fixtures/property.pyi] [case testPropertyWithSetter] import typing class A: @property def f(self) -> int: return 1 @f.setter def f(self, x: int) -> None: pass a = A() a.f = a.f a.f.x # E: "int" has no attribute "x" a.f = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") a.f = 1 reveal_type(a.f) # E: Revealed type is 'builtins.int' [builtins fixtures/property.pyi] [case testPropertyWithDeleterButNoSetter] import typing class A: @property def f(self) -> int: return 1 @f.deleter def f(self, x) -> None: pass a = A() a.f = a.f # E: Property "f" defined in "A" is read-only a.f.x # E: "int" has no attribute "x" [builtins fixtures/property.pyi] -- Descriptors -- ----------- [case testAccessingNonDataDescriptor] from typing import Any class D: def __get__(self, inst: Any, own: Any) -> str: return 's' class A: f = D() a = A() reveal_type(a.f) # E: Revealed type is 'builtins.str' [case testSettingNonDataDescriptor] from typing import Any class D: def __get__(self, inst: Any, own: Any) -> str: return 's' class A: f = D() a = A() a.f = 'foo' a.f = D() # E: Incompatible types in assignment (expression has type "D", variable has type "str") [case testSettingDataDescriptor] from typing import Any class D: def __get__(self, inst: Any, own: Any) -> str: return 's' def __set__(self, inst: Any, value: str) -> None: pass class A: f = D() a = A() a.f = '' a.f = 1 # E: Argument 2 to "__set__" of "D" has incompatible type "int"; expected "str" [case testReadingDescriptorWithoutDunderGet] from typing import Union, Any class D: def __set__(self, inst: Any, value: str) -> None: pass class A: f = D() def __init__(self): self.f = 's' a = A() reveal_type(a.f) # E: Revealed type is '__main__.D' [case testAccessingDescriptorFromClass] # flags: --strict-optional from d import D, Base class A(Base): f = D() reveal_type(A.f) # E: Revealed type is 'd.D' reveal_type(A().f) # E: Revealed type is 'builtins.str' [file d.pyi] from typing import TypeVar, Type, Generic, overload class Base: pass class D: def __init__(self) -> None: pass @overload def __get__(self, inst: None, own: Type[Base]) -> D: pass @overload def __get__(self, inst: Base, own: Type[Base]) -> str: pass [builtins fixtures/bool.pyi] [case testAccessingDescriptorFromClassWrongBase] # flags: --strict-optional from d import D, Base class A: f = D() reveal_type(A.f) reveal_type(A().f) [file d.pyi] from typing import TypeVar, Type, Generic, overload class Base: pass class D: def __init__(self) -> None: pass @overload def __get__(self, inst: None, own: Type[Base]) -> D: pass @overload def __get__(self, inst: Base, own: Type[Base]) -> str: pass [builtins fixtures/bool.pyi] [out] main:5: error: Revealed type is 'Any' main:5: error: No overload variant of "__get__" of "D" matches argument types [builtins.None, Type[__main__.A]] main:6: error: Revealed type is 'Any' main:6: error: No overload variant of "__get__" of "D" matches argument types [__main__.A, Type[__main__.A]] [case testAccessingGenericNonDataDescriptor] from typing import TypeVar, Type, Generic, Any V = TypeVar('V') class D(Generic[V]): def __init__(self, v: V) -> None: self.v = v def __get__(self, inst: Any, own: Type) -> V: return self.v class A: f = D(10) g = D('10') a = A() reveal_type(a.f) # E: Revealed type is 'builtins.int*' reveal_type(a.g) # E: Revealed type is 'builtins.str*' [case testSettingGenericDataDescriptor] from typing import TypeVar, Type, Generic, Any V = TypeVar('V') class D(Generic[V]): def __init__(self, v: V) -> None: 
self.v = v def __get__(self, inst: Any, own: Type) -> V: return self.v def __set__(self, inst: Any, v: V) -> None: pass class A: f = D(10) g = D('10') a = A() a.f = 1 a.f = '' # E: Argument 2 to "__set__" of "D" has incompatible type "str"; expected "int" a.g = '' a.g = 1 # E: Argument 2 to "__set__" of "D" has incompatible type "int"; expected "str" [case testAccessingGenericDescriptorFromClass] # flags: --strict-optional from d import D class A: f = D(10) # type: D[A, int] g = D('10') # type: D[A, str] reveal_type(A.f) # E: Revealed type is 'd.D[__main__.A*, builtins.int*]' reveal_type(A.g) # E: Revealed type is 'd.D[__main__.A*, builtins.str*]' reveal_type(A().f) # E: Revealed type is 'builtins.int*' reveal_type(A().g) # E: Revealed type is 'builtins.str*' [file d.pyi] from typing import TypeVar, Type, Generic, overload T = TypeVar('T') V = TypeVar('V') class D(Generic[T, V]): def __init__(self, v: V) -> None: pass @overload def __get__(self, inst: None, own: Type[T]) -> 'D[T, V]': pass @overload def __get__(self, inst: T, own: Type[T]) -> V: pass [builtins fixtures/bool.pyi] [case testAccessingGenericDescriptorFromInferredClass] # flags: --strict-optional from typing import Type from d import D class A: f = D(10) # type: D[A, int] g = D('10') # type: D[A, str] def f(some_class: Type[A]): reveal_type(some_class.f) reveal_type(some_class.g) [file d.pyi] from typing import TypeVar, Type, Generic, overload T = TypeVar('T') V = TypeVar('V') class D(Generic[T, V]): def __init__(self, v: V) -> None: pass @overload def __get__(self, inst: None, own: Type[T]) -> 'D[T, V]': pass @overload def __get__(self, inst: T, own: Type[T]) -> V: pass [builtins fixtures/bool.pyi] [out] main:8: error: Revealed type is 'd.D[__main__.A*, builtins.int*]' main:9: error: Revealed type is 'd.D[__main__.A*, builtins.str*]' [case testAccessingGenericDescriptorFromClassBadOverload] # flags: --strict-optional from d import D class A: f = D(10) # type: D[A, int] reveal_type(A.f) [file d.pyi] from typing import TypeVar, Type, Generic, overload T = TypeVar('T') V = TypeVar('V') class D(Generic[T, V]): def __init__(self, v: V) -> None: pass @overload def __get__(self, inst: None, own: None) -> 'D[T, V]': pass @overload def __get__(self, inst: T, own: Type[T]) -> V: pass [builtins fixtures/bool.pyi] [out] main:5: error: Revealed type is 'Any' main:5: error: No overload variant of "__get__" of "D" matches argument types [builtins.None, Type[__main__.A]] [case testAccessingNonDataDescriptorSubclass] from typing import Any class C: def __get__(self, inst: Any, own: Any) -> str: return 's' class D(C): pass class A: f = D() a = A() reveal_type(a.f) # E: Revealed type is 'builtins.str' [case testSettingDataDescriptorSubclass] from typing import Any class C: def __get__(self, inst: Any, own: Any) -> str: return 's' def __set__(self, inst: Any, v: str) -> None: pass class D(C): pass class A: f = D() a = A() a.f = '' a.f = 1 # E: Argument 2 to "__set__" of "C" has incompatible type "int"; expected "str" [case testReadingDescriptorSubclassWithoutDunderGet] from typing import Union, Any class C: def __set__(self, inst: Any, v: str) -> None: pass class D(C): pass class A: f = D() def __init__(self): self.f = 's' a = A() reveal_type(a.f) # E: Revealed type is '__main__.D' [case testAccessingGenericNonDataDescriptorSubclass] from typing import TypeVar, Type, Generic, Any V = TypeVar('V') class C(Generic[V]): def __init__(self, v: V) -> None: self.v = v def __get__(self, inst: Any, own: Type) -> V: return self.v class D(C[V], 
Generic[V]): pass class A: f = D(10) g = D('10') a = A() reveal_type(a.f) # E: Revealed type is 'builtins.int*' reveal_type(a.g) # E: Revealed type is 'builtins.str*' [case testSettingGenericDataDescriptorSubclass] from typing import TypeVar, Type, Generic T = TypeVar('T') V = TypeVar('V') class C(Generic[T, V]): def __init__(self, v: V) -> None: self.v = v def __get__(self, inst: T, own: Type[T]) -> V: return self.v def __set__(self, inst: T, v: V) -> None: pass class D(C[T, V], Generic[T, V]): pass class A: f = D(10) # type: D[A, int] g = D('10') # type: D[A, str] a = A() a.f = 1 a.f = '' # E: Argument 2 to "__set__" of "C" has incompatible type "str"; expected "int" a.g = '' a.g = 1 # E: Argument 2 to "__set__" of "C" has incompatible type "int"; expected "str" [case testSetDescriptorOnClass] from typing import TypeVar, Type, Generic T = TypeVar('T') V = TypeVar('V') class D(Generic[T, V]): def __init__(self, v: V) -> None: self.v = v def __get__(self, inst: T, own: Type[T]) -> V: return self.v def __set__(self, inst: T, v: V) -> None: pass class A: f = D(10) # type: D[A, int] A.f = D(20) A.f = D('some string') # E: Argument 1 to "D" has incompatible type "str"; expected "int" [case testSetDescriptorOnInferredClass] from typing import TypeVar, Type, Generic, Any V = TypeVar('V') class D(Generic[V]): def __init__(self, v: V) -> None: self.v = v def __get__(self, inst: Any, own: Type) -> V: return self.v def __set__(self, inst: Any, v: V) -> None: pass class A: f = D(10) def f(some_class: Type[A]): A.f = D(20) A.f = D('some string') [out] main:11: error: Argument 1 to "D" has incompatible type "str"; expected "int" [case testDescriptorUncallableDunderSet] class D: __set__ = 's' class A: f = D() A().f = 'x' # E: __main__.D.__set__ is not callable [case testDescriptorDunderSetTooFewArgs] class D: def __set__(self, inst): pass class A: f = D() A().f = 'x' # E: Too many arguments for "__set__" [case testDescriptorDunderSetTooManyArgs] class D: def __set__(self, inst, v, other): pass class A: f = D() A().f = 'x' # E: Too few arguments for "__set__" [case testDescriptorDunderSetWrongArgTypes] class D: def __set__(self, inst: str, v:str) -> None: pass class A: f = D() A().f = 'x' # E: Argument 1 to "__set__" of "D" has incompatible type "A"; expected "str" [case testDescriptorUncallableDunderGet] class D: __get__ = 's' class A: f = D() A().f # E: __main__.D.__get__ is not callable [case testDescriptorDunderGetTooFewArgs] class D: def __get__(self, inst): pass class A: f = D() A().f # E: Too many arguments for "__get__" [case testDescriptorDunderGetTooManyArgs] class D: def __get__(self, inst, own, other): pass class A: f = D() A().f = 'x' # E: Too few arguments for "__get__" [case testDescriptorDunderGetWrongArgTypeForInstance] from typing import Any class D: def __get__(self, inst: str, own: Any) -> Any: pass class A: f = D() A().f # E: Argument 1 to "__get__" of "D" has incompatible type "A"; expected "str" [case testDescriptorDunderGetWrongArgTypeForOwner] from typing import Any class D: def __get__(self, inst: Any, own: str) -> Any: pass class A: f = D() A().f # E: Argument 2 to "__get__" of "D" has incompatible type "Type[A]"; expected "str" [case testDescriptorGetSetDifferentTypes] from typing import Any class D: def __get__(self, inst: Any, own: Any) -> str: return 's' def __set__(self, inst: Any, v: int) -> None: pass class A: f = D() a = A() a.f = 1 reveal_type(a.f) # E: Revealed type is 'builtins.str' -- _promote decorators -- ------------------- [case testSimpleDucktypeDecorator] from 
typing import _promote class A: pass @_promote(A) class B: pass a = None # type: A b = None # type: B b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = b [case testDucktypeTransitivityDecorator] from typing import _promote class A: pass @_promote(A) class B: pass @_promote(B) class C: pass a = None # type: A c = None # type: C c = a # E: Incompatible types in assignment (expression has type "A", variable has type "C") a = c -- Hard coded type promotions -- -------------------------- [case testHardCodedTypePromotions] import typing def f(x: float) -> None: pass def g(x: complex) -> None: pass f(1) g(1) g(1.1) [builtins fixtures/complex.pyi] -- Operator methods -- ---------------- [case testOperatorMethodOverrideIntroducingOverloading] from foo import * [file foo.pyi] from typing import overload class A: def __add__(self, x: int) -> int: pass class B(A): @overload # E: Signature of "__add__" incompatible with supertype "A" def __add__(self, x: int) -> int: pass @overload def __add__(self, x: str) -> str: pass [out] [case testOperatorMethodOverrideWideningArgumentType] import typing class A: def __add__(self, x: int) -> int: pass class B(A): def __add__(self, x: object) -> int: pass [out] [case testOperatorMethodOverrideNarrowingReturnType] import typing class A: def __add__(self, x: int) -> 'A': pass class B(A): def __add__(self, x: int) -> 'B': pass [case testOperatorMethodOverrideWithDynamicallyTyped] import typing class A: def __add__(self, x: int) -> 'A': pass class B(A): def __add__(self, x): pass [case testOperatorMethodOverrideWithIdenticalOverloadedType] from foo import * [file foo.pyi] from typing import overload class A: @overload def __add__(self, x: int) -> 'A': pass @overload def __add__(self, x: str) -> 'A': pass class B(A): @overload def __add__(self, x: int) -> 'A': pass @overload def __add__(self, x: str) -> 'A': pass [case testOverloadedOperatorMethodOverrideWithDynamicallyTypedMethod] from foo import * [file foo.pyi] from typing import overload, Any class A: @overload def __add__(self, x: int) -> 'A': pass @overload def __add__(self, x: str) -> 'A': pass class B(A): def __add__(self, x): pass class C(A): def __add__(self, x: Any) -> A: pass [case testOverloadedOperatorMethodOverrideWithNewItem] from foo import * [file foo.pyi] from typing import overload, Any class A: @overload def __add__(self, x: int) -> 'A': pass @overload def __add__(self, x: str) -> 'A': pass class B(A): @overload def __add__(self, x: int) -> A: pass @overload def __add__(self, x: str) -> A: pass @overload def __add__(self, x: type) -> A: pass [out] tmp/foo.pyi:8: error: Signature of "__add__" incompatible with supertype "A" [case testOverloadedOperatorMethodOverrideWithSwitchedItemOrder] from foo import * [file foo.pyi] from typing import overload, Any class A: @overload def __add__(self, x: 'B') -> 'B': pass @overload def __add__(self, x: 'A') -> 'A': pass class B(A): @overload def __add__(self, x: 'A') -> 'A': pass @overload def __add__(self, x: 'B') -> 'B': pass [out] tmp/foo.pyi:8: error: Signature of "__add__" incompatible with supertype "A" [case testReverseOperatorMethodArgumentType] from typing import Any class A: pass class B: def __radd__(self, x: A) -> int: pass # Error class C: def __radd__(self, x: A) -> Any: pass class D: def __radd__(self, x: A) -> object: pass [out] [case testReverseOperatorMethodArgumentType2] from typing import Any, Tuple, Callable class A: def __radd__(self, x: Tuple[int, str]) -> int: pass class B: def __radd__(self, 
x: Callable[[], int]) -> int: pass class C: def __radd__(self, x: Any) -> int: pass [out] [case testReverseOperatorMethodInvalid] from foo import * [file foo.pyi] class A: ... class B: def __rmul__(self) -> A: ... class C: def __radd__(self, other, oops) -> int: ... [out] tmp/foo.pyi:3: error: Invalid signature "def (foo.B) -> foo.A" tmp/foo.pyi:5: error: Invalid signature "def (foo.C, Any, Any) -> builtins.int" [case testReverseOperatorMethodForwardIsAny] from typing import Any def deco(f: Any) -> Any: return f class C: @deco def __add__(self, other: C) -> C: return C() def __radd__(self, other: C) -> C: return C() [out] [case testReverseOperatorMethodForwardIsAny2] from typing import Any def deco(f: Any) -> Any: return f class C: __add__ = None # type: Any def __radd__(self, other: C) -> C: return C() [out] [case testReverseOperatorMethodForwardIsAny3] from typing import Any def deco(f: Any) -> Any: return f class C: __add__ = 42 def __radd__(self, other: C) -> C: return C() [out] main:5: error: Forward operator "__add__" is not callable [case testOverloadedReverseOperatorMethodArgumentType] from foo import * [file foo.pyi] from typing import overload, Any class A: @overload def __radd__(self, x: 'A') -> str: pass # Error @overload def __radd__(self, x: 'A') -> Any: pass [out] [case testReverseOperatorMethodArgumentTypeAndOverloadedMethod] from foo import * [file foo.pyi] from typing import overload class A: @overload def __add__(self, x: int) -> int: pass @overload def __add__(self, x: str) -> int: pass def __radd__(self, x: 'A') -> str: pass [case testAbstractReverseOperatorMethod] import typing from abc import abstractmethod class A: @abstractmethod def __lt__(self, x: 'A') -> int: pass class B: @abstractmethod def __lt__(self, x: 'B') -> int: pass @abstractmethod def __gt__(self, x: 'B') -> int: pass [out] [case testOperatorMethodsAndOverloadingSpecialCase] from foo import * [file foo.pyi] from typing import overload class A: @overload def __add__(self, x: 'A') -> int: pass @overload def __add__(self, x: str) -> int: pass class B: def __radd__(self, x: 'A') -> str: pass [out] [case testUnsafeOverlappingWithOperatorMethodsAndOverloading2] from foo import A, B from foo import * [file foo.pyi] from typing import overload class A: def __add__(self, x: 'A') -> int: pass class B: @overload def __radd__(self, x: 'X') -> str: pass # Error @overload def __radd__(self, x: A) -> str: pass # Error class X: def __add__(self, x): pass [out] tmp/foo.pyi:6: error: Signatures of "__radd__" of "B" and "__add__" of "X" are unsafely overlapping [case testUnsafeOverlappingWithLineNo] from typing import TypeVar class Real: def __add__(self, other): ... class Fraction(Real): def __radd__(self, other: Real) -> Real: ... 
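# A runnable sketch (not part of the test suite; the class names "Money" and
# "Wallet" are invented) of the hazard behind the "unsafely overlapping"
# error exercised above: a dynamically typed __add__ on the left operand also
# accepts the right operand, runs first, and can return something other than
# what the right operand's __radd__ promises.
class Money:
    def __add__(self, other):              # dynamically typed: accepts a Wallet too
        return 0.0                         # ...and may return a plain float
class Wallet:
    def __radd__(self, other: Money) -> Money:
        return Money()
total = Money() + Wallet()                 # Money.__add__ wins, so total is a float,
                                           # not the Money that Wallet.__radd__ declares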
[out] main:5: error: Signatures of "__radd__" of "Fraction" and "__add__" of "Real" are unsafely overlapping [case testOverlappingNormalAndInplaceOperatorMethod] import typing class A: # Incompatible (potential trouble with __radd__) def __add__(self, x: 'A') -> int: pass def __iadd__(self, x: 'B') -> int: pass class B: # Safe def __add__(self, x: 'C') -> int: pass def __iadd__(self, x: A) -> int: pass class C(A): pass [out] main:5: error: Signatures of "__iadd__" and "__add__" are incompatible [case testOverloadedNormalAndInplaceOperatorMethod] from foo import * [file foo.pyi] from typing import overload class A: @overload def __add__(self, x: int) -> int: pass @overload def __add__(self, x: str) -> int: pass @overload # Error def __iadd__(self, x: int) -> int: pass @overload def __iadd__(self, x: object) -> int: pass class B: @overload def __add__(self, x: int) -> int: pass @overload def __add__(self, x: str) -> str: pass @overload def __iadd__(self, x: int) -> int: pass @overload def __iadd__(self, x: str) -> str: pass [out] tmp/foo.pyi:7: error: Signatures of "__iadd__" and "__add__" are incompatible [case testIntroducingInplaceOperatorInSubclass] import typing class A: def __add__(self, x: 'A') -> 'B': pass class B(A): # __iadd__ effectively partially overrides __add__ def __iadd__(self, x: 'A') -> 'A': pass # Error class C(A): def __iadd__(self, x: int) -> 'B': pass # Error class D(A): def __iadd__(self, x: 'A') -> 'B': pass [out] main:6: error: Return type of "__iadd__" incompatible with "__add__" of supertype "A" main:8: error: Argument 1 of "__iadd__" incompatible with "__add__" of supertype "A" main:8: error: Signatures of "__iadd__" and "__add__" are incompatible [case testGetAttribute] a, b = None, None # type: A, B class A: def __getattribute__(self, x: str) -> A: return A() class B: pass a = a.foo b = a.bar [out] main:9: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testGetAttributeSignature] class A: def __getattribute__(self, x: str) -> A: pass class B: def __getattribute__(self, x: A) -> B: pass class C: def __getattribute__(self, x: str, y: str) -> C: pass class D: def __getattribute__(self, x: str) -> None: pass [out] main:4: error: Invalid signature "def (__main__.B, __main__.A) -> __main__.B" main:6: error: Invalid signature "def (__main__.C, builtins.str, builtins.str) -> __main__.C" [case testGetAttr] a, b = None, None # type: A, B class A: def __getattr__(self, x: str) -> A: return A() class B: pass a = a.foo b = a.bar [out] main:9: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testGetAttrSignature] class A: def __getattr__(self, x: str) -> A: pass class B: def __getattr__(self, x: A) -> B: pass class C: def __getattr__(self, x: str, y: str) -> C: pass class D: def __getattr__(self, x: str) -> None: pass [out] main:4: error: Invalid signature "def (__main__.B, __main__.A) -> __main__.B" main:6: error: Invalid signature "def (__main__.C, builtins.str, builtins.str) -> __main__.C" [case testSetAttr] from typing import Union, Any class A: def __setattr__(self, name: str, value: Any) -> None: ... a = A() a.test = 'hello' class B: def __setattr__(self, name: str, value: Union[int, str]) -> None: ... b = B() b.both = 1 b.work = '2' class C: def __setattr__(self, name: str, value: str) -> None: ... 
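# A minimal runnable sketch (separate from the checks above; the class name
# "Tagged" is invented): a narrow __setattr__ signature is what lets the
# checker accept or reject assignments to otherwise undeclared attributes.
class Tagged:
    def __setattr__(self, name: str, value: int) -> None:
        object.__setattr__(self, name, value)   # store the value normally
t = Tagged()
t.count = 3          # fine: the value matches the declared "int" parameter
# t.label = "x"      # a str value would be rejected against the same signature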
c = C() c.fail = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "str") class D: __setattr__ = 'hello' d = D() d.crash = 4 # E: "D" has no attribute "crash" class Ex: def __setattr__(self, name: str, value: int) -> None:... test = '42' # type: str e = Ex() e.test = 'hello' e.t = 4 class Super: def __setattr__(self, name: str, value: int) -> None: ... class Sub(Super): ... s = Sub() s.success = 4 s.fail = 'fail' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testSetAttrSignature] class Test: def __setattr__() -> None: ... # E: Method must have at least one argument # E: Invalid signature "def ()" t = Test() t.crash = 'test' # E: "Test" has no attribute "crash" class A: def __setattr__(self): ... # E: Invalid signature "def (self: Any) -> Any" a = A() a.test = 4 # E: "A" has no attribute "test" class B: def __setattr__(self, name, value: int): ... b = B() b.integer = 5 class C: def __setattr__(self, name: int, value: int) -> None: ... # E: Invalid signature "def (__main__.C, builtins.int, builtins.int)" c = C() c.check = 13 [case testGetAttrAndSetattr] from typing import Any class A: def __setattr__(self, name: str, value: Any) -> None: ... def __getattr__(self, name: str) -> Any: ... a = A() a.test = 4 t = a.test class B: def __setattr__(self, name: str, value: int) -> None: ... def __getattr__(self, name: str) -> str: ... integer = 0 b = B() b.at = '3' # E: Incompatible types in assignment (expression has type "str", variable has type "int") integer = b.at # E: Incompatible types in assignment (expression has type "str", variable has type "int") -- CallableType objects -- ---------------- [case testCallableObject] import typing a = A() b = B() a() # E: Too few arguments for "__call__" of "A" a(a, a) # E: Too many arguments for "__call__" of "A" a = a(a) a = a(b) # E: Argument 1 to "__call__" of "A" has incompatible type "B"; expected "A" b = a(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") class A: def __call__(self, x: A) -> A: pass class B: pass -- __new__ -- -------- [case testConstructInstanceWith__new__] class C: def __new__(cls, foo: int = None) -> 'C': obj = object.__new__(cls) return obj x = C(foo=12) x.a # E: "C" has no attribute "a" C(foo='') # E: Argument 1 to "C" has incompatible type "str"; expected "int" [builtins fixtures/__new__.pyi] [case testConstructInstanceWithDynamicallyTyped__new__] class C: def __new__(cls, foo): obj = object.__new__(cls) return obj x = C(foo=12) x = C(foo='x') x.a # E: "C" has no attribute "a" C(bar='') # E: Unexpected keyword argument "bar" for "C" [builtins fixtures/__new__.pyi] [case testClassWith__new__AndCompatibilityWithType] class C: def __new__(cls, foo: int = None) -> 'C': obj = object.__new__(cls) return obj def f(x: type) -> None: pass def g(x: int) -> None: pass f(C) g(C) # E: Argument 1 to "g" has incompatible type "Type[C]"; expected "int" [builtins fixtures/__new__.pyi] [case testClassWith__new__AndCompatibilityWithType2] class C: def __new__(cls, foo): obj = object.__new__(cls) return obj def f(x: type) -> None: pass def g(x: int) -> None: pass f(C) g(C) # E: Argument 1 to "g" has incompatible type "Type[C]"; expected "int" [builtins fixtures/__new__.pyi] [case testGenericClassWith__new__] from typing import TypeVar, Generic T = TypeVar('T') class C(Generic[T]): def __new__(cls, foo: T) -> 'C[T]': obj = object.__new__(cls) return obj def set(self, x: T) -> None: pass c = C('') c.set('') c.set(1) # E: 
Argument 1 to "set" of "C" has incompatible type "int"; expected "str" [builtins fixtures/__new__.pyi] [case testOverloaded__new__] from foo import * [file foo.pyi] from typing import overload class C: @overload def __new__(cls, foo: int) -> 'C': obj = object.__new__(cls) return obj @overload def __new__(cls, x: str, y: str) -> 'C': obj = object.__new__(cls) return obj c = C(1) c.a # E: "C" has no attribute "a" C('', '') C('') # E: No overload variant of "C" matches argument types [builtins.str] [builtins fixtures/__new__.pyi] -- Special cases -- ------------- [case testSubclassInt] import typing class A(int): pass n = 0 n = A() a = A() a = 0 # E: Incompatible types in assignment (expression has type "int", variable has type "A") [case testForwardReferenceToNestedClass] def f(o: 'B.C') -> None: o.f('') # E: Argument 1 to "f" of "C" has incompatible type "str"; expected "int" class B: class C: def f(self, x: int) -> None: pass [out] [case testForwardReferenceToNestedClassDeep] def f(o: 'B.C.D') -> None: o.f('') # E: Argument 1 to "f" of "D" has incompatible type "str"; expected "int" class B: class C: class D: def f(self, x: int) -> None: pass [out] [case testForwardReferenceToNestedClassWithinClass] class B: def f(self, o: 'C.D') -> None: o.f('') # E: Argument 1 to "f" of "D" has incompatible type "str"; expected "int" class C: class D: def f(self, x: int) -> None: pass [out] [case testClassVsInstanceDisambiguation] class A: pass def f(x: A) -> None: pass f(A) # E: Argument 1 to "f" has incompatible type "Type[A]"; expected "A" [out] -- TODO -- attribute inherited from superclass; assign in __init__ -- refer to attribute before type has been inferred (the initialization in -- __init__ has not been analyzed) [case testAnyBaseClassUnconstrainedConstructor] from typing import Any B = None # type: Any class C(B): pass C(0) C(arg=0) [out] [case testErrorMapToSupertype] import typing class X(Nope): pass # E: Name 'Nope' is not defined a, b = X() # Used to crash here (#2244) -- Class-valued attributes -- ----------------------- [case testClassValuedAttributesBasics] class A: ... class B: a = A bad = lambda: 42 B().bad() # E: Attribute function "bad" with type "Callable[[], int]" does not accept self argument reveal_type(B.a) # E: Revealed type is 'def () -> __main__.A' reveal_type(B().a) # E: Revealed type is 'def () -> __main__.A' reveal_type(B().a()) # E: Revealed type is '__main__.A' class C: a = A def __init__(self) -> None: self.aa = self.a() reveal_type(C().aa) # E: Revealed type is '__main__.A' [out] [case testClassValuedAttributesGeneric] from typing import Generic, TypeVar, Type T = TypeVar('T') class A(Generic[T]): def __init__(self, x: T) -> None: self.x = x class B(Generic[T]): a: Type[A[T]] = A reveal_type(B[int]().a) # E: Revealed type is 'Type[__main__.A[builtins.int*]]' B[int]().a('hi') # E: Argument 1 to "A" has incompatible type "str"; expected "int" class C(Generic[T]): a = A def __init__(self) -> None: self.aa = self.a(42) reveal_type(C().aa) # E: Revealed type is '__main__.A[builtins.int]' [out] [case testClassValuedAttributesAlias] from typing import Generic, TypeVar T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): ... 
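# An illustrative sketch (not part of the test case; the names "Pair",
# "SamePair" and "Holder" are invented): a generic alias stored as a class
# attribute can be called like the class itself, with the alias fixing how the
# type arguments relate to each other.
from typing import Generic, TypeVar
_T = TypeVar('_T')
_S = TypeVar('_S')
class Pair(Generic[_T, _S]):
    def __init__(self, a: _T, b: _S) -> None:
        self.a, self.b = a, b
SamePair = Pair[_T, _T]        # alias that ties both parameters together
class Holder:
    make = SamePair[int]       # class-valued attribute
p = Holder.make(1, 2)          # behaves like constructing Pair[int, int]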
SameA = A[T, T] class B: a_any = SameA a_int = SameA[int] reveal_type(B().a_any) # E: Revealed type is 'def () -> __main__.A[Any, Any]' reveal_type(B().a_int()) # E: Revealed type is '__main__.A[builtins.int*, builtins.int*]' class C: a_int = SameA[int] def __init__(self) -> None: self.aa = self.a_int() reveal_type(C().aa) # E: Revealed type is '__main__.A[builtins.int*, builtins.int*]' [out] -- Type[C] -- ------- [case testTypeUsingTypeCBasic] from typing import Type class User: pass class ProUser(User): pass def new_user(user_class: Type[User]) -> User: return user_class() reveal_type(new_user(User)) # E: Revealed type is '__main__.User' reveal_type(new_user(ProUser)) # E: Revealed type is '__main__.User' [out] [case testTypeUsingTypeCDefaultInit] from typing import Type class B: pass def f(A: Type[B]) -> None: A(0) # E: Too many arguments for "B" A() [out] [case testTypeUsingTypeCInitWithArg] from typing import Type class B: def __init__(self, a: int) -> None: pass def f(A: Type[B]) -> None: A(0) A() # E: Too few arguments for "B" [out] [case testTypeUsingTypeCTypeVar] from typing import Type, TypeVar class User: pass class ProUser(User): pass U = TypeVar('U', bound=User) def new_user(user_class: Type[U]) -> U: user = user_class() reveal_type(user) return user pro_user = new_user(ProUser) reveal_type(pro_user) [out] main:7: error: Revealed type is 'U`-1' main:10: error: Revealed type is '__main__.ProUser*' [case testTypeUsingTypeCTypeVarDefaultInit] from typing import Type, TypeVar class B: pass T = TypeVar('T', bound=B) def f(A: Type[T]) -> None: A() A(0) # E: Too many arguments for "B" [out] [case testTypeUsingTypeCTypeVarWithInit] from typing import Type, TypeVar class B: def __init__(self, a: int) -> None: pass T = TypeVar('T', bound=B) def f(A: Type[T]) -> None: A() # E: Too few arguments for "B" A(0) [out] [case testTypeUsingTypeCTwoTypeVars] from typing import Type, TypeVar class User: pass class ProUser(User): pass class WizUser(ProUser): pass U = TypeVar('U', bound=User) def new_user(u_c: Type[U]) -> U: pass P = TypeVar('P', bound=ProUser) def new_pro(pro_c: Type[P]) -> P: return new_user(pro_c) wiz = new_pro(WizUser) reveal_type(wiz) def error(u_c: Type[U]) -> P: return new_pro(u_c) # Error here, see below [out] main:11: error: Revealed type is '__main__.WizUser*' main:13: error: Incompatible return value type (got "U", expected "P") main:13: error: Value of type variable "P" of "new_pro" cannot be "U" [case testTypeUsingTypeCCovariance] from typing import Type, TypeVar class User: pass class ProUser(User): pass def new_user(user_class: Type[User]) -> User: return user_class() def new_pro_user(user_class: Type[ProUser]): new_user(user_class) [out] [case testAllowCovariantArgsInConstructor] from typing import Generic, TypeVar T_co = TypeVar('T_co', covariant=True) class C(Generic[T_co]): def __init__(self, x: T_co) -> None: # This should be allowed self.x = x def meth(self) -> None: reveal_type(self.x) # E: Revealed type is 'T_co`1' reveal_type(C(1).x) # E: Revealed type is 'builtins.int*' [builtins fixtures/property.pyi] [out] [case testTypeUsingTypeCErrorCovariance] from typing import Type, TypeVar class User: pass def new_user(user_class: Type[User]): return user_class() def foo(arg: Type[int]): new_user(arg) # E: Argument 1 to "new_user" has incompatible type "Type[int]"; expected "Type[User]" [out] [case testTypeUsingTypeCUnionOverload] from foo import * [file foo.pyi] from typing import Type, Union, overload class X: @overload def __init__(self) -> None: pass @overload 
def __init__(self, a: int) -> None: pass class Y: def __init__(self) -> None: pass def bar(o: Type[Union[X, Y]]): pass bar(X) bar(Y) [out] [case testTypeUsingTypeCTypeAny] from typing import Type, Any def foo(arg: Type[Any]): x = arg() x = arg(0) x = arg('', ()) reveal_type(x) # E: Revealed type is 'Any' x.foo class X: pass foo(X) [out] [case testTypeUsingTypeCTypeAnyMember] from typing import Type, Any def foo(arg: Type[Any]): x = arg.member_name arg.new_member_name = 42 # Member access is ok and types as Any reveal_type(x) # E: Revealed type is 'Any' # But Type[Any] is distinct from Any y: int = arg # E: Incompatible types in assignment (expression has type "Type[Any]", variable has type "int") [out] [case testTypeUsingTypeCTypeAnyMemberFallback] from typing import Type, Any def foo(arg: Type[Any]): reveal_type(arg.__str__) # E: Revealed type is 'def () -> builtins.str' reveal_type(arg.mro()) # E: Revealed type is 'builtins.list[builtins.type]' [builtins fixtures/type.pyi] [out] [case testTypeUsingTypeCTypeNoArg] from typing import Type def foo(arg: Type): x = arg() reveal_type(x) # E: Revealed type is 'Any' class X: pass foo(X) [out] [case testTypeUsingTypeCBuiltinType] from typing import Type def foo(arg: type): pass class X: pass def bar(arg: Type[X]): foo(arg) foo(X) [builtins fixtures/tuple.pyi] [out] [case testTypeUsingTypeCClassMethod] from typing import Type class User: @classmethod def foo(cls) -> int: pass def bar(self) -> int: pass def process(cls: Type[User]): reveal_type(cls.foo()) # E: Revealed type is 'builtins.int' obj = cls() reveal_type(cls.bar(obj)) # E: Revealed type is 'builtins.int' cls.mro() # Defined in class type cls.error # E: "Type[User]" has no attribute "error" [builtins fixtures/classmethod.pyi] [out] [case testTypeUsingTypeCClassMethodUnion] from typing import Type, Union class User: @classmethod def foo(cls) -> int: pass def bar(self) -> int: pass class ProUser(User): pass class BasicUser(User): pass def process(cls: Type[Union[BasicUser, ProUser]]): cls.foo() obj = cls() cls.bar(obj) cls.mro() # Defined in class type cls.error # E: Item "type" of "Union[Type[BasicUser], Type[ProUser]]" has no attribute "error" [builtins fixtures/classmethod.pyi] [out] [case testTypeUsingTypeCClassMethodFromTypeVar] from typing import Type, TypeVar class User: @classmethod def foo(cls) -> int: pass def bar(self) -> int: pass U = TypeVar('U', bound=User) def process(cls: Type[U]): reveal_type(cls.foo()) # E: Revealed type is 'builtins.int' obj = cls() reveal_type(cls.bar(obj)) # E: Revealed type is 'builtins.int' cls.mro() # Defined in class type cls.error # E: "Type[U]" has no attribute "error" [builtins fixtures/classmethod.pyi] [out] [case testTypeUsingTypeCClassMethodFromTypeVarUnionBound] # Ideally this would work, but not worth the effort; just don't crash from typing import Type, TypeVar, Union class User: @classmethod def foo(cls) -> int: pass def bar(self) -> int: pass class ProUser(User): pass class BasicUser(User): pass U = TypeVar('U', bound=Union[ProUser, BasicUser]) def process(cls: Type[U]): cls.foo() # E: "Type[U]" has no attribute "foo" obj = cls() cls.bar(obj) # E: "Type[U]" has no attribute "bar" cls.mro() # Defined in class type cls.error # E: "Type[U]" has no attribute "error" [builtins fixtures/classmethod.pyi] [out] [case testTypeUsingTypeCErrorUnsupportedType] from typing import Type, Tuple def foo(arg: Type[Tuple[int]]): # E: Unsupported type Type["Tuple[int]"] arg() [builtins fixtures/tuple.pyi] [out] [case testTypeUsingTypeCOverloadedClass] from 
foo import * [file foo.pyi] from typing import Type, TypeVar, overload class User: @overload def __init__(self) -> None: pass @overload def __init__(self, arg: int) -> None: pass @classmethod def foo(cls) -> None: pass U = TypeVar('U', bound=User) def new(uc: Type[U]) -> U: uc.foo() u = uc() u.foo() u = uc(0) u.foo() u = uc('') u.foo(0) return uc() u = new(User) [builtins fixtures/classmethod.pyi] [out] tmp/foo.pyi:16: error: No overload variant of "User" matches argument types [builtins.str] tmp/foo.pyi:17: error: Too many arguments for "foo" of "User" [case testTypeUsingTypeCInUpperBound] from typing import TypeVar, Type class B: pass T = TypeVar('T', bound=Type[B]) def f(a: T): pass [out] [case testTypeUsingTypeCTuple] from typing import Type, Tuple def f(a: Type[Tuple[int, int]]): a() [out] main:2: error: Unsupported type Type["Tuple[int, int]"] [case testTypeUsingTypeCNamedTuple] from typing import Type, NamedTuple N = NamedTuple('N', [('x', int), ('y', int)]) def f(a: Type[N]): a() [builtins fixtures/list.pyi] [out] main:3: error: Unsupported type Type["N"] [case testTypeUsingTypeCJoin] from typing import Type class B: pass class C(B): pass class D(B): pass def foo(c: Type[C], d: Type[D]) -> None: x = [c, d] reveal_type(x) [builtins fixtures/list.pyi] [out] main:7: error: Revealed type is 'builtins.list[Type[__main__.B]]' [case testTypeEquivalentTypeAny] from typing import Type, Any a = None # type: Type[Any] b = a # type: type x = None # type: type y = x # type: Type[Any] class C: ... p = None # type: type q = p # type: Type[C] [builtins fixtures/list.pyi] [out] [case testTypeEquivalentTypeAny2] from typing import Type, Any, TypeVar, Generic class C: ... x = None # type: type y = None # type: Type[Any] z = None # type: Type[C] lst = [x, y, z] reveal_type(lst) # E: Revealed type is 'builtins.list[builtins.type*]' T1 = TypeVar('T1', bound=type) T2 = TypeVar('T2', bound=Type[Any]) class C1(Generic[T1]): ... class C2(Generic[T2]): ... 
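# An illustrative sketch (not part of the test case; the helper name
# "describe" is invented): plain "type" and Type[Any] are treated as
# equivalent, so a value declared with one is accepted where the other is
# expected.
from typing import Any, Type
def describe(t: type) -> str:
    return t.__name__
some_class: Type[Any] = int
name = describe(some_class)    # Type[Any] satisfies a plain "type" parameter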
C1[Type[Any]], C2[type] # both these should not fail [builtins fixtures/list.pyi] [out] [case testTypeEquivalentTypeAnyEdgeCase] class C: pass class M(type): def __init__(cls, x) -> None: type.__init__(cls, x) class Mbad(type): def __init__(cls, x) -> None: type.__init__(C(), x) # E: Argument 1 to "__init__" of "type" has incompatible type "C"; expected "type" [builtins fixtures/primitives.pyi] [out] [case testTypeMatchesOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload, Union class User: pass UserType = User # type: Type[User] @overload def f(a: object) -> int: pass @overload def f(a: int) -> str: pass reveal_type(f(User)) # E: Revealed type is 'builtins.int' reveal_type(f(UserType)) # E: Revealed type is 'builtins.int' [builtins fixtures/classmethod.pyi] [out] [case testTypeMatchesGeneralTypeInOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload class User: pass UserType = User # type: Type[User] @overload def f(a: type) -> int: return 1 @overload def f(a: int) -> str: return "a" reveal_type(f(User)) # E: Revealed type is 'builtins.int' reveal_type(f(UserType)) # E: Revealed type is 'builtins.int' reveal_type(f(1)) # E: Revealed type is 'builtins.str' [builtins fixtures/classmethod.pyi] [out] [case testTypeMatchesSpecificTypeInOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload class User: pass UserType = User # type: Type[User] @overload def f(a: User) -> User: return User() @overload def f(a: Type[User]) -> int: return 1 @overload def f(a: int) -> str: return "a" reveal_type(f(User)) # E: Revealed type is 'builtins.int' reveal_type(f(UserType)) # E: Revealed type is 'builtins.int' reveal_type(f(User())) # E: Revealed type is 'foo.User' reveal_type(f(1)) # E: Revealed type is 'builtins.str' [builtins fixtures/classmethod.pyi] [out] [case testMixingTypeTypeInOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload class User: pass @overload def f(a: User) -> Type[User]: return User @overload def f(a: Type[User]) -> User: return a() @overload def f(a: int) -> Type[User]: return User @overload def f(a: str) -> User: return User() reveal_type(f(User())) # E: Revealed type is 'Type[foo.User]' reveal_type(f(User)) # E: Revealed type is 'foo.User' reveal_type(f(3)) # E: Revealed type is 'Type[foo.User]' reveal_type(f("hi")) # E: Revealed type is 'foo.User' [builtins fixtures/classmethod.pyi] [out] [case testGeneralTypeDoesNotMatchSpecificTypeInOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload class User: pass @overload def f(a: Type[User]) -> None: pass @overload def f(a: int) -> None: pass def mock() -> type: return User f(User) f(mock()) # E: No overload variant of "f" matches argument types [builtins.type] [builtins fixtures/classmethod.pyi] [out] [case testNonTypeDoesNotMatchOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload class User: pass @overload def f(a: Type[User]) -> None: pass @overload def f(a: type) -> None: pass f(3) # E: No overload variant of "f" matches argument types [builtins.int] [builtins fixtures/classmethod.pyi] [out] [case testInstancesDoNotMatchTypeInOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload class User: pass @overload def f(a: Type[User]) -> None: pass @overload def f(a: int) -> None: pass f(User) f(User()) # E: No overload variant of "f" matches argument types [foo.User] [builtins fixtures/classmethod.pyi] 
[out] [case testTypeCovarianceWithOverloadedFunctions] from foo import * [file foo.pyi] from typing import Type, overload class A: pass class B(A): pass class C(B): pass AType = A # type: Type[A] BType = B # type: Type[B] CType = C # type: Type[C] @overload def f(a: Type[B]) -> None: pass @overload def f(a: int) -> None: pass f(A) # E: No overload variant of "f" matches argument types [def () -> foo.A] f(B) f(C) f(AType) # E: No overload variant of "f" matches argument types [Type[foo.A]] f(BType) f(CType) [builtins fixtures/classmethod.pyi] [out] [case testOverloadedCovariantTypesFail] from foo import * [file foo.pyi] from typing import Type, overload class A: pass class B(A): pass @overload def f(a: Type[B]) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(a: Type[A]) -> str: pass [builtins fixtures/classmethod.pyi] [out] [case testDistinctOverloadedCovariantTypesSucceed] from foo import * [file foo.pyi] from typing import Type, overload class A: pass class AChild(A): pass class B: pass class BChild(B): pass @overload def f(a: Type[A]) -> int: pass @overload def f(a: Type[B]) -> str: pass @overload def f(a: A) -> A: pass @overload def f(a: B) -> B: pass reveal_type(f(A)) # E: Revealed type is 'builtins.int' reveal_type(f(AChild)) # E: Revealed type is 'builtins.int' reveal_type(f(B)) # E: Revealed type is 'builtins.str' reveal_type(f(BChild)) # E: Revealed type is 'builtins.str' reveal_type(f(A())) # E: Revealed type is 'foo.A' reveal_type(f(AChild())) # E: Revealed type is 'foo.A' reveal_type(f(B())) # E: Revealed type is 'foo.B' reveal_type(f(BChild())) # E: Revealed type is 'foo.B' [builtins fixtures/classmethod.pyi] [out] [case testTypeTypeOverlapsWithObjectAndType] from foo import * [file foo.pyi] from typing import Type, overload class User: pass @overload def f(a: Type[User]) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(a: object) -> str: pass @overload def g(a: Type[User]) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def g(a: type) -> str: pass [builtins fixtures/classmethod.pyi] [out] [case testTypeOverlapsWithObject] from foo import * [file foo.pyi] from typing import Type, overload class User: pass @overload def f(a: type) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(a: object) -> str: pass [builtins fixtures/classmethod.pyi] [out] [case testTypeConstructorReturnsTypeType] class User: @classmethod def test_class_method(cls) -> int: pass @staticmethod def test_static_method() -> str: pass def test_instance_method(self) -> None: pass u = User() reveal_type(type(u)) # E: Revealed type is 'Type[__main__.User]' reveal_type(type(u).test_class_method()) # E: Revealed type is 'builtins.int' reveal_type(type(u).test_static_method()) # E: Revealed type is 'builtins.str' type(u).test_instance_method() # E: Too few arguments for "test_instance_method" of "User" [builtins fixtures/classmethod.pyi] [out] [case testObfuscatedTypeConstructorReturnsTypeType] from typing import TypeVar class User: pass f1 = type A = TypeVar('A') def f2(func: A) -> A: return func u = User() reveal_type(f1(u)) # E: Revealed type is 'Type[__main__.User]' reveal_type(f2(type)(u)) # E: Revealed type is 'Type[__main__.User]' [builtins fixtures/classmethod.pyi] [out] [case testTypeConstructorLookalikeFails] class User: pass def fake1(a: object) -> type: return User 
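# An illustrative sketch (outside the test case; the helper name
# "current_class" is invented): only the builtin type(x) call is special-cased
# to produce Type[X]; a user-written function annotated "-> type" erases the
# specific class, so the result no longer exposes the class's own methods to
# the checker.
def current_class(obj: object) -> type:
    return type(obj)
exact = type(3)            # seen as Type[int]
erased = current_class(3)  # seen as plain "type"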
def fake2(a: int) -> type: return User reveal_type(type(User())) # E: Revealed type is 'Type[__main__.User]' reveal_type(fake1(User())) # E: Revealed type is 'builtins.type' reveal_type(fake2(3)) # E: Revealed type is 'builtins.type' [builtins fixtures/classmethod.pyi] [out] [case testOtherTypeConstructorsSucceed] def foo(self) -> int: return self.attr User = type('User', (object,), {'foo': foo, 'attr': 3}) reveal_type(User) # E: Revealed type is 'builtins.type' [builtins fixtures/args.pyi] [out] [case testTypeTypeComparisonWorks] class User: pass User == User User == type(User()) type(User()) == User type(User()) == type(User()) User != User User != type(User()) type(User()) != User type(User()) != type(User()) int == int int == type(3) type(3) == int type(3) == type(3) int != int int != type(3) type(3) != int type(3) != type(3) User is User User is type(User) type(User) is User type(User) is type(User) int is int int is type(3) type(3) is int type(3) is type(3) int.__eq__(int) int.__eq__(3, 4) [builtins fixtures/args.pyi] [out] main:33: error: Too few arguments for "__eq__" of "int" main:33: error: Unsupported operand types for == ("int" and "Type[int]") [case testMroSetAfterError] class C(str, str): foo = 0 bar = foo [out] main:1: error: Duplicate base class "str" [case testCannotDetermineMro] class A: pass class B(A): pass class C(B): pass class D(A, B): pass # E: Cannot determine consistent method resolution order (MRO) for "D" class E(C, D): pass # E: Cannot determine consistent method resolution order (MRO) for "E" [case testInconsistentMroLocalRef] class A: pass class B(object, A): # E: Cannot determine consistent method resolution order (MRO) for "B" def readlines(self): pass __iter__ = readlines [case testDynamicMetaclass] class C(metaclass=int()): # E: Dynamic metaclass not supported for 'C' pass [case testDynamicMetaclassCrash] class C(metaclass=int().x): # E: Dynamic metaclass not supported for 'C' pass [case testVariableSubclass] class A: a = 1 # type: int class B(A): a = 1 [out] [case testVariableSubclassAssignMismatch] class A: a = 1 # type: int class B(A): a = "a" [out] main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case testVariableSubclassAssignment] class A: a = None # type: int class B(A): def __init__(self) -> None: self.a = "a" [out] main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testVariableSubclassTypeOverwrite] class A: a = None # type: int class B(A): a = None # type: str class C(B): a = "a" [out] main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case testVariableSubclassTypeOverwriteImplicit] class A: a = 1 class B(A): a = None # type: str [out] main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case testVariableSuperUsage] class A: a = [] # type: list class B(A): a = [1, 2] class C(B): a = B.a + [3] [builtins fixtures/list.pyi] [out] [case testClassAllBases] from typing import Union class A: a = None # type: Union[int, str] class B(A): a = 1 class C(B): a = "str" class D(A): a = "str" [out] main:7: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int") [case testVariableTypeVar] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): a = None # type: T class B(A[int]): a = 1 [case testVariableTypeVarInvalid] from typing 
import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): a = None # type: T class B(A[int]): a = "abc" [out] main:6: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case testVariableTypeVarIndirectly] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): a = None # type: T class B(A[int]): pass class C(B): a = "a" [out] main:8: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case testVariableTypeVarList] from typing import List, TypeVar, Generic T = TypeVar('T') class A(Generic[T]): a = None # type: List[T] b = None # type: List[T] class B(A[int]): a = [1] b = [''] [builtins fixtures/list.pyi] [out] main:8: error: List item 0 has incompatible type "str"; expected "int" [case testVariableMethod] class A: def a(self) -> None: pass b = 1 class B(A): a = 1 def b(self) -> None: pass [out] main:5: error: Incompatible types in assignment (expression has type "int", base class "A" defined the type as "Callable[[A], None]") main:6: error: Signature of "b" incompatible with supertype "A" [case testVariableProperty] class A: @property def a(self) -> bool: pass class B(A): a = None # type: bool class C(A): a = True class D(A): a = 1 [builtins fixtures/property.pyi] [out] main:9: error: Incompatible types in assignment (expression has type "int", base class "A" defined the type as "bool") [case testVariableOverwriteAny] from typing import Any class A: a = 1 class B(A): a = 'x' # type: Any [out] [case testInstanceMethodOverwrite] class B(): def n(self, a: int) -> None: pass class C(B): def m(self, a: int) -> None: pass n = m [out] [case testInstanceMethodOverwriteError] class B(): def n(self, a: int) -> None: pass class C(B): def m(self, a: str) -> None: pass n = m [out] main:5: error: Incompatible types in assignment (expression has type "Callable[[str], None]", base class "B" defined the type as "Callable[[int], None]") [case testInstanceMethodOverwriteTypevar] from typing import Generic, TypeVar T = TypeVar("T") class B(Generic[T]): def n(self, a: T) -> None: pass class C(B[int]): def m(self, a: int) -> None: pass n = m [case testInstanceMethodOverwriteTwice] class I: def foo(self) -> None: pass class A(I): def foo(self) -> None: pass class B(A): def bar(self) -> None: pass foo = bar class C(B): def bar(self) -> None: pass foo = bar [case testClassMethodOverwrite] class B(): @classmethod def n(self, a: int) -> None: pass class C(B): @classmethod def m(self, a: int) -> None: pass n = m [builtins fixtures/classmethod.pyi] [out] [case testClassMethodOverwriteError] class B(): @classmethod def n(self, a: int) -> None: pass class C(B): @classmethod def m(self, a: str) -> None: pass n = m [builtins fixtures/classmethod.pyi] [out] main:7: error: Incompatible types in assignment (expression has type "Callable[[str], None]", base class "B" defined the type as "Callable[[int], None]") [case testClassSpec] from typing import Callable class A(): b = None # type: Callable[[A, int], int] class B(A): def c(self, a: int) -> int: pass b = c [case testClassSpecError] from typing import Callable class A(): b = None # type: Callable[[A, int], int] class B(A): def c(self, a: str) -> int: pass b = c [out] main:6: error: Incompatible types in assignment (expression has type "Callable[[str], int]", base class "A" defined the type as "Callable[[int], int]") [case testClassStaticMethod] class A(): @staticmethod def a(a: int) -> None: pass class B(A): @staticmethod def b(a: str) 
-> None: pass a = b [builtins fixtures/staticmethod.pyi] [out] main:7: error: Incompatible types in assignment (expression has type "Callable[[str], None]", base class "A" defined the type as "Callable[[int], None]") [case testClassStaticMethodIndirect] class A(): @staticmethod def a(a: int) -> None: pass c = a class B(A): @staticmethod def b(a: str) -> None: pass c = b [builtins fixtures/staticmethod.pyi] [out] main:8: error: Incompatible types in assignment (expression has type "Callable[[str], None]", base class "A" defined the type as "Callable[[int], None]") [case testClassStaticMethodSubclassing] class A: @staticmethod def a() -> None: pass def b(self) -> None: pass @staticmethod def c() -> None: pass class B(A): def a(self) -> None: pass # E: Signature of "a" incompatible with supertype "A" @classmethod def b(cls) -> None: pass @staticmethod def c() -> None: pass [builtins fixtures/classmethod.pyi] [case testTempNode] class A(): def a(self) -> None: pass class B(A): def b(self) -> None: pass a = c = b [case testListObject] from typing import List class A: x = [] # type: List[object] class B(A): x = [1] [builtins fixtures/list.pyi] [case testClassMemberObject] class A: x = object() class B(A): x = 1 class C(B): x = '' [out] main:6: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int") [case testSlots] class A: __slots__ = ("a") class B(A): __slots__ = ("a", "b") [case testClassOrderOfError] class A: x = 1 class B(A): x = "a" class C(B): x = object() [out] main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") main:6: error: Incompatible types in assignment (expression has type "object", base class "B" defined the type as "str") [case testClassOneErrorPerLine] class A: x = 1 class B(A): x = "" x = 1.0 [out] main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") main:5: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case testClassIgnoreType] class A: x = 0 class B(A): x = '' # type: ignore class C(B): x = '' [out] [case testInvalidMetaclassStructure] class X(type): pass class Y(type): pass class A(metaclass=X): pass class B(A, metaclass=Y): pass # E: Inconsistent metaclass structure for 'B' [case testMetaclassNoTypeReveal] class M: x = 0 # type: int class A(metaclass=M): pass # E: Metaclasses not inheriting from 'type' are not supported A.x # E: "Type[A]" has no attribute "x" [case testMetaclassTypeReveal] from typing import Type class M(type): x = 0 # type: int class A(metaclass=M): pass def f(TA: Type[A]): reveal_type(TA) # E: Revealed type is 'Type[__main__.A]' reveal_type(TA.x) # E: Revealed type is 'builtins.int' [case testSubclassMetaclass] class M1(type): x = 0 class M2(M1): pass class C(metaclass=M2): pass reveal_type(C.x) # E: Revealed type is 'builtins.int' [case testMetaclassSubclass] from typing import Type class M(type): x = 0 # type: int class A(metaclass=M): pass class B(A): pass def f(TB: Type[B]): reveal_type(TB) # E: Revealed type is 'Type[__main__.B]' reveal_type(TB.x) # E: Revealed type is 'builtins.int' [case testMetaclassIterable] from typing import Iterable, Iterator class ImplicitMeta(type): def __iter__(self) -> Iterator[int]: yield 1 class Implicit(metaclass=ImplicitMeta): pass for _ in Implicit: pass reveal_type(list(Implicit)) # E: Revealed type is 'builtins.list[builtins.int*]' class ExplicitMeta(type, 
Iterable[int]): def __iter__(self) -> Iterator[int]: yield 1 class Explicit(metaclass=ExplicitMeta): pass for _ in Explicit: pass reveal_type(list(Explicit)) # E: Revealed type is 'builtins.list[builtins.int*]' [builtins fixtures/list.pyi] [case testMetaclassTuple] from typing import Tuple class M(Tuple[int]): pass class C(metaclass=M): pass # E: Invalid metaclass 'M' [builtins fixtures/tuple.pyi] [case testMetaclassOperatorBeforeReversed] class X: def __radd__(self, x: int) -> int: ... class Meta(type): def __add__(cls, x: X) -> str: ... class Concrete(metaclass=Meta): pass reveal_type(Concrete + X()) # E: Revealed type is 'builtins.str' Concrete + "hello" # E: Unsupported operand types for + ("Type[Concrete]" and "str") [case testMetaclassGetitem] class M(type): def __getitem__(self, key) -> int: return 1 class A(metaclass=M): pass reveal_type(A[M]) # E: Revealed type is 'builtins.int' [case testMetaclassSelfType] from typing import TypeVar, Type class M(type): pass T = TypeVar('T') class M1(M): def foo(cls: Type[T]) -> T: ... class A(metaclass=M1): pass reveal_type(A.foo()) # E: Revealed type is '__main__.A*' [case testMetaclassAndSkippedImport] # flags: --ignore-missing-imports from missing import M class A(metaclass=M): y = 0 reveal_type(A.y) # E: Revealed type is 'builtins.int' A.x # E: "Type[A]" has no attribute "x" [case testAnyMetaclass] from typing import Any M = None # type: Any class A(metaclass=M): y = 0 reveal_type(A.y) # E: Revealed type is 'builtins.int' A.x # E: "Type[A]" has no attribute "x" [case testInvalidVariableAsMetaclass] from typing import Any M = 0 # type: int MM = 0 class A(metaclass=M): # E: Invalid metaclass 'M' y = 0 class B(metaclass=MM): # E: Invalid metaclass 'MM' y = 0 reveal_type(A.y) # E: Revealed type is 'builtins.int' A.x # E: "Type[A]" has no attribute "x" [case testAnyAsBaseOfMetaclass] from typing import Any, Type M = None # type: Any class MM(M): pass class A(metaclass=MM): y = 0 @classmethod def f(cls) -> None: pass def g(self) -> None: pass def h(a: Type[A], b: Type[object]) -> None: h(a, a) h(b, a) # E: Argument 1 to "h" has incompatible type "Type[object]"; expected "Type[A]" a.f(1) # E: Too many arguments for "f" of "A" reveal_type(a.y) # E: Revealed type is 'builtins.int' x = A # type: MM reveal_type(A.y) # E: Revealed type is 'builtins.int' reveal_type(A.x) # E: Revealed type is 'Any' A.f(1) # E: Too many arguments for "f" of "A" A().g(1) # E: Too many arguments for "g" of "A" [builtins fixtures/classmethod.pyi] [case testMetaclassTypeCallable] class M(type): x = 5 class A(metaclass=M): pass reveal_type(type(A).x) # E: Revealed type is 'builtins.int' [case testMetaclassStrictSupertypeOfTypeWithClassmethods] from typing import Type, TypeVar TA = TypeVar('TA', bound='A') TTA = TypeVar('TTA', bound='Type[A]') TM = TypeVar('TM', bound='M') class M(type): def g1(cls: 'Type[A]') -> A: pass # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class '__main__.M' def g2(cls: Type[TA]) -> TA: pass # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class '__main__.M' def g3(cls: TTA) -> TTA: pass # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class '__main__.M' def g4(cls: TM) -> TM: pass m: M class A(metaclass=M): def foo(self): pass reveal_type(A.g1) # E: Revealed type is 'def () -> __main__.A' reveal_type(A.g2) # E: Revealed type is 'def () -> __main__.A*' reveal_type(A.g3) # E: Revealed type is 'def () -> def () -> __main__.A' reveal_type(A.g4) # E: Revealed type is 'def () -> 
def () -> __main__.A' class B(metaclass=M): def foo(self): pass B.g1 # Should be error: Argument 0 to "g1" of "M" has incompatible type "B"; expected "Type[A]" B.g2 # Should be error: Argument 0 to "g2" of "M" has incompatible type "B"; expected "Type[TA]" B.g3 # Should be error: Argument 0 to "g3" of "M" has incompatible type "B"; expected "TTA" reveal_type(B.g4) # E: Revealed type is 'def () -> def () -> __main__.B' # 4 examples of unsoundness - instantiation, classmethod, staticmethod and ClassVar: ta: Type[A] = m # E: Incompatible types in assignment (expression has type "M", variable has type "Type[A]") a: A = ta() reveal_type(ta.g1) # E: Revealed type is 'def () -> __main__.A' reveal_type(ta.g2) # E: Revealed type is 'def () -> __main__.A*' reveal_type(ta.g3) # E: Revealed type is 'def () -> Type[__main__.A]' reveal_type(ta.g4) # E: Revealed type is 'def () -> Type[__main__.A]' x: M = ta x.g1 # should be error: Argument 0 to "g1" of "M" has incompatible type "M"; expected "Type[A]" x.g2 # should be error: Argument 0 to "g2" of "M" has incompatible type "M"; expected "Type[TA]" x.g3 # should be error: Argument 0 to "g3" of "M" has incompatible type "M"; expected "TTA" reveal_type(x.g4) # E: Revealed type is 'def () -> __main__.M*' def r(ta: Type[TA], tta: TTA) -> None: x: M = ta y: M = tta class Class(metaclass=M): @classmethod def f1(cls: Type[Class]) -> None: pass @classmethod def f2(cls: M) -> None: pass cl: Type[Class] = m # E: Incompatible types in assignment (expression has type "M", variable has type "Type[Class]") reveal_type(cl.f1) # E: Revealed type is 'def ()' reveal_type(cl.f2) # E: Revealed type is 'def ()' x1: M = cl class Static(metaclass=M): @staticmethod def f() -> None: pass s: Type[Static] = m # E: Incompatible types in assignment (expression has type "M", variable has type "Type[Static]") reveal_type(s.f) # E: Revealed type is 'def ()' x2: M = s from typing import ClassVar class Cvar(metaclass=M): x = 1 # type: ClassVar[int] cv: Type[Cvar] = m # E: Incompatible types in assignment (expression has type "M", variable has type "Type[Cvar]") cv.x x3: M = cv [builtins fixtures/classmethod.pyi] [case testMetaclassOverloadResolution] from typing import Type, overload class A: pass class EM(type): pass class E(metaclass=EM): pass class EM1(type): pass class E1(metaclass=EM1): pass @overload def f(x: EM) -> int: ... @overload def f(x: EM1) -> A: ... @overload def f(x: str) -> str: ... def f(x: object) -> object: return '' e: EM reveal_type(f(e)) # E: Revealed type is 'builtins.int' et: Type[E] reveal_type(f(et)) # E: Revealed type is 'builtins.int' e1: EM1 reveal_type(f(e1)) # E: Revealed type is '__main__.A' e1t: Type[E1] reveal_type(f(e1t)) # E: Revealed type is '__main__.A' reveal_type(f('')) # E: Revealed type is 'builtins.str' [case testTypeCErasesGenericsFromC] from typing import Generic, Type, TypeVar K = TypeVar('K') V = TypeVar('V') class ExampleDict(Generic[K, V]): ... D = TypeVar('D') def mkdict(dict_type: Type[D]) -> D: ... 
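# An illustrative sketch (separate from the test case; the names "Box" and
# "build" are invented): passing a bare generic class where Type[D] is
# expected leaves its type arguments unspecified, so they are filled with Any.
from typing import Generic, Type, TypeVar
_B = TypeVar('_B')
_R = TypeVar('_R')
class Box(Generic[_B]):
    pass
def build(factory: Type[_R]) -> _R:
    return factory()
box = build(Box)   # inferred as Box[Any]: the class object carries no type argument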
reveal_type(mkdict(ExampleDict)) # E: Revealed type is '__main__.ExampleDict*[Any, Any]' [case testTupleForwardBase] from m import a a[0]() # E: "int" not callable [file m.py] from typing import Tuple a = None # type: A class A(Tuple[int, str]): pass [builtins fixtures/tuple.pyi] -- Synthetic types crashes -- ----------------------- [case testCrashOnSelfRecursiveNamedTupleVar] from typing import NamedTuple N = NamedTuple('N', [('x', N)]) # E: Recursive types not fully supported yet, nested types replaced with "Any" n: N [out] [case testCrashOnSelfRecursiveTypedDictVar] from mypy_extensions import TypedDict A = TypedDict('A', {'a': 'A'}) # type: ignore a: A [builtins fixtures/isinstancelist.pyi] [out] [case testCrashInJoinOfSelfRecursiveNamedTuples] from typing import NamedTuple class N(NamedTuple): # type: ignore x: N class M(NamedTuple): # type: ignore x: M n: N m: M lst = [n, m] [builtins fixtures/isinstancelist.pyi] [case testCorrectJoinOfSelfRecursiveTypedDicts] from mypy_extensions import TypedDict class N(TypedDict): x: N class M(TypedDict): x: M n: N m: M lst = [n, m] reveal_type(lst[0]['x']) # E: Revealed type is 'TypedDict('__main__.N', {'x': Any})' [builtins fixtures/isinstancelist.pyi] [out] main:3: error: Recursive types not fully supported yet, nested types replaced with "Any" main:5: error: Recursive types not fully supported yet, nested types replaced with "Any" [case testCrashInForwardRefToNamedTupleWithIsinstance] from typing import Dict, NamedTuple NameDict = Dict[str, 'NameInfo'] class NameInfo(NamedTuple): ast: bool def parse_ast(name_dict: NameDict) -> None: if isinstance(name_dict[''], int): pass reveal_type(name_dict['test']) # E: Revealed type is 'Tuple[builtins.bool, fallback=__main__.NameInfo]' [builtins fixtures/isinstancelist.pyi] [out] [case testCrashInForwardRefToTypedDictWithIsinstance] from mypy_extensions import TypedDict from typing import Dict NameDict = Dict[str, 'NameInfo'] class NameInfo(TypedDict): ast: bool def parse_ast(name_dict: NameDict) -> None: if isinstance(name_dict[''], int): pass reveal_type(name_dict['']['ast']) # E: Revealed type is 'builtins.bool' [builtins fixtures/isinstancelist.pyi] [out] [case testCorrectIsinstanceInForwardRefToNewType] from typing import Dict, NewType NameDict = Dict[str, 'NameInfo'] class Base: ast: bool NameInfo = NewType('NameInfo', Base) def parse_ast(name_dict: NameDict) -> None: if isinstance(name_dict[''], int): pass x = name_dict[''] reveal_type(x) # E: Revealed type is '__main__.NameInfo*' x = NameInfo(Base()) # OK x = Base() # E: Incompatible types in assignment (expression has type "Base", variable has type "NameInfo") [builtins fixtures/isinstancelist.pyi] [out] [case testNoCrashForwardRefToBrokenDoubleNewType] from typing import Any, Dict, List, NewType Foo = NewType('NotFoo', int) # E: String argument 1 'NotFoo' to NewType(...) 
does not match variable name 'Foo' Foos = NewType('Foos', List[Foo]) # type: ignore def frob(foos: Dict[Any, Foos]) -> None: foo = foos.get(1) dict(foo) [builtins fixtures/dict.pyi] [out] [case testNoCrashForwardRefToBrokenDoubleNewTypeClass] from typing import Any, Dict, List, NewType Foo = NewType('NotFoo', int) # type: ignore Foos = NewType('Foos', List[Foo]) # type: ignore x: C class C: def frob(self, foos: Dict[Any, Foos]) -> None: foo = foos.get(1) dict(foo) reveal_type(x.frob) # E: Revealed type is 'def (foos: builtins.dict[Any, __main__.Foos])' [builtins fixtures/dict.pyi] [out] [case testNewTypeFromForwardNamedTuple] from typing import NewType, NamedTuple, Tuple NT = NewType('NT', N) class N(NamedTuple): x: int x: NT = N(1) # E: Incompatible types in assignment (expression has type "N", variable has type "NT") x = NT(N(1)) [out] [case testNewTypeFromForwardTypedDict] from typing import NewType, Tuple from mypy_extensions import TypedDict NT = NewType('NT', N) # E: Argument 2 to NewType(...) must be subclassable (got TypedDict('__main__.N', {'x': builtins.int})) class N(TypedDict): x: int [builtins fixtures/dict.pyi] [out] [case testCorrectAttributeInForwardRefToNamedTuple] from typing import NamedTuple proc: Process reveal_type(proc.state) # E: Revealed type is 'builtins.int' def get_state(proc: 'Process') -> int: return proc.state class Process(NamedTuple): state: int [out] [case testCorrectItemTypeInForwardRefToTypedDict] from mypy_extensions import TypedDict proc: Process reveal_type(proc['state']) # E: Revealed type is 'builtins.int' def get_state(proc: 'Process') -> int: return proc['state'] class Process(TypedDict): state: int [builtins fixtures/isinstancelist.pyi] [out] [case testCorrectDoubleForwardNamedTuple] from typing import NamedTuple x: A class A(NamedTuple): one: 'B' other: int class B(NamedTuple): attr: str y: A y = x reveal_type(x.one.attr) # E: Revealed type is 'builtins.str' [out] [case testCrashOnDoubleForwardTypedDict] from mypy_extensions import TypedDict x: A class A(TypedDict): one: 'B' other: int class B(TypedDict): attr: str reveal_type(x['one']['attr']) # E: Revealed type is 'builtins.str' [builtins fixtures/isinstancelist.pyi] [out] [case testCrashOnForwardUnionOfNamedTuples] from typing import Union, NamedTuple Node = Union['Foo', 'Bar'] class Foo(NamedTuple): x: int class Bar(NamedTuple): x: int def foo(node: Node) -> int: x = node reveal_type(node) # E: Revealed type is 'Union[Tuple[builtins.int, fallback=__main__.Foo], Tuple[builtins.int, fallback=__main__.Bar]]' return x.x [out] [case testCrashOnForwardUnionOfTypedDicts] from mypy_extensions import TypedDict from typing import Union NodeType = Union['Foo', 'Bar'] class Foo(TypedDict): x: int class Bar(TypedDict): x: int def foo(node: NodeType) -> int: x = node return x['x'] [builtins fixtures/isinstancelist.pyi] [out] [case testSupportForwardUnionOfNewTypes] from typing import Union, NewType x: Node reveal_type(x.x) # E: Revealed type is 'builtins.int' class A: x: int class B: x: int Node = Union['Foo', 'Bar'] Foo = NewType('Foo', A) Bar = NewType('Bar', B) def foo(node: Node) -> Node: x = node return Foo(A()) [out] [case testForwardReferencesInNewTypeMRORecomputed] from typing import NewType x: Foo Foo = NewType('Foo', B) class A: x: int class B(A): pass reveal_type(x.x) # E: Revealed type is 'builtins.int' [out] [case testCrashOnComplexNamedTupleUnionProperty] from typing import NamedTuple, Union x: AOrB AOrB = Union['A', 'B'] class A(NamedTuple): x: int class B(object): def __init__(self, a: 
AOrB) -> None: self.a = a @property def x(self) -> int: return self.a.x reveal_type(x.x) # E: Revealed type is 'builtins.int' [builtins fixtures/property.pyi] [out] [case testCorrectIsinstanceWithForwardUnion] from typing import Union, NamedTuple ForwardUnion = Union['TP', int] class TP(NamedTuple('TP', [('x', int)])): pass def f(x: ForwardUnion) -> None: reveal_type(x) # E: Revealed type is 'Union[Tuple[builtins.int, fallback=__main__.TP], builtins.int]' if isinstance(x, TP): reveal_type(x) # E: Revealed type is 'Tuple[builtins.int, fallback=__main__.TP]' [builtins fixtures/isinstance.pyi] [out] [case testCrashInvalidArgsSyntheticClassSyntax] from typing import List, NamedTuple from mypy_extensions import TypedDict class TD(TypedDict): x: List[int, str] # E: "list" expects 1 type argument, but 2 given class NM(NamedTuple): x: List[int, str] # E: "list" expects 1 type argument, but 2 given # These two should never crash, reveals are in the next test TD({'x': []}) NM(x=[]) [builtins fixtures/dict.pyi] [out] [case testCrashInvalidArgsSyntheticClassSyntaxReveals] from typing import List, NamedTuple from mypy_extensions import TypedDict class TD(TypedDict): x: List[int, str] # E: "list" expects 1 type argument, but 2 given class NM(NamedTuple): x: List[int, str] # E: "list" expects 1 type argument, but 2 given x: TD x1 = TD({'x': []}) y: NM y1 = NM(x=[]) reveal_type(x) # E: Revealed type is 'TypedDict('__main__.TD', {'x': builtins.list[Any]})' reveal_type(x1) # E: Revealed type is 'TypedDict('__main__.TD', {'x': builtins.list[Any]})' reveal_type(y) # E: Revealed type is 'Tuple[builtins.list[Any], fallback=__main__.NM]' reveal_type(y1) # E: Revealed type is 'Tuple[builtins.list[Any], fallback=__main__.NM]' [builtins fixtures/dict.pyi] [out] [case testCrashInvalidArgsSyntheticFunctionSyntax] from typing import List, NewType, NamedTuple from mypy_extensions import TypedDict TD = TypedDict('TD', {'x': List[int, str]}) # E: "list" expects 1 type argument, but 2 given NM = NamedTuple('NM', [('x', List[int, str])]) # E: "list" expects 1 type argument, but 2 given NT = NewType('NT', List[int, str]) # E: "list" expects 1 type argument, but 2 given # These three should not crash TD({'x': []}) NM(x=[]) NT([]) [builtins fixtures/dict.pyi] [out] [case testCrashForwardSyntheticClassSyntax] from typing import NamedTuple from mypy_extensions import TypedDict class A1(NamedTuple): b: 'B' x: int class A2(TypedDict): b: 'B' x: int class B: pass x: A1 y: A2 reveal_type(x.b) # E: Revealed type is '__main__.B' reveal_type(y['b']) # E: Revealed type is '__main__.B' [builtins fixtures/dict.pyi] [out] [case testCrashForwardSyntheticFunctionSyntax] from typing import NamedTuple from mypy_extensions import TypedDict A1 = NamedTuple('A1', [('b', 'B'), ('x', int)]) A2 = TypedDict('A2', {'b': 'B', 'x': int}) class B: pass x: A1 y: A2 reveal_type(x.b) # E: Revealed type is '__main__.B' reveal_type(y['b']) # E: Revealed type is '__main__.B' [builtins fixtures/dict.pyi] [out] -- Special support for six -- ----------------------- [case testSixMetaclass] import six class M(type): x = 5 class A(six.with_metaclass(M)): pass @six.add_metaclass(M) class B: pass reveal_type(type(A).x) # E: Revealed type is 'builtins.int' reveal_type(type(B).x) # E: Revealed type is 'builtins.int' [case testSixMetaclass_python2] import six class M(type): x = 5 class A(six.with_metaclass(M)): pass @six.add_metaclass(M) class B: pass reveal_type(type(A).x) # E: Revealed type is 'builtins.int' reveal_type(type(B).x) # E: Revealed type is 'builtins.int' 
[case testFromSixMetaclass] from six import with_metaclass, add_metaclass class M(type): x = 5 class A(with_metaclass(M)): pass @add_metaclass(M) class B: pass reveal_type(type(A).x) # E: Revealed type is 'builtins.int' reveal_type(type(B).x) # E: Revealed type is 'builtins.int' [case testSixMetaclassImportFrom] import six from metadefs import M class A(six.with_metaclass(M)): pass @six.add_metaclass(M) class B: pass reveal_type(type(A).x) # E: Revealed type is 'builtins.int' reveal_type(type(B).x) # E: Revealed type is 'builtins.int' [file metadefs.py] class M(type): x = 5 [case testSixMetaclassImport] import six import metadefs class A(six.with_metaclass(metadefs.M)): pass @six.add_metaclass(metadefs.M) class B: pass reveal_type(type(A).x) # E: Revealed type is 'builtins.int' reveal_type(type(B).x) # E: Revealed type is 'builtins.int' [file metadefs.py] class M(type): x = 5 [case testSixMetaclassAndBase] from typing import Iterable, Iterator import six class M(type, Iterable[int]): x = 5 def __iter__(self) -> Iterator[int]: ... class A: def foo(self): pass class B: def bar(self): pass class C1(six.with_metaclass(M, A)): pass @six.add_metaclass(M) class D1(A): pass class C2(six.with_metaclass(M, A, B)): pass @six.add_metaclass(M) class D2(A, B): pass reveal_type(type(C1).x) # E: Revealed type is 'builtins.int' reveal_type(type(D1).x) # E: Revealed type is 'builtins.int' reveal_type(type(C2).x) # E: Revealed type is 'builtins.int' reveal_type(type(D2).x) # E: Revealed type is 'builtins.int' C1().foo() D1().foo() C1().bar() # E: "C1" has no attribute "bar" D1().bar() # E: "D1" has no attribute "bar" for x in C1: reveal_type(x) # E: Revealed type is 'builtins.int*' for x in C2: reveal_type(x) # E: Revealed type is 'builtins.int*' C2().foo() D2().foo() C2().bar() D2().bar() C2().baz() # E: "C2" has no attribute "baz" D2().baz() # E: "D2" has no attribute "baz" [case testSixMetaclassGenerics] from typing import Generic, GenericMeta, TypeVar import six class DestroyableMeta(type): pass class Destroyable(six.with_metaclass(DestroyableMeta)): pass T_co = TypeVar('T_co', bound='Destroyable', covariant=True) class ArcMeta(GenericMeta, DestroyableMeta): pass class Arc(six.with_metaclass(ArcMeta, Generic[T_co], Destroyable)): pass @six.add_metaclass(ArcMeta) class Arc1(Generic[T_co], Destroyable): pass class MyDestr(Destroyable): pass reveal_type(Arc[MyDestr]()) # E: Revealed type is '__main__.Arc[__main__.MyDestr*]' reveal_type(Arc1[MyDestr]()) # E: Revealed type is '__main__.Arc1[__main__.MyDestr*]' [builtins fixtures/bool.pyi] [case testSixMetaclassErrors] import six class M(type): pass class A(object): pass def f() -> type: return M class C1(six.with_metaclass(M), object): pass # E: Invalid base class class C2(C1, six.with_metaclass(M)): pass # E: Invalid base class class C3(six.with_metaclass(A)): pass # E: Metaclasses not inheriting from 'type' are not supported @six.add_metaclass(A) # E: Metaclasses not inheriting from 'type' are not supported class D3(A): pass class C4(six.with_metaclass(M), metaclass=M): pass # E: Multiple metaclass definitions @six.add_metaclass(M) # E: Multiple metaclass definitions class D4(metaclass=M): pass class C5(six.with_metaclass(f())): pass # E: Dynamic metaclass not supported for 'C5' @six.add_metaclass(f()) # E: Dynamic metaclass not supported for 'D5' class D5: pass @six.add_metaclass(M) # E: Multiple metaclass definitions class CD(six.with_metaclass(M)): pass class M1(type): pass class Q1(metaclass=M1): pass @six.add_metaclass(M) # E: Inconsistent metaclass 
structure for 'CQA' class CQA(Q1): pass class CQW(six.with_metaclass(M, Q1)): pass # E: Inconsistent metaclass structure for 'CQW' [case testSixMetaclassErrors_python2] # flags: --python-version 2.7 import six class M(type): pass class C4(six.with_metaclass(M)): # E: Multiple metaclass definitions __metaclass__ = M [case testSixMetaclassAny] import t # type: ignore import six class E(metaclass=t.M): pass class F(six.with_metaclass(t.M)): pass @six.add_metaclass(t.M) class G: pass [case testCorrectEnclosingClassPushedInDeferred] class C: def __getattr__(self, attr: str) -> int: x: F return x.f class F: def __init__(self, f: int) -> None: self.f = f [out] [case testCorrectEnclosingClassPushedInDeferred2] from typing import TypeVar T = TypeVar('T', bound=C) class C: def m(self: T) -> T: class Inner: x: F f = x.f return self class F: def __init__(self, f: int) -> None: self.f = f [out] [case testCorrectEnclosingClassPushedInDeferred3] class A: def f(self) -> None: def g(x: int) -> int: return y y = int() [out] [case testMetaclassMemberAccessViaType] from typing import Type class M(type): def m(cls, x: int) -> int: pass class C(metaclass=M): pass x = C y: Type[C] = C reveal_type(type(C).m) # E: Revealed type is 'def (cls: __main__.M, x: builtins.int) -> builtins.int' reveal_type(type(x).m) # E: Revealed type is 'def (cls: __main__.M, x: builtins.int) -> builtins.int' reveal_type(type(y).m) # E: Revealed type is 'def (cls: __main__.M, x: builtins.int) -> builtins.int' [out] [case testMetaclassMemberAccessViaType2] from typing import Any, Type class M(type): def m(cls, x: int) -> int: pass B: Any class C(B, metaclass=M): pass x: Type[C] reveal_type(x.m) # E: Revealed type is 'def (x: builtins.int) -> builtins.int' reveal_type(x.whatever) # E: Revealed type is 'Any' [out] [case testMetaclassMemberAccessViaType3] from typing import Any, Type, TypeVar T = TypeVar('T') class C(Any): def bar(self: T) -> Type[T]: pass def foo(self) -> None: reveal_type(self.bar()) # E: Revealed type is 'Type[__main__.C*]' reveal_type(self.bar().__name__) # E: Revealed type is 'builtins.str' [builtins fixtures/type.pyi] [out] mypy-0.560/test-data/unit/check-classvar.test0000644€tŠÔÚ€2›s®0000001432713215007205025335 0ustar jukkaDROPBOX\Domain Users00000000000000[case testAssignmentOnClass] from typing import ClassVar class A: x = 1 # type: ClassVar[int] A.x = 2 [case testAssignmentOnInstance] from typing import ClassVar class A: x = 1 # type: ClassVar[int] A().x = 2 [out] main:4: error: Cannot assign to class variable "x" via instance [case testAssignmentOnSubclassInstance] from typing import ClassVar class A: x = 1 # type: ClassVar[int] class B(A): pass B().x = 2 [out] main:6: error: Cannot assign to class variable "x" via instance [case testOverrideOnSelf] from typing import ClassVar class A: x = None # type: ClassVar[int] def __init__(self) -> None: self.x = 0 [out] main:5: error: Cannot assign to class variable "x" via instance [case testOverrideOnSelfInSubclass] from typing import ClassVar class A: x = None # type: ClassVar[int] class B(A): def __init__(self) -> None: self.x = 0 [out] main:6: error: Cannot assign to class variable "x" via instance [case testReadingFromInstance] from typing import ClassVar class A: x = 1 # type: ClassVar[int] A().x reveal_type(A().x) [out] main:5: error: Revealed type is 'builtins.int' [case testReadingFromSelf] from typing import ClassVar class A: x = 1 # type: ClassVar[int] def __init__(self) -> None: reveal_type(self.x) [out] main:5: error: Revealed type is 'builtins.int' [case 
testTypecheckSimple] from typing import ClassVar class A: x = 1 # type: ClassVar[int] y = A.x # type: int [case testTypecheckWithUserType] from typing import ClassVar class A: pass class B: x = A() # type: ClassVar[A] [case testTypeCheckOnAssignment] from typing import ClassVar class A: pass class B: pass class C: x = None # type: ClassVar[A] C.x = B() [out] main:8: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testTypeCheckWithOverridden] from typing import ClassVar class A: pass class B(A): pass class C: x = A() # type: ClassVar[A] C.x = B() [case testRevealType] from typing import ClassVar class A: x = None # type: ClassVar[int] reveal_type(A.x) [out] main:4: error: Revealed type is 'builtins.int' [case testInfer] from typing import ClassVar class A: x = 1 # type: ClassVar[int] y = A.x reveal_type(y) [out] main:5: error: Revealed type is 'builtins.int' [case testAssignmentOnUnion] from typing import ClassVar, Union class A: x = None # type: int class B: x = None # type: ClassVar[int] c = A() # type: Union[A, B] c.x = 1 [out] main:7: error: Cannot assign to class variable "x" via instance [case testAssignmentOnInstanceFromType] from typing import ClassVar, Type class A: x = None # type: ClassVar[int] def f(a: Type[A]) -> None: a().x = 0 [out] main:5: error: Cannot assign to class variable "x" via instance [case testAssignmentOnInstanceFromSubclassType] from typing import ClassVar, Type class A: x = None # type: ClassVar[int] class B(A): pass def f(b: Type[B]) -> None: b().x = 0 [out] main:7: error: Cannot assign to class variable "x" via instance [case testClassVarWithList] from typing import ClassVar, List class A: x = None # type: ClassVar[List[int]] A.x = ['a'] A().x.append(1) A().x.append('') [builtins fixtures/list.pyi] [out] main:4: error: List item 0 has incompatible type "str"; expected "int" main:6: error: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [case testClassVarWithUnion] from typing import ClassVar, Union class A: x = None # type: ClassVar[Union[int, str]] class B: pass A.x = 0 A.x = 'a' A.x = B() reveal_type(A().x) [out] main:8: error: Incompatible types in assignment (expression has type "B", variable has type "Union[int, str]") main:9: error: Revealed type is 'Union[builtins.int, builtins.str]' [case testOverrideWithNarrowedUnion] from typing import ClassVar, Union class A: pass class B: pass class C: pass class D: x = None # type: ClassVar[Union[A, B, C]] class E(D): x = None # type: ClassVar[Union[A, B]] [case testOverrideWithExtendedUnion] from typing import ClassVar, Union class A: pass class B: pass class C: pass class D: x = None # type: ClassVar[Union[A, B]] class E(D): x = None # type: ClassVar[Union[A, B, C]] [out] main:8: error: Incompatible types in assignment (expression has type "Union[A, B, C]", base class "D" defined the type as "Union[A, B]") [case testAssignmentToCallableRet] from typing import ClassVar class A: x = None # type: ClassVar[int] def f() -> A: return A() f().x = 0 [out] main:6: error: Cannot assign to class variable "x" via instance [case testOverrideWithIncomatibleType] from typing import ClassVar class A: x = None # type: ClassVar[int] class B(A): x = None # type: ClassVar[str] [out] main:5: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [case testOverrideWithNormalAttribute] from typing import ClassVar class A: x = 1 # type: ClassVar[int] class B(A): x = 2 # type: int [out] main:5: error: 
Cannot override class variable (previously declared on base class "A") with instance variable
[case testOverrideWithAttributeWithClassVar]
from typing import ClassVar
class A:
    x = 1 # type: int
class B(A):
    x = 2 # type: ClassVar[int]
[out]
main:5: error: Cannot override instance variable (previously declared on base class "A") with class variable
[case testOverrideClassVarManyBases]
from typing import ClassVar
class A:
    x = 1 # type: ClassVar[int]
class B:
    x = 2 # type: int
class C(A, B):
    x = 3 # type: ClassVar[int]
[out]
main:7: error: Cannot override instance variable (previously declared on base class "B") with class variable
[case testOverrideClassVarWithClassVar]
from typing import ClassVar
class A:
    x = 1 # type: ClassVar[int]
class B(A):
    x = 2 # type: ClassVar[int]
[case testOverrideOnABCSubclass]
from abc import ABCMeta
from typing import ClassVar
class A(metaclass=ABCMeta):
    x = None # type: ClassVar[int]
class B(A):
    x = 0 # type: ClassVar[int]
[case testAcrossModules]
import m
reveal_type(m.A().x)
m.A().x = 0
[file m.py]
from typing import ClassVar
class A:
    x = None # type: ClassVar[int]
[out]
main:2: error: Revealed type is 'builtins.int'
main:3: error: Cannot assign to class variable "x" via instance

mypy-0.560/test-data/unit/check-columns.test

[case testColumnsSyntaxError]
# flags: --show-column-numbers
1 +
[out]
main:2:4: error: invalid syntax
[case testColumnsNestedFunctions]
# flags: --show-column-numbers
import typing
def f() -> 'A':
    def g() -> 'B':
        return A() # fail
    return B() # fail
class A: pass
class B: pass
[out]
main:5:8: error: Incompatible return value type (got "A", expected "B")
main:6:4: error: Incompatible return value type (got "B", expected "A")
[case testColumnsNestedFunctionsWithFastParse]
# flags: --show-column-numbers
import typing
def f() -> 'A':
    def g() -> 'B':
        return A() # fail
    return B() # fail
class A: pass
class B: pass
[out]
main:5:8: error: Incompatible return value type (got "A", expected "B")
main:6:4: error: Incompatible return value type (got "B", expected "A")
[case testColumnsMethodDefaultArgumentsAndSignatureAsComment]
# flags: --show-column-numbers
import typing
class A:
    def f(self, x = 1, y = 'hello'):
        # type: (int, str) -> str
        pass
A().f()
A().f(1)
A().f('') # E:0: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
A().f(1, 1) # E:0: Argument 2 to "f" of "A" has incompatible type "int"; expected "str"
A().f(1, 'hello', 'hi') # E:0: Too many arguments for "f" of "A"
[case testColumnsMultipleStatementsPerLine]
# flags: --show-column-numbers
x = 1
y = 'hello'
x = 2; y = x; y += 1
[out]
main:4:7: error: Incompatible types in assignment (expression has type "int", variable has type "str")
main:4:14: error: Unsupported operand types for + ("str" and "int")
[case testColumnsSimpleIsinstance]
# flags: --show-column-numbers
import typing
def f(x: object, n: int, s: str) -> None:
    n = x # E:4: Incompatible types in assignment (expression has type "object", variable has type "int")
    if isinstance(x, int):
        n = x
        s = x # E:8: Incompatible types in assignment (expression has type "int", variable has type "str")
    n = x # E:4: Incompatible types in assignment (expression has type "object", variable has type "int")
[builtins fixtures/isinstance.pyi]
[out]

mypy-0.560/test-data/unit/check-custom-plugin.test

-- Test cases for user-defined plugins
--
-- Note: Plugins
used by tests live under test-data/unit/plugins. Defining -- plugin files in test cases does not work reliably. [case testFunctionPlugin] # flags: --config-file tmp/mypy.ini def f() -> str: ... reveal_type(f()) # E: Revealed type is 'builtins.int' [file mypy.ini] [[mypy] plugins=/test-data/unit/plugins/fnplugin.py [case testFunctionPluginFullnameIsNotNone] # flags: --config-file tmp/mypy.ini from typing import Callable, TypeVar f: Callable[[], None] T = TypeVar('T') def g(x: T) -> T: return x # This strips out the name of a callable g(f)() [file mypy.ini] [[mypy] plugins=/test-data/unit/plugins/fnplugin.py [case testTwoPlugins] # flags: --config-file tmp/mypy.ini def f(): ... def g(): ... def h(): ... reveal_type(f()) # E: Revealed type is 'builtins.int' reveal_type(g()) # E: Revealed type is 'builtins.str' reveal_type(h()) # E: Revealed type is 'Any' [file mypy.ini] [[mypy] plugins=/test-data/unit/plugins/fnplugin.py, /test-data/unit/plugins/plugin2.py [case testMissingPlugin] # flags: --config-file tmp/mypy.ini [file mypy.ini] [[mypy] plugins=missing.py [out] tmp/mypy.ini:2: error: Can't find plugin 'tmp/missing.py' --' (work around syntax highlighting) [case testMultipleSectionsDefinePlugin] # flags: --config-file tmp/mypy.ini [file mypy.ini] [[acme] plugins=acmeplugin [[mypy] plugins=missing.py [[another] plugins=another_plugin [out] tmp/mypy.ini:4: error: Can't find plugin 'tmp/missing.py' --' (work around syntax highlighting) [case testInvalidPluginExtension] # flags: --config-file tmp/mypy.ini [file mypy.ini] [[mypy] plugins=dir/badext.pyi [file dir/badext.pyi] [out] tmp/mypy.ini:2: error: Plugin 'badext.pyi' does not have a .py extension [case testMissingPluginEntryPoint] # flags: --config-file tmp/mypy.ini [file mypy.ini] [[mypy] plugins = /test-data/unit/plugins/noentry.py [out] tmp/mypy.ini:2: error: Plugin '/test-data/unit/plugins/noentry.py' does not define entry point function "plugin" [case testInvalidPluginEntryPointReturnValue] # flags: --config-file tmp/mypy.ini def f(): pass f() [file mypy.ini] [[mypy] plugins=/test-data/unit/plugins/badreturn.py [out] tmp/mypy.ini:3: error: Type object expected as the return value of "plugin"; got None (in /test-data/unit/plugins/badreturn.py) [case testInvalidPluginEntryPointReturnValue2] # flags: --config-file tmp/mypy.ini def f(): pass f() [file mypy.ini] [[mypy] plugins=/test-data/unit/plugins/badreturn2.py [out] tmp/mypy.ini:2: error: Return value of "plugin" must be a subclass of "mypy.plugin.Plugin" (in /test-data/unit/plugins/badreturn2.py) [case testAttributeTypeHookPlugin] # flags: --config-file tmp/mypy.ini from typing import Callable from m import Signal s: Signal[Callable[[int], None]] = Signal() s(1) s('') # E: Argument 1 has incompatible type "str"; expected "int" [file m.py] from typing import TypeVar, Generic, Callable T = TypeVar('T', bound=Callable[..., None]) class Signal(Generic[T]): __call__: Callable[..., None] # This type is replaced by the plugin [file mypy.ini] [[mypy] plugins=/test-data/unit/plugins/attrhook.py [case testTypeAnalyzeHookPlugin] # flags: --config-file tmp/mypy.ini from typing import Callable from mypy_extensions import DefaultArg from m import Signal s: Signal[[int, DefaultArg(str, 'x')]] = Signal() reveal_type(s) # E: Revealed type is 'm.Signal[def (builtins.int, x: builtins.str =)]' s.x # E: "Signal[Callable[[int, str], None]]" has no attribute "x" ss: Signal[int, str] # E: Invalid "Signal" type (expected "Signal[[t, ...]]") [file m.py] from typing import TypeVar, Generic, Callable T = 
TypeVar('T', bound=Callable[..., None]) class Signal(Generic[T]): __call__: Callable[..., None] [file mypy.ini] [[mypy] plugins=/test-data/unit/plugins/type_anal_hook.py [builtins fixtures/dict.pyi] [case testFunctionPluginHookForReturnedCallable] # flags: --config-file tmp/mypy.ini from m import decorator1, decorator2 @decorator1() def f() -> None: pass @decorator2() def g() -> None: pass reveal_type(f) # E: Revealed type is 'def (*Any, **Any) -> builtins.str' reveal_type(g) # E: Revealed type is 'def (*Any, **Any) -> builtins.int' [file m.py] from typing import Callable def decorator1() -> Callable[..., Callable[..., int]]: pass def decorator2() -> Callable[..., Callable[..., int]]: pass [file mypy.ini] [[mypy] plugins=/test-data/unit/plugins/named_callable.py mypy-0.560/test-data/unit/check-default-plugin.test0000644€tŠÔÚ€2›s®0000000222213215007205026426 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for the default plugin -- -- Note that we have additional test cases in pythoneval.test (that use real typeshed stubs). [case testContextManagerWithGenericFunction] from contextlib import contextmanager from typing import TypeVar, Iterator T = TypeVar('T') @contextmanager def yield_id(item: T) -> Iterator[T]: yield item reveal_type(yield_id) # E: Revealed type is 'def [T] (item: T`-1) -> contextlib.GeneratorContextManager[T`-1]' with yield_id(1) as x: reveal_type(x) # E: Revealed type is 'builtins.int*' f = yield_id def g(x, y): pass f = g # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[T], GeneratorContextManager[T]]") [typing fixtures/typing-full.pyi] [case testContextManagerWithUnspecifiedArguments] from contextlib import contextmanager from typing import Callable, Iterator c: Callable[..., Iterator[int]] reveal_type(c) # E: Revealed type is 'def (*Any, **Any) -> typing.Iterator[builtins.int]' reveal_type(contextmanager(c)) # E: Revealed type is 'def (*Any, **Any) -> contextlib.GeneratorContextManager[builtins.int*]' [typing fixtures/typing-full.pyi] mypy-0.560/test-data/unit/check-dynamic-typing.test0000644€tŠÔÚ€2›s®0000003544213215007205026454 0ustar jukkaDROPBOX\Domain Users00000000000000-- Assignment -- ---------- [case testAssignmentWithDynamic] from typing import Any d = None # type: Any a = None # type: A a = d # Everything ok d = a d = d d.x = a d.x = d class A: pass [case testMultipleAssignmentWithDynamic] from typing import Any d = None # type: Any a, b = None, None # type: (A, B) d, a = b, b # E: Incompatible types in assignment (expression has type "B", variable has type "A") d, d = d, d, d # E: Too many values to unpack (2 expected, 3 provided) a, b = d, d d, d = a, b a, b = d s, t = d class A: pass class B: pass -- Expressions -- ----------- [case testCallingFunctionWithDynamicArgumentTypes] from typing import Any a, b = None, None # type: (A, B) b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = f(a) a = f(b) a = f(None) a = f(f) def f(x: Any) -> 'A': pass class A: pass class B: pass [case testCallingWithDynamicReturnType] from typing import Any a, b = None, None # type: (A, B) a = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A" a = f(a) b = f(a) def f(x: 'A') -> Any: pass class A: pass class B: pass [case testBinaryOperationsWithDynamicLeftOperand] from typing import Any d = None # type: Any a = None # type: A c = None # type: C b = None # type: bool n = 0 d in a # E: Unsupported right operand type for in ("A") d and a d or 
a c = d and b # E: Incompatible types in assignment (expression has type "Union[Any, bool]", variable has type "C") c = d or b # E: Incompatible types in assignment (expression has type "Union[Any, bool]", variable has type "C") c = d + a c = d - a c = d * a c = d / a c = d // a c = d % a c = d ** a b = d == a b = d != a b = d < a b = d <= a b = d > a b = d >= a b = d in c b = d and b b = d or b class A: pass class C: def __contains__(self, a: A) -> bool: pass [file builtins.py] class object: def __init__(self): pass class bool: pass class int: pass class type: pass class function: pass class str: pass [case testBinaryOperationsWithDynamicAsRightOperand] from typing import Any d = None # type: Any a = None # type: A c = None # type: C b = None # type: bool n = 0 a and d a or d c = a in d c = b and d # E: Incompatible types in assignment (expression has type "Union[bool, Any]", variable has type "C") c = b or d # E: Incompatible types in assignment (expression has type "Union[bool, Any]", variable has type "C") b = a + d b = a / d c = a + d c = a - d c = a * d c = a / d c = a // d c = a % d c = a ** d b = a in d b = b and d b = b or d class A: def __add__(self, a: 'A') -> 'C': pass def __sub__(self, a: 'A') -> 'C': pass def __mul__(self, a: 'A') -> 'C': pass def __truediv__(self, a: 'A') -> 'C': pass def __floordiv__(self, a: 'A') -> 'C': pass def __mod__(self, a: 'A') -> 'C': pass def __pow__(self, a: 'A') -> 'C': pass def _lt(self, a: 'A') -> bool: pass def _gt(self, a: 'A') -> bool: pass class C: pass [file builtins.py] class object: def __init__(self): pass class bool: pass class int: pass class type: pass class function: pass class str: pass [case testDynamicWithUnaryExpressions] from typing import Any d = None # type: Any a = None # type: A b = None # type: bool a = not d # E: Incompatible types in assignment (expression has type "bool", variable has type "A") b = not d a = -d class A: pass [builtins fixtures/bool.pyi] [out] [case testDynamicWithMemberAccess] from typing import Any d = None # type: Any a = None # type: A a = d.foo(a()) # E: "A" not callable a = d.x a = d.foo(a, a) d.x = a d.x.y.z # E: "A" has no attribute "y" class A: pass [out] [case testIndexingWithDynamic] from typing import Any d = None # type: Any a = None # type: A a = d[a()] # E: "A" not callable d[a()] = a # E: "A" not callable a = d[a] d[a] = a d[a], d[a] = a, a class A: pass [case testTupleExpressionsWithDynamci] from typing import Tuple, Any t2 = None # type: Tuple[A, A] d = None # type: Any t2 = (d, d, d) # E: Incompatible types in assignment (expression has type "Tuple[Any, Any, Any]", variable has type "Tuple[A, A]") t2 = (d, d) class A: pass [builtins fixtures/tuple.pyi] [case testCastsWithDynamicType] from typing import Any, cast class A: pass class B: pass d = None # type: Any a = None # type: A b = None # type: B b = cast(A, d) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = cast(A, d) b = cast(Any, d) a = cast(Any, f()) def f() -> None: pass [case testCompatibilityOfDynamicWithOtherTypes] from typing import Any, Tuple d = None # type: Any t = None # type: Tuple[A, A] # TODO: callable types, overloaded functions d = None # All ok d = t d = g d = A t = d f = d def g(a: 'A') -> None: pass class A: pass class B: pass [builtins fixtures/tuple.pyi] -- Statements -- ---------- [case testDynamicCondition] from typing import Any d = None # type: Any while d: pass if d: pass elif d: pass [builtins fixtures/bool.pyi] [case testRaiseWithDynamic] from typing import Any 
d = None # type: Any raise d [builtins fixtures/exception.pyi] [case testReturnWithDynamic] from typing import Any d = None # type: Any def f() -> None: return d # Ok def g() -> 'A': return d # Ok class A: pass -- Implicit dynamic types for functions -- ------------------------------------ [case testImplicitGlobalFunctionSignature] from typing import Any, Callable x = None # type: Any a = None # type: A g = None # type: Callable[[], None] h = None # type: Callable[[A], None] f() # E: Too few arguments for "f" f(x, x) # E: Too many arguments for "f" g = f # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[], None]") f(a) f(x) a = f(a) h = f def f(x): pass class A: pass [case testImplicitGlobalFunctionSignatureWithDifferentArgCounts] from typing import Callable g0 = None # type: Callable[[], None] g1 = None # type: Callable[[A], None] g2 = None # type: Callable[[A, A], None] a = None # type: A g1 = f0 # E: Incompatible types in assignment (expression has type "Callable[[], Any]", variable has type "Callable[[A], None]") g2 = f0 # E: Incompatible types in assignment (expression has type "Callable[[], Any]", variable has type "Callable[[A, A], None]") g0 = f2 # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[], None]") g1 = f2 # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[A], None]") g0 = g0 g2 = f2 f0() f2(a, a) def f0(): pass def f2(x, y): pass class A: pass [case testImplicitGlobalFunctionSignatureWithDefaultArgs] from typing import Callable a, b = None, None # type: (A, B) g0 = None # type: Callable[[], None] g1 = None # type: Callable[[A], None] g2 = None # type: Callable[[A, A], None] g3 = None # type: Callable[[A, A, A], None] g4 = None # type: Callable[[A, A, A, A], None] f01(a, a) # Fail f13() # Fail f13(a, a, a, a) # Fail g2 = f01 # Fail g0 = f13 # Fail g4 = f13 # Fail f01() f01(a) f13(a) f13(a, a) f13(a, a, a) g0 = f01 g1 = f01 g1 = f13 g2 = f13 g3 = f13 def f01(x = b): pass def f13(x, y = b, z = b): pass class A: pass class B: pass [out] main:10: error: Too many arguments for "f01" main:11: error: Too few arguments for "f13" main:12: error: Too many arguments for "f13" main:13: error: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[A, A], None]") main:14: error: Incompatible types in assignment (expression has type "Callable[[Any, Any, Any], Any]", variable has type "Callable[[], None]") main:15: error: Incompatible types in assignment (expression has type "Callable[[Any, Any, Any], Any]", variable has type "Callable[[A, A, A, A], None]") [case testSkipTypeCheckingWithImplicitSignature] a = None # type: A def f(): a() def g(x): a() a.x a + a if a(): a() class A: pass [builtins fixtures/bool.pyi] [case testSkipTypeCheckingWithImplicitSignatureAndDefaultArgs] a = None # type: A def f(x=a()): a() def g(x, y=a, z=a()): a() class A: pass [case testImplicitMethodSignature] from typing import Callable g0 = None # type: Callable[[], None] g1 = None # type: Callable[[A], None] g2 = None # type: Callable[[A, A], None] a = None # type: A g0 = a.f # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[], None]") g2 = a.f # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[A, A], None]") a = a.f # E: Incompatible types in 
assignment (expression has type "Callable[[Any], Any]", variable has type "A") class A: def g(self) -> None: a = self.f(a) def f(self, x): pass g1 = a.f a = a.f(a) [case testSkipTypeCheckingImplicitMethod] a = None # type: A class A: def f(self): a() def g(self, x, y=a()): a() [case testImplicitInheritedMethod] from typing import Callable g0 = None # type: Callable[[], None] g1 = None # type: Callable[[A], None] a = None # type: A g0 = a.f # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[], None]") g1 = a.f a = a.f(a) class B: def f(self, x): pass class A(B): def g(self) -> None: a = self.f(a) [case testEmptyReturnWithImplicitSignature] import typing def f(): return class A: def g(self): return [case testVarArgsWithImplicitSignature] from typing import Any o = None # type: Any def f(x, *a): pass f() # E: Too few arguments for "f" f(o) f(o, o) f(o, o, o) [builtins fixtures/list.pyi] -- Implicit types for constructors -- ------------------------------- [case testInitMethodWithImplicitSignature] from typing import Callable f1 = None # type: Callable[[A], A] f2 = None # type: Callable[[A, A], A] a = None # type: A A(a) # Fail f1 = A # Fail A(a, a) f2 = A class A: def __init__(self, a, b): pass [out] main:6: error: Too few arguments for "A" main:7: error: Incompatible types in assignment (expression has type "Type[A]", variable has type "Callable[[A], A]") [case testUsingImplicitTypeObjectWithIs] t = None # type: type t = A t = B class A: pass class B: def __init__(self): pass -- Type compatibility -- ------------------ [case testTupleTypeCompatibility] from typing import Any, Tuple t1 = None # type: Tuple[Any, A] t2 = None # type: Tuple[A, Any] t3 = None # type: Tuple[Any, Any] t4 = None # type: Tuple[A, A] t5 = None # type: Tuple[Any, Any, Any] t3 = t5 # E: Incompatible types in assignment (expression has type "Tuple[Any, Any, Any]", variable has type "Tuple[Any, Any]") t5 = t4 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[Any, Any, Any]") t1 = t1 t1 = t2 t1 = t3 t1 = t4 t2 = t1 t2 = t3 t2 = t4 t3 = t1 t3 = t2 t3 = t4 t4 = t1 t4 = t2 t4 = t3 class A: pass [builtins fixtures/tuple.pyi] [case testFunctionTypeCompatibilityAndReturnTypes] from typing import Any, Callable f1 = None # type: Callable[[], Any] f11 = None # type: Callable[[], Any] f2 = None # type: Callable[[], A] f3 = None # type: Callable[[], None] f2 = f3 f1 = f2 f1 = f3 f2 = f11 f3 = f11 class A: pass [case testFunctionTypeCompatibilityAndArgumentTypes] from typing import Any, Callable f1 = None # type: Callable[[A, Any], None] f2 = None # type: Callable[[Any, A], None] f3 = None # type: Callable[[A, A], None] f1 = f1 f1 = f2 f1 = f3 f2 = f1 f2 = f2 f2 = f3 f3 = f1 f3 = f2 f3 = f3 class A: pass [case testFunctionTypeCompatibilityAndArgumentCounts] from typing import Any, Callable f1 = None # type: Callable[[Any], None] f2 = None # type: Callable[[Any, Any], None] f1 = f2 # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], None]", variable has type "Callable[[Any], None]") -- Overriding -- ---------- [case testOverridingMethodWithDynamicTypes] from typing import Any a, b = None, None # type: (A, B) b.f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A" a = a.f(b) class B: def f(self, x: 'A') -> 'B': pass def g(self, x: 'B') -> None: pass class A(B): def f(self, x: Any) -> Any: pass def g(self, x: Any) -> None: pass [case testOverridingMethodWithImplicitDynamicTypes] 
a, b = None, None # type: (A, B)
b.f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A"
a = a.f(b)
class B:
    def f(self, x: 'A') -> 'B': pass
    def g(self, x: 'B') -> None: pass
class A(B):
    def f(self, x): pass
    def g(self, x): pass
[case testOverridingMethodAcrossHierarchy]
import typing
class C:
    def f(self, a: 'A') -> None: pass
class B(C):
    def f(self, a): pass
class A(B):
    def f(self, a: 'D') -> None: # E: Argument 1 of "f" incompatible with supertype "C"
        pass
class D: pass
[out]
[case testInvalidOverrideArgumentCountWithImplicitSignature1]
import typing
class B:
    def f(self, x: A) -> None: pass
class A(B):
    def f(self, x, y): # dynamic function not type checked
        x()
[out]
[case testInvalidOverrideArgumentCountWithImplicitSignature2]
import typing
class B:
    def f(self, x, y): pass
class A(B):
    def f(self, x: 'A') -> None: # E: Signature of "f" incompatible with supertype "B"
        pass
[out]
[case testInvalidOverrideArgumentCountWithImplicitSignature3]
import typing
class B:
    def f(self, x: A) -> None: pass
class A(B):
    def f(self, x, y) -> None: # E: Signature of "f" incompatible with supertype "B"
        x()
[out]
[case testInvalidOverrideWithImplicitSignatureAndClassMethod1]
class B:
    @classmethod
    def f(cls, x, y): pass
class A(B):
    @classmethod
    def f(cls, x, y, z): pass # No error since no annotations
[builtins fixtures/classmethod.pyi]
[case testInvalidOverrideWithImplicitSignatureAndClassMethod2]
class B:
    @classmethod
    def f(cls, x: int, y): pass
class A(B):
    @classmethod
    def f(cls, x, y, z): pass # No error since no annotations
[builtins fixtures/classmethod.pyi]
[case testInvalidOverrideWithImplicitSignatureAndStaticMethod1]
class B:
    @staticmethod
    def f(x, y): pass
class A(B):
    @staticmethod
    def f(x, y, z): pass # No error since no annotations
[builtins fixtures/classmethod.pyi]
[case testInvalidOverrideWithImplicitSignatureAndStaticMethod2]
class B:
    @staticmethod
    def f(self, x: int, y): pass
class A(B):
    @staticmethod
    def f(self, x, y, z): pass # No error since no annotations
[builtins fixtures/classmethod.pyi]
-- Don't complain about too few/many arguments in dynamic functions
-- ----------------------------------------------------------------
[case testTooManyArgsInDynamic]
def f() -> None: pass
def g():
    f(1) # Silent
[out]
[case testTooFewArgsInDynamic]
def f(a: int) -> None: pass
def g():
    f() # Silent
[out]
[case testJustRightInDynamic]
def f(a: int) -> None: pass
def g():
    f('') # Silent
[out]

mypy-0.560/test-data/unit/check-enum.test

-- This test file checks Enum
[case testEnumBasics]
from enum import Enum
class Medal(Enum):
    gold = 1
    silver = 2
    bronze = 3
m = Medal.gold
m = 1
[out]
main:7: error: Incompatible types in assignment (expression has type "int", variable has type "Medal")
[case testEnumNameAndValue]
from enum import Enum
class Truth(Enum):
    true = True
    false = False
x = ''
x = Truth.true.name
reveal_type(Truth.true.name)
reveal_type(Truth.false.value)
[builtins fixtures/bool.pyi]
[out]
main:7: error: Revealed type is 'builtins.str'
main:8: error: Revealed type is 'Any'
[case testEnumUnique]
import enum
@enum.unique
class E(enum.Enum):
    x = 1
    y = 1 # NOTE: This duplicate value is not detected by mypy at the moment
x = 1
x = E.x
[out]
main:7: error: Incompatible types in assignment (expression has type "E", variable has type "int")
[case testIntEnum_assignToIntVariable]
from enum import IntEnum
class N(IntEnum):
    x = 1
    y = 1
n = 1
n = N.x # Subclass of int, so it's okay
s = ''
s = N.y
[out] main:8: error: Incompatible types in assignment (expression has type "N", variable has type "str") [case testIntEnum_functionTakingIntEnum] from enum import IntEnum class SomeIntEnum(IntEnum): x = 1 def takes_some_int_enum(n: SomeIntEnum): pass takes_some_int_enum(SomeIntEnum.x) takes_some_int_enum(1) # Error takes_some_int_enum(SomeIntEnum(1)) # How to deal with the above [out] main:7: error: Argument 1 to "takes_some_int_enum" has incompatible type "int"; expected "SomeIntEnum" [case testIntEnum_functionTakingInt] from enum import IntEnum class SomeIntEnum(IntEnum): x = 1 def takes_int(i: int): pass takes_int(SomeIntEnum.x) takes_int(2) [case testIntEnum_functionReturningIntEnum] from enum import IntEnum class SomeIntEnum(IntEnum): x = 1 def returns_some_int_enum() -> SomeIntEnum: return SomeIntEnum.x an_int = 1 an_int = returns_some_int_enum() an_enum = SomeIntEnum.x an_enum = returns_some_int_enum() [out] [case testEnumMethods] from enum import Enum class Color(Enum): red = 1 green = 2 def m(self, x: int): pass @staticmethod def m2(x: int): pass Color.red.m('') Color.m2('') [builtins fixtures/staticmethod.pyi] [out] main:11: error: Argument 1 to "m" of "Color" has incompatible type "str"; expected "int" main:12: error: Argument 1 to "m2" of "Color" has incompatible type "str"; expected "int" [case testIntEnum_ExtendedIntEnum_functionTakingExtendedIntEnum] from enum import IntEnum class ExtendedIntEnum(IntEnum): pass class SomeExtIntEnum(ExtendedIntEnum): x = 1 def takes_int(i: int): pass takes_int(SomeExtIntEnum.x) def takes_some_ext_int_enum(s: SomeExtIntEnum): pass takes_some_ext_int_enum(SomeExtIntEnum.x) [case testNamedTupleEnum] from typing import NamedTuple from enum import Enum N = NamedTuple('N', [('bar', int)]) class E(N, Enum): X = N(1) def f(x: E) -> None: pass f(E.X) [case testEnumCall] from enum import IntEnum class E(IntEnum): a = 1 x = None # type: int reveal_type(E(x)) [out] main:5: error: Revealed type is '__main__.E' [case testEnumIndex] from enum import IntEnum class E(IntEnum): a = 1 s = None # type: str reveal_type(E[s]) [out] main:5: error: Revealed type is '__main__.E' [case testEnumIndexError] from enum import IntEnum class E(IntEnum): a = 1 E[1] # E: Enum index should be a string (actual index type "int") x = E[1] # E: Enum index should be a string (actual index type "int") [case testEnumIndexIsNotAnAlias] from enum import Enum class E(Enum): a = 1 b = 2 reveal_type(E['a']) # E: Revealed type is '__main__.E' E['a'] x = E['a'] reveal_type(x) # E: Revealed type is '__main__.E' def get_member(name: str) -> E: val = E[name] return val reveal_type(get_member('a')) # E: Revealed type is '__main__.E' [case testGenericEnum] from enum import Enum from typing import Generic, TypeVar T = TypeVar('T') class F(Generic[T], Enum): # E: Enum class cannot be generic x: T y: T reveal_type(F[int].x) # E: Revealed type is '__main__.F[builtins.int*]' [case testEnumFlag] from enum import Flag class C(Flag): a = 1 b = 2 x = C.a x = 1 x = x | C.b [out] main:6: error: Incompatible types in assignment (expression has type "int", variable has type "C") [case testEnumIntFlag] from enum import IntFlag class C(IntFlag): a = 1 b = 2 x = C.a x = 1 x = x | C.b [out] main:6: error: Incompatible types in assignment (expression has type "int", variable has type "C") [case testAnonymousEnum] from enum import Enum class A: def f(self) -> None: class E(Enum): a = 1 self.x = E.a a = A() reveal_type(a.x) [out] main:8: error: Revealed type is '__main__.E@4' [case testEnumInClassBody] from enum 
import Enum class A: class E(Enum): a = 1 class B: class E(Enum): a = 1 x = A.E.a y = B.E.a x = y [out] main:10: error: Incompatible types in assignment (expression has type "__main__.B.E", variable has type "__main__.A.E") [case testFunctionalEnumString] from enum import Enum, IntEnum E = Enum('E', 'foo bar') I = IntEnum('I', ' bar, baz ') reveal_type(E.foo) reveal_type(E.bar.value) reveal_type(I.bar) reveal_type(I.baz.value) [out] main:4: error: Revealed type is '__main__.E' main:5: error: Revealed type is 'Any' main:6: error: Revealed type is '__main__.I' main:7: error: Revealed type is 'builtins.int' [case testFunctionalEnumListOfStrings] from enum import Enum, IntEnum E = Enum('E', ('foo', 'bar')) F = IntEnum('F', ['bar', 'baz']) reveal_type(E.foo) reveal_type(F.baz) [out] main:4: error: Revealed type is '__main__.E' main:5: error: Revealed type is '__main__.F' [case testFunctionalEnumListOfPairs] from enum import Enum, IntEnum E = Enum('E', [('foo', 1), ['bar', 2]]) F = IntEnum('F', (['bar', 1], ('baz', 2))) reveal_type(E.foo) reveal_type(F.baz) reveal_type(E.foo.value) reveal_type(F.bar.name) [out] main:4: error: Revealed type is '__main__.E' main:5: error: Revealed type is '__main__.F' main:6: error: Revealed type is 'Any' main:7: error: Revealed type is 'builtins.str' [case testFunctionalEnumDict] from enum import Enum, IntEnum E = Enum('E', {'foo': 1, 'bar': 2}) F = IntEnum('F', {'bar': 1, 'baz': 2}) reveal_type(E.foo) reveal_type(F.baz) reveal_type(E.foo.value) reveal_type(F.bar.name) [out] main:4: error: Revealed type is '__main__.E' main:5: error: Revealed type is '__main__.F' main:6: error: Revealed type is 'Any' main:7: error: Revealed type is 'builtins.str' [case testFunctionalEnumErrors] from enum import Enum, IntEnum A = Enum('A') B = Enum('B', 42) C = Enum('C', 'a b', 'x') D = Enum('D', foo) bar = 'x y z' E = Enum('E', bar) I = IntEnum('I') J = IntEnum('I', 42) K = IntEnum('I', 'p q', 'z') L = Enum('L', ' ') M = Enum('M', ()) N = IntEnum('M', []) P = Enum('P', [42]) Q = Enum('Q', [('a', 42, 0)]) R = IntEnum('R', [[0, 42]]) S = Enum('S', {1: 1}) T = Enum('T', keyword='a b') U = Enum('U', *['a']) V = Enum('U', **{'a': 1}) W = Enum('W', 'a b') W.c [out] main:2: error: Too few arguments for Enum() main:3: error: Enum() expects a string, tuple, list or dict literal as the second argument main:4: error: Too many arguments for Enum() main:5: error: Enum() expects a string, tuple, list or dict literal as the second argument main:5: error: Name 'foo' is not defined main:7: error: Enum() expects a string, tuple, list or dict literal as the second argument main:8: error: Too few arguments for IntEnum() main:9: error: IntEnum() expects a string, tuple, list or dict literal as the second argument main:10: error: Too many arguments for IntEnum() main:11: error: Enum() needs at least one item main:12: error: Enum() needs at least one item main:13: error: IntEnum() needs at least one item main:14: error: Enum() with tuple or list expects strings or (name, value) pairs main:15: error: Enum() with tuple or list expects strings or (name, value) pairs main:16: error: IntEnum() with tuple or list expects strings or (name, value) pairs main:17: error: Enum() with dict literal requires string literals main:18: error: Unexpected arguments to Enum() main:19: error: Unexpected arguments to Enum() main:20: error: Unexpected arguments to Enum() main:22: error: "Type[W]" has no attribute "c" [case testFunctionalEnumFlag] from enum import Flag, IntFlag A = Flag('A', 'x y') B = IntFlag('B', 'a b') 
reveal_type(A.x) reveal_type(B.a) [out] main:4: error: Revealed type is '__main__.A' main:5: error: Revealed type is '__main__.B' [case testAnonymousFunctionalEnum] from enum import Enum class A: def f(self) -> None: E = Enum('E', 'a b') self.x = E.a a = A() reveal_type(a.x) [out] main:7: error: Revealed type is '__main__.A.E@4' [case testFunctionalEnumInClassBody] from enum import Enum class A: E = Enum('E', 'a b') class B: E = Enum('E', 'a b') x = A.E.a y = B.E.a x = y [out] main:8: error: Incompatible types in assignment (expression has type "__main__.B.E", variable has type "__main__.A.E") [case testEnumWorkWithForward] from enum import Enum a: E = E.x class E(Enum): x = 1 y = 2 [out] [case testEnumWorkWithForward2] from enum import Enum b: F F = Enum('F', {'x': 1, 'y': 2}) def fn(x: F) -> None: pass fn(b) [out] [case testFunctionalEnum_python2] from enum import Enum Eu = Enum(u'Eu', u'a b') Eb = Enum(b'Eb', b'a b') Gu = Enum(u'Gu', {u'a': 1}) Gb = Enum(b'Gb', {b'a': 1}) Hu = Enum(u'Hu', [u'a']) Hb = Enum(b'Hb', [b'a']) Eu.a Eb.a Gu.a Gb.a Hu.a Hb.a [out] [case testEnumIncremental] import m reveal_type(m.E.a) reveal_type(m.F.b) [file m.py] from enum import Enum class E(Enum): a = 1 b = 2 F = Enum('F', 'a b') [rechecked] [stale] [out1] main:2: error: Revealed type is 'm.E' main:3: error: Revealed type is 'm.F' [out2] main:2: error: Revealed type is 'm.E' main:3: error: Revealed type is 'm.F' mypy-0.560/test-data/unit/check-expressions.test0000644€tŠÔÚ€2›s®0000013314113215007205026075 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for simple expressions. -- -- See also: -- * check-functions.test contains test cases for calls. -- * check-varargs.test contains test cases for *args. -- * check-dynamic.test contains test cases related to 'Any' type. -- * check-generics.test contains test cases for generic values. 
-- None expression -- --------------- [case testNoneAsRvalue] import typing a = None # type: A class A: pass [out] [case testNoneAsArgument] import typing def f(x: 'A', y: 'B') -> None: pass f(None, None) class A: pass class B(A): pass [out] -- Simple expressions -- ------------------ [case testIntLiteral] a = 0 b = None # type: A b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "A") a = 1 class A: pass [case testStrLiteral] a = '' b = None # type: A b = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "A") a = 'x' a = r"x" a = """foo""" class A: pass [case testFloatLiteral] a = 0.0 b = None # type: A b = 1.1 # E: Incompatible types in assignment (expression has type "float", variable has type "A") a = 1.1 class A: pass [file builtins.py] class object: def __init__(self): pass class type: pass class function: pass class float: pass class str: pass [case testComplexLiteral] a = 0.0j b = None # type: A b = 1.1j # E: Incompatible types in assignment (expression has type "complex", variable has type "A") a = 1.1j class A: pass [file builtins.py] class object: def __init__(self): pass class type: pass class function: pass class complex: pass class str: pass [case testBytesLiteral] b, a = None, None # type: (bytes, A) b = b'foo' b = br"foo" b = b'''foo''' a = b'foo' # E: Incompatible types in assignment (expression has type "bytes", variable has type "A") class A: pass [file builtins.py] class object: def __init__(self): pass class type: pass class tuple: pass class function: pass class bytes: pass class str: pass [case testUnicodeLiteralInPython3] s = None # type: str s = u'foo' b = None # type: bytes b = u'foo' # E: Incompatible types in assignment (expression has type "str", variable has type "bytes") [builtins fixtures/primitives.pyi] -- Binary operators -- ---------------- [case testAdd] a, b, c = None, None, None # type: (A, B, C) c = a + c # Fail a = a + b # Fail c = b + a # Fail c = a + b class A: def __add__(self, x: 'B') -> 'C': pass class B: pass class C: pass [out] main:3: error: Unsupported operand types for + ("A" and "C") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A") main:5: error: Unsupported left operand type for + ("B") [case testAdd] a, b, c = None, None, None # type: (A, B, C) c = a + c # Fail a = a + b # Fail c = b + a # Fail c = a + b class A: def __add__(self, x: 'B') -> 'C': pass class B: pass class C: pass [out] main:3: error: Unsupported operand types for + ("A" and "C") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A") main:5: error: Unsupported left operand type for + ("B") [case testSub] a, b, c = None, None, None # type: (A, B, C) c = a - c # Fail a = a - b # Fail c = b - a # Fail c = a - b class A: def __sub__(self, x: 'B') -> 'C': pass class B: pass class C: pass [out] main:3: error: Unsupported operand types for - ("A" and "C") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A") main:5: error: Unsupported left operand type for - ("B") [case testMul] a, b, c = None, None, None # type: (A, B, C) c = a * c # Fail a = a * b # Fail c = b * a # Fail c = a * b class A: def __mul__(self, x: 'B') -> 'C': pass class B: pass class C: pass [out] main:3: error: Unsupported operand types for * ("A" and "C") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A") main:5: error: Unsupported left operand type for * ("B") 
[case testMatMul] a, b, c = None, None, None # type: (A, B, C) c = a @ c # E: Unsupported operand types for @ ("A" and "C") a = a @ b # E: Incompatible types in assignment (expression has type "C", variable has type "A") c = b @ a # E: Unsupported left operand type for @ ("B") c = a @ b class A: def __matmul__(self, x: 'B') -> 'C': pass class B: pass class C: pass [case testDiv] a, b, c = None, None, None # type: (A, B, C) c = a / c # Fail a = a / b # Fail c = b / a # Fail c = a / b class A: def __truediv__(self, x: 'B') -> 'C': pass class B: pass class C: pass [out] main:3: error: Unsupported operand types for / ("A" and "C") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A") main:5: error: Unsupported left operand type for / ("B") [case testIntDiv] a, b, c = None, None, None # type: (A, B, C) c = a // c # Fail a = a // b # Fail c = b // a # Fail c = a // b class A: def __floordiv__(self, x: 'B') -> 'C': pass class B: pass class C: pass [out] main:3: error: Unsupported operand types for // ("A" and "C") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A") main:5: error: Unsupported left operand type for // ("B") [case testMod] a, b, c = None, None, None # type: (A, B, C) c = a % c # Fail a = a % b # Fail c = b % a # Fail c = a % b class A: def __mod__(self, x: 'B') -> 'C': pass class B: pass class C: pass [out] main:3: error: Unsupported operand types for % ("A" and "C") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A") main:5: error: Unsupported left operand type for % ("B") [case testPow] a, b, c = None, None, None # type: (A, B, C) c = a ** c # Fail a = a ** b # Fail c = b ** a # Fail c = a ** b class A: def __pow__(self, x: 'B') -> 'C': pass class B: pass class C: pass [out] main:3: error: Unsupported operand types for ** ("A" and "C") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A") main:5: error: Unsupported left operand type for ** ("B") [case testMiscBinaryOperators] a, b = None, None # type: (A, B) b = a & a # Fail b = a | b # Fail b = a ^ a # Fail b = a << b # Fail b = a >> a # Fail b = a & b b = a | a b = a ^ b b = a << a b = a >> b class A: def __and__(self, x: 'B') -> 'B': pass def __or__(self, x: 'A') -> 'B': pass def __xor__(self, x: 'B') -> 'B': pass def __lshift__(self, x: 'A') -> 'B': pass def __rshift__(self, x: 'B') -> 'B': pass class B: pass [out] main:3: error: Unsupported operand types for & ("A" and "A") main:4: error: Unsupported operand types for | ("A" and "B") main:5: error: Unsupported operand types for ^ ("A" and "A") main:6: error: Unsupported operand types for << ("A" and "B") main:7: error: Unsupported operand types for >> ("A" and "A") [case testBooleanAndOr] a, b = None, None # type: (A, bool) b = b and b b = b or b b = b and a # E: Incompatible types in assignment (expression has type "Union[bool, A]", variable has type "bool") b = a and b # E: Incompatible types in assignment (expression has type "Union[A, bool]", variable has type "bool") b = b or a # E: Incompatible types in assignment (expression has type "Union[bool, A]", variable has type "bool") b = a or b # E: Incompatible types in assignment (expression has type "Union[A, bool]", variable has type "bool") class A: pass [builtins fixtures/bool.pyi] [case testRestrictedTypeAnd] b = None # type: bool i = None # type: str j = not b and i if j: reveal_type(j) # E: Revealed type is 'builtins.str' [builtins 
fixtures/bool.pyi] [case testRestrictedTypeOr] b = None # type: bool i = None # type: str j = b or i if not j: reveal_type(j) # E: Revealed type is 'builtins.str' [builtins fixtures/bool.pyi] [case testAndOr] s = "" b = bool() reveal_type(s and b or b) # E: Revealed type is 'builtins.bool' [builtins fixtures/bool.pyi] [case testNonBooleanOr] c, d, b = None, None, None # type: (C, D, bool) c = c or c c = c or d c = d or c b = c or c # E: Incompatible types in assignment (expression has type "C", variable has type "bool") d = c or d # E: Incompatible types in assignment (expression has type "C", variable has type "D") d = d or c # E: Incompatible types in assignment (expression has type "C", variable has type "D") class C: pass class D(C): pass [builtins fixtures/bool.pyi] [case testInOperator] from typing import Iterator, Iterable, Any a, b, c, d, e = None, None, None, None, None # type: (A, B, bool, D, Any) c = c in a # Fail a = b in a # Fail c = a in b # Fail c = b in d # Fail c = b in a c = a in d c = e in d c = a in e class A: def __contains__(self, x: 'B') -> bool: pass class B: pass class D(Iterable[A]): def __iter__(self) -> Iterator[A]: pass [builtins fixtures/bool.pyi] [out] main:3: error: Unsupported operand types for in ("bool" and "A") main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A") main:5: error: Unsupported right operand type for in ("B") main:6: error: Unsupported operand types for in ("B" and "D") [case testNotInOperator] from typing import Iterator, Iterable, Any a, b, c, d, e = None, None, None, None, None # type: (A, B, bool, D, Any) c = c not in a # Fail a = b not in a # Fail c = a not in b # Fail c = b not in d # Fail c = b not in a c = a not in d c = e in d c = a in e class A: def __contains__(self, x: 'B') -> bool: pass class B: pass class D(Iterable[A]): def __iter__(self) -> Iterator[A]: pass [builtins fixtures/bool.pyi] [out] main:3: error: Unsupported operand types for in ("bool" and "A") main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A") main:5: error: Unsupported right operand type for in ("B") main:6: error: Unsupported operand types for in ("B" and "D") [case testNonBooleanContainsReturnValue] a, b = None, None # type: (A, bool) b = a not in a b = a in a class A: def __contains__(self, x: 'A') -> object: pass [builtins fixtures/bool.pyi] [out] main:4: error: Incompatible types in assignment (expression has type "object", variable has type "bool") [case testEq] a, b = None, None # type: (A, bool) a = a == b # Fail a = a != b # Fail b = a == b b = a != b class A: def __eq__(self, o: object) -> bool: pass def __ne__(self, o: object) -> bool: pass [builtins fixtures/bool.pyi] [out] main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A") main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A") [case testLtAndGt] a, b, bo = None, None, None # type: (A, B, bool) a = a < b # Fail a = a > b # Fail bo = a < b bo = a > b class A: def __lt__(self, o: 'B') -> bool: pass def __gt__(self, o: 'B') -> bool: pass class B: def __lt__(self, o: 'B') -> bool: pass def __gt__(self, o: 'B') -> bool: pass [builtins fixtures/bool.pyi] [out] main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A") main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A") [case testCmp_python2] a, b, c, bo = None, None, None, None # 
type: (A, B, C, bool) bo = a == a # E: Unsupported operand types for == ("A" and "A") bo = a != a # E: Argument 1 to "__cmp__" of "A" has incompatible type "A"; expected "B" bo = a < b bo = a > b bo = b <= b bo = b <= c bo = b >= c # E: Argument 1 to "__cmp__" of "B" has incompatible type "C"; expected "B" bo = a >= b bo = c >= b bo = c <= b # E: Argument 1 to "__cmp__" of "C" has incompatible type "B"; expected "A" bo = a == c bo = b == c # E: Unsupported operand types for == ("C" and "B") class A: def __cmp__(self, o): # type: ('B') -> bool pass def __eq__(self, o): # type: ('int') -> bool pass class B: def __cmp__(self, o): # type: ('B') -> bool pass def __le__(self, o): # type: ('C') -> bool pass class C: def __cmp__(self, o): # type: ('A') -> bool pass def __eq__(self, o): # type: ('int') -> bool pass [builtins_py2 fixtures/bool.pyi] [case cmpIgnoredPy3] a, b, bo = None, None, None # type: (A, B, bool) bo = a <= b # E: Unsupported left operand type for <= ("A") class A: def __cmp__(self, o: 'B') -> bool: pass class B: pass [builtins fixtures/bool.pyi] [case testLeAndGe] a, b, bo = None, None, None # type: (A, B, bool) a = a <= b # Fail a = a >= b # Fail bo = a <= b bo = a >= b class A: def __le__(self, o: 'B') -> bool: pass def __ge__(self, o: 'B') -> bool: pass class B: def __le__(self, o: 'B') -> bool: pass def __ge__(self, o: 'B') -> bool: pass [builtins fixtures/bool.pyi] [out] main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A") main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A") [case testChainedComp] a, b, bo = None, None, None # type: (A, B, bool) a < a < b < b # Fail a < b < b < b a < a > a < b # Fail class A: def __lt__(self, o: 'B') -> bool: pass def __gt__(self, o: 'B') -> bool: pass class B: def __lt__(self, o: 'B') -> bool: pass def __gt__(self, o: 'B') -> bool: pass [builtins fixtures/bool.pyi] [out] main:3: error: Unsupported operand types for > ("A" and "A") main:5: error: Unsupported operand types for > ("A" and "A") main:5: error: Unsupported operand types for < ("A" and "A") [case testChainedCompBoolRes] a, b, bo = None, None, None # type: (A, B, bool) bo = a < b < b a = a < b < b # Fail class A: def __lt__(self, o: 'B') -> bool: pass def __gt__(self, o: 'B') -> bool: pass class B: def __lt__(self, o: 'B') -> bool: pass def __gt__(self, o: 'B') -> bool: pass [builtins fixtures/bool.pyi] [out] main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A") [case testChainedCompResTyp] x, y = None, None # type: (X, Y) a, b, p, bo = None, None, None, None # type: (A, B, P, bool) b = y == y == y bo = y == y == y # Fail a = x < y a = x < y == y # Fail p = x < y == y class P: pass class A(P): pass class B(P): pass class X: def __lt__(self, o: 'Y') -> A: pass def __gt__(self, o: 'Y') -> A: pass class Y: def __lt__(self, o: 'Y') -> A: pass def __gt__(self, o: 'Y') -> A: pass def __eq__(self, o: 'Y') -> B: pass [builtins fixtures/bool.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "B", variable has type "bool") main:7: error: Incompatible types in assignment (expression has type "P", variable has type "A") [case testIs] a, b = None, None # type: (A, bool) a = a is b # Fail b = a is b b = b is a b = a is None class A: pass [builtins fixtures/bool.pyi] [out] main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A") [case testIsNot] a, b = None, None # type: (A, bool) a = a 
is not b # Fail b = a is not b b = b is not a b = a is not None class A: pass [builtins fixtures/bool.pyi] [out] main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A") [case testIsRightOperand] 1 is 1() [builtins fixtures/bool.pyi] [out] main:2: error: "int" not callable [case testReverseBinaryOperator] class A: def __add__(self, x: int) -> int: pass class B: def __radd__(self, x: A) -> str: pass s = None # type: str n = None # type: int n = A() + 1 s = A() + B() n = A() + B() # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testReverseBinaryOperator2] class A: def __add__(self, x: 'A') -> object: pass class B: def __radd__(self, x: A) -> str: pass s = None # type: str n = None # type: int s = A() + B() n = A() + B() # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testReverseBinaryOperator3] class N: def __add__(self, x: 'N') -> object: pass class A: def __add__(self, x: N) -> int: pass class B: def __radd__(self, x: N) -> str: pass s = None # type: str s = A() + B() # E: Unsupported operand types for + ("A" and "B") [case testBinaryOperatorWithAnyRightOperand] from typing import Any, cast class A: pass A() + cast(Any, 1) [case testReverseComparisonOperator] class C: def __gt__(self, x: 'A') -> object: pass class A: def __lt__(self, x: C) -> int: pass class B: def __gt__(self, x: A) -> str: pass s = None # type: str n = None # type: int n = A() < C() s = A() < B() n = A() < B() # E: Incompatible types in assignment (expression has type "str", variable has type "int") s = object() < B() # E: Unsupported operand types for > ("B" and "object") [case testErrorContextAndBinaryOperators] import typing class A: def __getitem__(self, i: str) -> int: pass def f() -> None: A()[1] # Error class B: A()[1] # Error A()[1] # Error [out] main:5: error: Invalid index type "int" for "A"; expected type "str" main:7: error: Invalid index type "int" for "A"; expected type "str" main:8: error: Invalid index type "int" for "A"; expected type "str" [case testErrorContextAndBinaryOperators2] import m [file m.py] import typing class A: def __getitem__(self, i: str) -> int: pass def f() -> None: A()[1] # Error class B: A()[1] # Error A()[1] # Error [out] tmp/m.py:5: error: Invalid index type "int" for "A"; expected type "str" tmp/m.py:7: error: Invalid index type "int" for "A"; expected type "str" tmp/m.py:8: error: Invalid index type "int" for "A"; expected type "str" -- Unary operators -- --------------- [case testUnaryMinus] a, b = None, None # type: (A, B) a = -a # Fail b = -b # Fail b = -a class A: def __neg__(self) -> 'B': pass class B: pass [out] main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:4: error: Unsupported operand type for unary - ("B") [case testUnaryPlus] a, b = None, None # type: (A, B) a = +a # Fail b = +b # Fail b = +a class A: def __pos__(self) -> 'B': pass class B: pass [out] main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:4: error: Unsupported operand type for unary + ("B") [case testUnaryNot] a, b = None, None # type: (A, bool) a = not b # Fail b = not a b = not b class A: pass [builtins fixtures/bool.pyi] [out] main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A") [case testUnaryBitwiseNeg] a, b = None, None # type: (A, B) a = ~a # Fail b = ~b # Fail b = ~a class A: def __invert__(self) -> 'B': pass 
class B: pass [out] main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:4: error: Unsupported operand type for ~ ("B") -- Indexing -- -------- [case testIndexing] a, b, c = None, None, None # type: (A, B, C) c = a[c] # Fail a = a[b] # Fail c = b[a] # Fail c = a[b] class A: def __getitem__(self, x: 'B') -> 'C': pass class B: pass class C: pass [out] main:3: error: Invalid index type "C" for "A"; expected type "B" main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A") main:5: error: Value of type "B" is not indexable [case testIndexingAsLvalue] a, b, c = None, None, None # type: (A, B, C) a[c] = c # Fail a[b] = a # Fail b[a] = c # Fail a[b] = c class A: def __setitem__(self, x: 'B', y: 'C') -> None: pass class B: pass class C: pass [out] main:3: error: Invalid index type "C" for "A"; expected type "B" main:4: error: Incompatible types in assignment (expression has type "A", target has type "C") main:5: error: Unsupported target for indexed assignment [case testOverloadedIndexing] from foo import * [file foo.pyi] from typing import overload a, b, c = None, None, None # type: (A, B, C) a[b] a[c] a[1] # E: No overload variant of "__getitem__" of "A" matches argument types [builtins.int] i, s = None, None # type: (int, str) i = a[b] s = a[b] # E: Incompatible types in assignment (expression has type "int", variable has type "str") i = a[c] # E: Incompatible types in assignment (expression has type "str", variable has type "int") s = a[c] class A: @overload def __getitem__(self, x: 'B') -> int: pass @overload def __getitem__(self, x: 'C') -> str: pass class B: pass class C: pass [out] -- Cast expression -- --------------- [case testCastExpressions] from typing import cast, Any class A: pass class B: pass class C(A): pass a, b, c = None, None, None # type: (A, B, C) a = cast(A, a()) # E: "A" not callable a = cast(Any, a()) # E: "A" not callable b = cast(A, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = cast(A, b) a = cast(A, a) c = cast(C, a) a = cast(A, c) a = cast(Any, b) b = cast(Any, a) [out] [case testAnyCast] from typing import cast, Any a, b = None, None # type: (A, B) a = cast(Any, a()) # Fail a = cast(Any, b) b = cast(Any, a) class A: pass class B: pass [out] main:3: error: "A" not callable -- None return type -- ---------------- [case testNoneReturnTypeBasics] a, o = None, None # type: (A, object) a = f() # E: "f" does not return a value o = a() # E: Function does not return a value o = A().g(a) # E: "g" of "A" does not return a value A().g(f()) # E: "f" does not return a value x: A = f() # E: "f" does not return a value f() A().g(a) def f() -> None: pass class A: def g(self, x: object) -> None: pass def __call__(self) -> None: pass [case testNoneReturnTypeWithStatements] import typing if f(): # Fail pass elif f(): # Fail pass while f(): # Fail pass def g() -> object: return f() # Fail raise f() # Fail def f() -> None: pass [builtins fixtures/exception.pyi] [out] main:2: error: "f" does not return a value main:4: error: "f" does not return a value main:6: error: "f" does not return a value main:9: error: "f" does not return a value main:10: error: "f" does not return a value [case testNoneReturnTypeWithExpressions] from typing import cast a = None # type: A [f()] # E: "f" does not return a value f() + a # E: "f" does not return a value a + f() # E: "f" does not return a value f() == a # E: "f" does not return a value a != f() # E: "f" does not return 
a value cast(A, f()) f().foo # E: "f" does not return a value def f() -> None: pass class A: def __add__(self, x: 'A') -> 'A': pass [builtins fixtures/list.pyi] [case testNoneReturnTypeWithExpressions2] import typing a, b = None, None # type: (A, bool) f() in a # E: "f" does not return a value # E: Unsupported right operand type for in ("A") a < f() # E: "f" does not return a value f() <= a # E: "f" does not return a value a in f() # E: "f" does not return a value -f() # E: "f" does not return a value not f() # E: "f" does not return a value f() and b # E: "f" does not return a value b or f() # E: "f" does not return a value def f() -> None: pass class A: def __add__(self, x: 'A') -> 'A': pass [builtins fixtures/bool.pyi] -- Slicing -- ------- [case testGetSlice] a, b = None, None # type: (A, B) a = a[1:2] # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = a[1:] # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = a[:2] # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = a[:] # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = a[1:2] b = a[1:] b = a[:2] b = a[:] class A: def __getitem__(self, s: slice) -> 'B': pass class B: pass [builtins fixtures/slice.pyi] [case testSlicingWithInvalidBase] a = None # type: A a[1:2] # E: Invalid index type "slice" for "A"; expected type "int" a[:] # E: Invalid index type "slice" for "A"; expected type "int" class A: def __getitem__(self, n: int) -> 'A': pass [builtins fixtures/slice.pyi] [case testSlicingWithNonindexable] o = None # type: object o[1:2] # E: Value of type "object" is not indexable o[:] # E: Value of type "object" is not indexable [builtins fixtures/slice.pyi] [case testNonIntSliceBounds] from typing import Any a, o = None, None # type: (Any, object) a[o:1] # E: Slice index must be an integer or None a[1:o] # E: Slice index must be an integer or None a[o:] # E: Slice index must be an integer or None a[:o] # E: Slice index must be an integer or None [builtins fixtures/slice.pyi] [case testNoneSliceBounds] from typing import Any a = None # type: Any a[None:1] a[1:None] a[None:] a[:None] [builtins fixtures/slice.pyi] [case testNoneSliceBoundsWithStrictOptional] # flags: --strict-optional from typing import Any a = None # type: Any a[None:1] a[1:None] a[None:] a[:None] [builtins fixtures/slice.pyi] -- String interpolation -- -------------------- [case testStringInterpolationType] from typing import Tuple i, f, s, t = None, None, None, None # type: (int, float, str, Tuple[int]) '%d' % i '%f' % f '%s' % s '%d' % (f,) '%d' % (s,) # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]") '%d' % t '%d' % s # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]") '%f' % s # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]") [builtins fixtures/primitives.pyi] [case testStringInterpolationSAcceptsAnyType] from typing import Any i, o, s = None, None, None # type: (int, object, str) '%s %s %s' % (i, o, s) [builtins fixtures/primitives.pyi] [case testStringInterpolationCount] '%d %d' % 1 # E: Not enough arguments for format string '%d %d' % (1, 2) '%d %d' % (1, 2, 3) # E: Not all arguments converted during string formatting t = 1, 's' '%d %s' % t '%s %d' % t # E: Incompatible types in string interpolation (expression 
has type "str", placeholder has type "Union[int, float]") '%d' % t # E: Not all arguments converted during string formatting [builtins fixtures/primitives.pyi] [case testStringInterpolationWithAnyType] from typing import Any a = None # type: Any '%d %d' % a [builtins fixtures/primitives.pyi] [case testStringInterpolationInvalidPlaceholder] '%W' % 1 # E: Unsupported format character 'W' '%b' % 1 # E: Format character 'b' is only supported on bytes patterns [case testStringInterPolationPython2] # flags: --python-version 2.7 b'%b' % 1 # E: Format character 'b' is only supported in Python 3.5 and later b'%s' % 1 b'%a' % 1 # E: Format character 'a' is only supported in Python 3 [case testBytesInterpolationBefore35] # flags: --python-version 3.4 b'%b' % 1 # E: Unsupported left operand type for % ("bytes") [case testBytesInterpolation] b'%b' % 1 # E: Incompatible types in string interpolation (expression has type "int", placeholder has type "bytes") b'%b' % b'1' b'%a' % 3 [case testStringInterpolationWidth] '%2f' % 3.14 '%*f' % 3.14 # E: Not enough arguments for format string '%*f' % (4, 3.14) '%*f' % (1.1, 3.14) # E: * wants int [builtins fixtures/primitives.pyi] [case testStringInterpolationPrecision] '%.2f' % 3.14 '%.*f' % 3.14 # E: Not enough arguments for format string '%.*f' % (4, 3.14) '%.*f' % (1.1, 3.14) # E: * wants int [builtins fixtures/primitives.pyi] [case testStringInterpolationWidthAndPrecision] '%4.2f' % 3.14 '%4.*f' % 3.14 # E: Not enough arguments for format string '%*.2f' % 3.14 # E: Not enough arguments for format string '%*.*f' % 3.14 # E: Not enough arguments for format string '%*.*f' % (4, 2, 3.14) [builtins fixtures/primitives.pyi] [case testStringInterpolationFlagsAndLengthModifiers] '%04hd' % 1 '%-.4ld' % 1 '%+*Ld' % (1, 1) '% .*ld' % (1, 1) [builtins fixtures/primitives.pyi] [case testStringInterpolationDoublePercentage] '%% %d' % 1 '%3% %d' % 1 '%*%' % 1 '%*% %d' % 1 # E: Not enough arguments for format string [builtins fixtures/primitives.pyi] [case testStringInterpolationC] '%c' % 1 '%c' % 's' '%c' % '' # E: %c requires int or char '%c' % 'ab' # E: %c requires int or char [builtins fixtures/primitives.pyi] [case testStringInterpolationMappingTypes] '%(a)d %(b)s' % {'a': 1, 'b': 's'} '%(a)d %(b)s' % {'a': 's', 'b': 1} # E: Incompatible types in string interpolation (expression has type "str", placeholder with key 'a' has type "Union[int, float]") [builtins fixtures/primitives.pyi] [case testStringInterpolationMappingKeys] '%()d' % {'': 2} '%(a)d' % {'a': 1, 'b': 2, 'c': 3} '%(q)d' % {'a': 1, 'b': 2, 'c': 3} # E: Key 'q' not found in mapping '%(a)d %%' % {'a': 1} [builtins fixtures/dict.pyi] [case testStringInterpolationMappingDictTypes] from typing import Any, Dict a = None # type: Any ds, do, di = None, None, None # type: Dict[str, int], Dict[object, int], Dict[int, int] '%(a)' % 1 # E: Format requires a mapping (expression has type "int", expected type for mapping is "Dict[Any, Any]") '%()d' % a '%()d' % ds '%()d' % do [builtins fixtures/dict.pyi] [case testStringInterpolationMappingInvalidDictTypes-skip] from typing import Any, Dict di = None # type: Dict[int, int] '%()d' % di # E: Format requires a mapping (expression has type Dict[int, int], expected type for mapping is Dict[str, Any]) [builtins fixtures/dict.pyi] [case testStringInterpolationMappingInvalidSpecifiers] '%(a)d %d' % 1 # E: String interpolation mixes specifier with and without mapping keys '%(b)*d' % 1 # E: String interpolation contains both stars and mapping keys '%(b).*d' % 1 # E: String 
interpolation contains both stars and mapping keys [case testStringInterpolationMappingFlagsAndLengthModifiers] '%(a)1d' % {'a': 1} '%(a).1d' % {'a': 1} '%(a)#1.1ld' % {'a': 1} [builtins fixtures/dict.pyi] [case testStringInterpolationFloatPrecision] '%.f' % 1.2 '%.3f' % 1.2 '%.f' % 'x' '%.3f' % 'x' [builtins fixtures/primitives.pyi] [out] main:3: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]") main:4: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]") [case testStringInterpolationSpaceKey] '%( )s' % {' ': 'foo'} [case testByteByteInterpolation] def foo(a: bytes, b: bytes): b'%s:%s' % (a, b) foo(b'a', b'b') == b'a:b' [case testStringInterpolationStarArgs] x = (1, 2) "%d%d" % (*x,) [case testBytePercentInterpolationSupported] b'%s' % (b'xyz',) b'%(name)s' % {'name': 'jane'} b'%c' % (123) [case testUnicodeInterpolation_python2] u'%s' % (u'abc',) -- Lambdas -- ------- [case testTrivialLambda] from typing import Callable f = lambda: 1 # type: Callable[[], int] f = lambda: ''.x f = lambda: '' [out] main:3: error: "str" has no attribute "x" main:4: error: Incompatible types in assignment (expression has type "Callable[[], str]", variable has type "Callable[[], int]") main:4: error: Incompatible return value type (got "str", expected "int") [case testVoidLambda] import typing def void() -> None: pass x = lambda: void() # type: typing.Callable[[], None] [case testNoCrashOnLambdaGenerator] from typing import Iterator, Callable # These should not crash lambda: (yield) gen: Callable[[], Iterator[str]] gen = (lambda: (yield 1)) # E: Incompatible types in "yield" (actual type "int", expected type "str") def fun(cb: Callable[[], Iterator[str]]) -> None: pass fun(lambda: (yield from [1])) # E: Incompatible types in "yield from" (actual type "int", expected type "str") [builtins fixtures/list.pyi] [out] -- List comprehensions -- ------------------- [case testSimpleListComprehension] from typing import List a = None # type: List[A] a = [x for x in a] b = [x for x in a] # type: List[B] # E: List comprehension has incompatible type List[A]; expected List[B] class A: pass class B: pass [builtins fixtures/for.pyi] [case testSimpleListComprehensionNestedTuples] from typing import List, Tuple l = None # type: List[Tuple[A, Tuple[A, B]]] a = [a2 for a1, (a2, b1) in l] # type: List[A] b = [a2 for a1, (a2, b1) in l] # type: List[B] # E: List comprehension has incompatible type List[A]; expected List[B] class A: pass class B: pass [builtins fixtures/for.pyi] [case testSimpleListComprehensionNestedTuples2] from typing import List, Tuple l = None # type: List[Tuple[int, Tuple[int, str]]] a = [f(d) for d, (i, s) in l] b = [f(s) for d, (i, s) in l] # E: Argument 1 to "f" has incompatible type "str"; expected "int" def f(x: int): pass [builtins fixtures/for.pyi] [case testListComprehensionWithNonDirectMapping] from typing import List a = None # type: List[A] b = None # type: List[B] b = [f(x) for x in a] a = [f(x) for x in a] # E: List comprehension has incompatible type List[B]; expected List[A] ([f(x) for x in b]) # E: Argument 1 to "f" has incompatible type "B"; expected "A" class A: pass class B: pass def f(a: A) -> B: pass [builtins fixtures/for.pyi] [case testErrorInListComprehensionCondition] from typing import List a = None # type: List[A] a = [x for x in a if x()] # E: "A" not callable class A: pass [builtins fixtures/for.pyi] [case testTypeInferenceOfListComprehension] 
from typing import List a = None # type: List[A] o = [x for x in a] # type: List[object] class A: pass [builtins fixtures/for.pyi] [case testSimpleListComprehensionInClassBody] from typing import List class A: a = None # type: List[A] a = [x for x in a] b = [x for x in a] # type: List[B] # E: List comprehension has incompatible type List[A]; expected List[B] class B: pass [builtins fixtures/for.pyi] [out] -- Set comprehension -- ----------------- [case testSimpleSetComprehension] from typing import Set a = None # type: Set[A] a = {x for x in a} b = {x for x in a} # type: Set[B] # E: Set comprehension has incompatible type Set[A]; expected Set[B] class A: pass class B: pass [builtins fixtures/set.pyi] -- Dictionary comprehension -- ------------------------ [case testSimpleDictionaryComprehension] from typing import Dict, List, Tuple abd = None # type: Dict[A, B] abl = None # type: List[Tuple[A, B]] abd = {a: b for a, b in abl} x = {a: b for a, b in abl} # type: Dict[B, A] y = {a: b for a, b in abl} # type: A class A: pass class B: pass [builtins fixtures/dict.pyi] [out] main:5: error: Key expression in dictionary comprehension has incompatible type "A"; expected type "B" main:5: error: Value expression in dictionary comprehension has incompatible type "B"; expected type "A" main:6: error: Incompatible types in assignment (expression has type "Dict[A, B]", variable has type "A") [case testDictionaryComprehensionWithNonDirectMapping] from typing import Dict, List, Tuple abd = None # type: Dict[A, B] abl = None # type: List[Tuple[A, B]] abd = {a: f(b) for a, b in abl} class A: pass class B: pass class C: pass def f(b: A) -> C: pass [builtins fixtures/dict.pyi] [out] main:4: error: Value expression in dictionary comprehension has incompatible type "C"; expected type "B" main:4: error: Argument 1 to "f" has incompatible type "B"; expected "A" -- Generator expressions -- --------------------- [case testSimpleGeneratorExpression] from typing import Iterator # The implementation is mostly identical to list comprehensions, so only a few # test cases is ok. 
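# Illustrative aside, a hedged sketch rather than part of the original case: like the
# list comprehensions above, a generator expression takes its item type from the
# sequence being iterated over. The variable names below are invented for this example.
_numbers = [0, 1, 2]
_doubled = (x + x for x in _numbers)   # item type is inferred as int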
a = None # type: Iterator[int] a = (x for x in a) b = None # type: Iterator[str] b = (x for x in a) # E: Generator has incompatible item type "int"; expected "str" [builtins fixtures/for.pyi] [case testGeneratorIncompatibleErrorMessage] from typing import Callable, Iterator, List a = [] # type: List[Callable[[], str]] b = None # type: Iterator[Callable[[], int]] b = (x for x in a) # E: Generator has incompatible item type "Callable[[], str]"; expected "Callable[[], int]" [builtins fixtures/list.pyi] -- Conditional expressions -- ----------------------- [case testSimpleConditionalExpression] import typing y = '' x = 1 if y else 2 x = 3 x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testConditionalExpressionWithEmptyCondition] import typing def f() -> None: pass x = 1 if f() else 2 # E: "f" does not return a value [case testConditionalExpressionWithSubtyping] import typing class A: pass class B(A): pass x = B() if bool() else A() x = A() x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A") y = A() if bool() else B() y = A() y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A") [builtins fixtures/bool.pyi] [case testConditionalExpressionAndTypeContext] import typing x = [1] if bool() else [] x = [1] x = ['x'] # E: List item 0 has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] -- Special cases -- ------------- [case testOperationsWithNonInstanceTypes] from typing import cast class A: def __add__(self, a: 'A') -> 'A': pass a = None # type: A None + a # Fail f + a # Fail a + f # Fail cast(A, f) def f() -> None: pass [out] main:5: error: Unsupported left operand type for + ("None") main:6: error: Unsupported left operand type for + ("Callable[[], None]") main:7: error: Unsupported operand types for + ("A" and "Callable[[], None]") [case testOperatorMethodWithInvalidArgCount] a = None # type: A a + a # Fail class A: def __add__(self) -> 'A': pass [out] main:3: error: Too many arguments for "__add__" of "A" [case testOperatorMethodAsVar] from typing import Any class A: def __init__(self, _add: Any) -> None: self.__add__ = _add a = None # type: A a + a [out] [case testOperatorMethodAsVar2] class A: def f(self, x: int) -> str: pass __add__ = f s = None # type: str s = A() + 1 A() + (A() + 1) [out] main:7: error: Argument 1 has incompatible type "str"; expected "int" [case testIndexedLvalueWithSubtypes] a, b, c = None, None, None # type: (A, B, C) a[c] = c a[b] = c a[c] = b class A: def __setitem__(self, x: 'B', y: 'B') -> None: pass class B: pass class C(B): pass [out] -- Ellipsis -- -------- [case testEllipsis] a = None # type: A a = ... # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "A") b = ... c = ... 
b = c ....__class__ ....a # E: "ellipsis" has no attribute "a" class A: pass [file builtins.py] class object: def __init__(self): pass class ellipsis: def __init__(self): pass __class__ = object() class type: pass class function: pass class str: pass [out] -- Yield expression -- ---------------- [case testYieldExpression] def f(x: int) -> None: x = yield f('') x = 1 [builtins fixtures/for.pyi] [out] main:1: error: The return type of a generator function should be "Generator" or one of its supertypes main:2: error: Argument 1 to "f" has incompatible type "str"; expected "int" main:2: error: "f" does not return a value [case testYieldExpressionWithNone] from typing import Iterator def f(x: int) -> Iterator[None]: (yield) [builtins fixtures/for.pyi] [out] -- Yield from expression -- ---------------- [case testYieldFromIteratorHasNoValue] from typing import Iterator def f() -> Iterator[int]: yield 5 def g() -> Iterator[int]: a = yield from f() # E: Function does not return a value [case testYieldFromGeneratorHasValue] from typing import Iterator, Generator def f() -> Generator[int, None, str]: yield 5 return "ham" def g() -> Iterator[int]: a = "string" a = yield from f() [out] -- dict(...) -- --------- -- Note that the stub used in unit tests does not have all overload -- variants, but it should not matter. [case testDictWithKeywordArgsOnly] from typing import Dict, Any d1 = dict(a=1, b=2) # type: Dict[str, int] d2 = dict(a=1, b='') # type: Dict[str, int] # E: Dict entry 1 has incompatible type "str": "str"; expected "str": "int" d3 = dict(a=1) # type: Dict[int, int] # E: Dict entry 0 has incompatible type "str": "int"; expected "int": "int" d4 = dict(a=1, b=1) d4.xyz # E: "Dict[str, int]" has no attribute "xyz" d5 = dict(a=1, b='') # type: Dict[str, Any] [builtins fixtures/dict.pyi] [case testDictWithoutKeywordArgs] from typing import Dict d = dict() # E: Need type annotation for variable d2 = dict() # type: Dict[int, str] dict(undefined) # E: Name 'undefined' is not defined [builtins fixtures/dict.pyi] [case testDictFromList] from typing import Dict d = dict([(1, 'x'), (2, 'y')]) d() # E: "Dict[int, str]" not callable d2 = dict([(1, 'x')]) # type: Dict[str, str] # E: List item 0 has incompatible type "Tuple[int, str]"; expected "Tuple[str, str]" [builtins fixtures/dict.pyi] [case testDictFromIterableAndKeywordArg] from typing import Dict it = [('x', 1)] d = dict(it, x=1) d() # E: "Dict[str, int]" not callable d2 = dict(it, x='') # E: Cannot infer type argument 2 of "dict" d2() # E: "Dict[Any, Any]" not callable d3 = dict(it, x='') # type: Dict[str, int] # E: Argument 2 to "dict" has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testDictFromIterableAndKeywordArg2] it = [(1, 'x')] dict(it, x='y') # E: Keyword argument only valid with "str" key type in call to "dict" [builtins fixtures/dict.pyi] [case testDictFromIterableAndKeywordArg3] d = dict([], x=1) d() # E: "Dict[str, int]" not callable [builtins fixtures/dict.pyi] [case testDictFromIterableAndStarStarArgs] from typing import Dict it = [('x', 1)] kw = {'x': 1} d = dict(it, **kw) d() # E: "Dict[str, int]" not callable kw2 = {'x': ''} d2 = dict(it, **kw2) # E: Cannot infer type argument 2 of "dict" d2() # E: "Dict[Any, Any]" not callable d3 = dict(it, **kw2) # type: Dict[str, int] # E: Argument 2 to "dict" has incompatible type "**Dict[str, str]"; expected "int" [builtins fixtures/dict.pyi] [case testDictFromIterableAndStarStarArgs2] it = [(1, 'x')] kw = {'x': 'y'} d = dict(it, **kw) # E: Keyword argument only 
valid with "str" key type in call to "dict" d() # E: "Dict[int, str]" not callable [builtins fixtures/dict.pyi] [case testUserDefinedClassNamedDict] from typing import Generic, TypeVar T = TypeVar('T') S = TypeVar('S') class dict(Generic[T, S]): def __init__(self, x: T, **kwargs: T) -> None: pass dict(1, y=1) [builtins fixtures/dict.pyi] [case testSpecialSignatureForSubclassOfDict] from typing import TypeVar, Dict, Generic T = TypeVar('T') S = TypeVar('S') class D1(dict): pass # Implicit base class Dict[Any, Any] D1([(1, 2)], x=1) class D2(Dict[T, S], Generic[T, S]): pass da = D2([('x', 2)], x=1) da() # E: "D2[str, int]" not callable D2([(1, 2)], x=1) # E: Keyword argument only valid with "str" key type in call to "dict" db = D2(x=1) db() # E: "D2[str, int]" not callable [builtins fixtures/dict.pyi] [case testSpecialSignatureForSubclassOfDict2] from typing import TypeVar, Dict, Generic T = TypeVar('T') class D(Dict[str, T], Generic[T]): pass D([('x', 1)], x=1) [builtins fixtures/dict.pyi] [case testOverridingSpecialSignatureInSubclassOfDict] from typing import TypeVar, Dict, Generic T = TypeVar('T') S = TypeVar('S') class D(Dict[T, S], Generic[T, S]): def __init__(self, x: S, y: T) -> None: pass d = D(1, y='') d() # E: "D[str, int]" not callable [builtins fixtures/dict.pyi] [case testRevealType] reveal_type(1) # E: Revealed type is 'builtins.int' [case testUndefinedRevealType] reveal_type(x) [out] main:1: error: Revealed type is 'Any' main:1: error: Name 'x' is not defined [case testUserDefinedRevealType] def reveal_type(x: int) -> None: pass reveal_type("foo") # E: Argument 1 to "reveal_type" has incompatible type "str"; expected "int" [case testRevealTypeVar] reveal_type = 1 1 + "foo" # E: Unsupported operand types for + ("int" and "str") [case testRevealForward] def f() -> None: reveal_type(x) x = 1 + 1 [out] main:2: error: Revealed type is 'builtins.int' [case testRevealUncheckedFunction] def f(): x = 42 reveal_type(x) [out] main:3: error: Revealed type is 'Any' main:3: note: 'reveal_type' always outputs 'Any' in unchecked functions [case testRevealCheckUntypedDefs] # flags: --check-untyped-defs def f(): x = 42 reveal_type(x) [out] main:4: error: Revealed type is 'builtins.int' [case testRevealTypedDef] def f() -> None: x = 42 reveal_type(x) [out] main:3: error: Revealed type is 'builtins.int' [case testEqNone] None == None [builtins fixtures/ops.pyi] [case testLtNone] None < None # E: Unsupported left operand type for < ("None") [builtins fixtures/ops.pyi] [case testDictWithStarExpr] b = {'z': 26, *a} # E: invalid syntax [builtins fixtures/dict.pyi] [case testDictWithStarStarExpr] from typing import Dict a = {'a': 1} b = {'z': 26, **a} c = {**b} d = {**a, **b, 'c': 3} e = {1: 'a', **a} # E: Argument 1 to "update" of "dict" has incompatible type "Dict[str, int]"; expected "Mapping[int, str]" f = {**b} # type: Dict[int, int] # E: List item 0 has incompatible type "Dict[str, int]"; expected "Mapping[int, int]" [builtins fixtures/dict.pyi] [case testDictIncompatibleTypeErrorMessage] from typing import Dict, Callable def things() -> int: return 42 stuff: Dict[int, Callable[[], str]] = { # E: Dict entry 0 has incompatible type "int": "Callable[[], int]"; expected "int": "Callable[[], str]" 1: things } [builtins fixtures/dict.pyi] [case testDictIncompatibleKeyVerbosity] from typing import Dict import mod class A: ... class B(A): ... d: Dict[A, B] = {A(): mod.B()} # E: Dict entry 0 has incompatible type "A": "mod.B"; expected "A": "__main__.B" [file mod.py] class B: ... 
[builtins fixtures/dict.pyi] [case testDictIncompatibleValueVerbosity] from typing import Dict import mod class A: ... class B(A): ... d: Dict[B, A] = {mod.B(): A()} # E: Dict entry 0 has incompatible type "mod.B": "A"; expected "__main__.B": "A" [file mod.py] class B: ... [builtins fixtures/dict.pyi] -- Type checker default plugin -- --------------------------- [case testIntPow] a = 1 b = a + 2 reveal_type(a**0) # E: Revealed type is 'builtins.int' reveal_type(a**1) # E: Revealed type is 'builtins.int' reveal_type(a**2) # E: Revealed type is 'builtins.int' reveal_type(a**-0) # E: Revealed type is 'builtins.int' reveal_type(a**-1) # E: Revealed type is 'builtins.float' reveal_type(a**(-2)) # E: Revealed type is 'builtins.float' reveal_type(a**b) # E: Revealed type is 'Any' reveal_type(a.__pow__(2)) # E: Revealed type is 'builtins.int' reveal_type(a.__pow__(a)) # E: Revealed type is 'Any' a.__pow__() # E: Too few arguments for "__pow__" of "int" [builtins fixtures/ops.pyi] mypy-0.560/test-data/unit/check-fastparse.test0000644€tŠÔÚ€2›s®0000003041213215007205025500 0ustar jukkaDROPBOX\Domain Users00000000000000[case testFastParseSyntaxError] 1 + # E: invalid syntax [case testFastParseTypeCommentSyntaxError] x = None # type: a : b # E: syntax error in type comment [case testFastParseInvalidTypeComment] x = None # type: a + b # E: invalid type comment or annotation -- Function type comments are attributed to the function def line. -- This happens in both parsers. [case testFastParseFunctionAnnotationSyntaxError] def f(): # E: syntax error in type comment # N: Suggestion: wrap argument types in parentheses # type: None -> None pass [case testFastParseFunctionAnnotationSyntaxErrorSpaces] def f(): # E: syntax error in type comment # N: Suggestion: wrap argument types in parentheses # type: None -> None pass [case testFastParseInvalidFunctionAnnotation] def f(x): # E: invalid type comment or annotation # type: (a + b) -> None pass [case testFastParseInvalidTypes2] # flags: --py2 # All of these should not crash from typing import Callable, Tuple, Iterable x = None # type: Tuple[int, str].x # E: invalid type comment or annotation x = None # type: Iterable[x].x # E: invalid type comment or annotation x = None # type: Tuple[x][x] # E: invalid type comment or annotation x = None # type: Iterable[x][x] # E: invalid type comment or annotation x = None # type: Callable[..., int][x] # E: invalid type comment or annotation x = None # type: Callable[..., int].x # E: invalid type comment or annotation x = None # type: Tuple[1] # E: invalid type comment or annotation def f1(x): # E: invalid type comment or annotation # type: (Tuple[int, str].x) -> None pass def f2(x): # E: invalid type comment or annotation # type: (Iterable[x].x) -> None pass def f3(x): # E: invalid type comment or annotation # type: (Tuple[x][x]) -> None pass def f4(x): # E: invalid type comment or annotation # type: (Iterable[x][x]) -> None pass def f5(x): # E: invalid type comment or annotation # type: (Callable[..., int][x]) -> None pass def f6(x): # E: invalid type comment or annotation # type: (Callable[..., int].x) -> None pass def f7(x): # E: invalid type comment or annotation # type: (Tuple[1]) -> None pass [case testFastParseInvalidTypes3] # flags: --python-version 3.6 # All of these should not crash from typing import Callable, Tuple, Iterable x: Tuple[int, str].x # E: invalid type comment or annotation x: Iterable[x].x # E: invalid type comment or annotation x: Tuple[x][x] # E: invalid type comment or annotation x: Iterable[x][x] # 
E: invalid type comment or annotation x: Callable[..., int][x] # E: invalid type comment or annotation x: Callable[..., int].x # E: invalid type comment or annotation x: Tuple[1] # E: invalid type comment or annotation x = None # type: Tuple[int, str].x # E: invalid type comment or annotation x = None # type: Iterable[x].x # E: invalid type comment or annotation x = None # type: Tuple[x][x] # E: invalid type comment or annotation x = None # type: Iterable[x][x] # E: invalid type comment or annotation x = None # type: Callable[..., int][x] # E: invalid type comment or annotation x = None # type: Callable[..., int].x # E: invalid type comment or annotation x = None # type: Tuple[1] # E: invalid type comment or annotation def f1(x: Tuple[int, str].x) -> None: pass # E: invalid type comment or annotation def f2(x: Iterable[x].x) -> None: pass # E: invalid type comment or annotation def f3(x: Tuple[x][x]) -> None: pass # E: invalid type comment or annotation def f4(x: Iterable[x][x]) -> None: pass # E: invalid type comment or annotation def f5(x: Callable[..., int][x]) -> None: pass # E: invalid type comment or annotation def f6(x: Callable[..., int].x) -> None: pass # E: invalid type comment or annotation def f7(x: Tuple[1]) -> None: pass # E: invalid type comment or annotation [case testFastParseProperty] class C: @property def x(self) -> str: pass @x.setter def x(self, value: str) -> None: pass [builtins fixtures/property.pyi] [case testFastParseConditionalProperty] class C: if bool(): @property def x(self) -> str: pass @x.setter def x(self, value: str) -> None: pass [builtins fixtures/property.pyi] [case testFastParsePerArgumentAnnotations] class A: pass class B: pass class C: pass class D: pass class E: pass class F: pass def f(a, # type: A b = None, # type: B *args, # type: C d = None, # type: D e, # type: E **kwargs # type: F ): reveal_type(a) # E: Revealed type is '__main__.A' reveal_type(b) # E: Revealed type is 'Union[__main__.B, builtins.None]' reveal_type(args) # E: Revealed type is 'builtins.tuple[__main__.C]' reveal_type(d) # E: Revealed type is 'Union[__main__.D, builtins.None]' reveal_type(e) # E: Revealed type is '__main__.E' reveal_type(kwargs) # E: Revealed type is 'builtins.dict[builtins.str, __main__.F]' [builtins fixtures/dict.pyi] [out] [case testFastParsePerArgumentAnnotationsWithReturn] class A: pass class B: pass class C: pass class D: pass class E: pass class F: pass def f(a, # type: A b = None, # type: B *args, # type: C d = None, # type: D e, # type: E **kwargs # type: F ): # type: (...) -> int reveal_type(a) # E: Revealed type is '__main__.A' reveal_type(b) # E: Revealed type is 'Union[__main__.B, builtins.None]' reveal_type(args) # E: Revealed type is 'builtins.tuple[__main__.C]' reveal_type(d) # E: Revealed type is 'Union[__main__.D, builtins.None]' reveal_type(e) # E: Revealed type is '__main__.E' reveal_type(kwargs) # E: Revealed type is 'builtins.dict[builtins.str, __main__.F]' return "not an int" # E: Incompatible return value type (got "str", expected "int") [builtins fixtures/dict.pyi] [out] [case testFastParsePerArgumentAnnotationsWithAnnotatedBareStar] def f(*, # type: int # E: bare * has associated type comment x # type: str ): # type: (...) -> int pass [builtins fixtures/dict.pyi] [out] [case testFastParsePerArgumentAnnotationsWithReturnAndBareStar] def f(*, x # type: str ): # type: (...) 
-> int reveal_type(x) # E: Revealed type is 'builtins.str' return "not an int" # E: Incompatible return value type (got "str", expected "int") [builtins fixtures/dict.pyi] [out] [case testFastParsePerArgumentAnnotations_python2] class A: pass class B: pass class C: pass class D: pass def f(a, # type: A b = None, # type: B *args # type: C # kwargs not tested due to lack of 2.7 dict fixtures ): reveal_type(a) # E: Revealed type is '__main__.A' reveal_type(b) # E: Revealed type is 'Union[__main__.B, builtins.None]' reveal_type(args) # E: Revealed type is 'builtins.tuple[__main__.C]' [builtins fixtures/dict.pyi] [out] [case testFastParsePerArgumentAnnotationsWithReturn_python2] class A: pass class B: pass class C: pass class D: pass def f(a, # type: A b = None, # type: B *args # type: C # kwargs not tested due to lack of 2.7 dict fixtures ): # type: (...) -> int reveal_type(a) # E: Revealed type is '__main__.A' reveal_type(b) # E: Revealed type is 'Union[__main__.B, builtins.None]' reveal_type(args) # E: Revealed type is 'builtins.tuple[__main__.C]' return "not an int" # E: Incompatible return value type (got "str", expected "int") [builtins fixtures/dict.pyi] [out] [case testFasterParseTooManyArgumentsAnnotation] def f(): # E: Type signature has too many arguments # type: (int) -> None pass [case testFasterParseTooFewArgumentsAnnotation] def f(x): # E: Type signature has too few arguments # type: () -> None pass [case testFasterParseTypeCommentError_python2] from typing import Tuple def f(a): # type: (Tuple(int, int)) -> int pass [out] main:3: error: invalid type comment or annotation main:3: note: Suggestion: use Tuple[...] instead of Tuple(...) [case testFasterParseTypeErrorList_python2] from typing import List def f(a): # type: (List(int)) -> int pass [out] main:3: error: invalid type comment or annotation main:3: note: Suggestion: use List[...] instead of List(...) [case testFasterParseTypeErrorCustom] from typing import TypeVar, Generic T = TypeVar('T') class Foo(Generic[T]): pass def f(a: Foo(int)) -> int: pass [out] main:7: error: invalid type comment or annotation main:7: note: Suggestion: use Foo[...] instead of Foo(...) [case testFastParseMatMul] from typing import Any x = None # type: Any x @ 1 x @= 1 [case testIncorrectTypeCommentIndex] from typing import Dict x = None # type: Dict[x: y] [out] main:3: error: syntax error in type comment [case testPrintStatementTrailingCommaFastParser_python2] print 0, print 1, 2, [case testFastParserShowsMultipleErrors] def f(x): # E: Type signature has too few arguments # type: () -> None pass def g(): # E: Type signature has too many arguments # type: (int) -> None pass [case testFastParseMalformedAssert] assert 1, 2 assert (1, 2) # W: Assertion is always true, perhaps remove parentheses? assert (1, 2), 3 # W: Assertion is always true, perhaps remove parentheses? assert () assert (1,) # W: Assertion is always true, perhaps remove parentheses? [case testFastParseAssertMessage] assert 1 assert 1, 2 assert 1, 1+2 assert 1, 1+'test' # E: Unsupported operand types for + ("int" and "str") assert 1, f() # E: Name 'f' is not defined [case testFastParserConsistentFunctionTypes] def f(x, y, z): # type: (int, int, int) -> int pass def f(x, # type: int # E: Function has duplicate type signatures y, # type: int z # type: int ): # type: (int, int, int) -> int pass def f(x, # type: int y, # type: int z # type: int ): # type: (...) 
-> int pass def f(x, y, z): # type: (int, int, int) -> int pass def f(x) -> int: # E: Function has duplicate type signatures # type: (int) -> int pass def f(x: int, y: int, z: int): # type: (...) -> int pass def f(x: int): # E: Function has duplicate type signatures # type: (int) -> int pass [case testFastParserDuplicateNames] def f(x, y, z): pass def g(x, y, x): # E: Duplicate argument 'x' in function definition pass def h(x, y, *x): # E: Duplicate argument 'x' in function definition pass def i(x, y, *z, **z): # E: Duplicate argument 'z' in function definition pass def j(x: int, y: int, *, x: int = 3): # E: Duplicate argument 'x' in function definition pass def k(*, y, z, y): # E: Duplicate argument 'y' in function definition pass lambda x, y, x: ... # E: Duplicate argument 'x' in function definition [case testFastParserDuplicateNames_python2] def f(x, y, z): pass def g(x, y, x): # E: Duplicate argument 'x' in function definition pass def h(x, y, *x): # E: Duplicate argument 'x' in function definition pass def i(x, y, *z, **z): # E: Duplicate argument 'z' in function definition pass def j(x, (y, y), z): # E: Duplicate argument 'y' in function definition pass def k(x, (y, x)): # E: Duplicate argument 'x' in function definition pass def l((x, y), (z, x)): # E: Duplicate argument 'x' in function definition pass def m(x, ((x, y), z)): # E: Duplicate argument 'x' in function definition pass lambda x, (y, x): None # E: Duplicate argument 'x' in function definition [case testNoCrashOnImportFromStar] from pack import * [file pack/__init__.py] from . import * [case testNoCrashOnImportFromStarNested] import blamodule [file blamodule/__init__.py] from . import command from . import backends [file blamodule/backends/__init__.py] from .Bla import Bla reveal_type(Bla().method()) # E: Revealed type is 'builtins.str' [file blamodule/backends/Bla.py] from .. import * class Bla: def method(self) -> str: return command.call() [file blamodule/command.py] def call() -> str: pass [case testNoCrashOnImportFromStarPython2] # flags: --py2 from . import * # E: No parent module -- cannot perform relative import [case testSpuriousTrailingComma_python2] from typing import Optional def update_state(tid, # type: int vid, # type: int update_ts=None, # type: Optional[float], ): # type: (...) -> str pass [out] main:3: error: Invalid tuple literal type main:3: note: Suggestion: Is there a spurious trailing comma? 
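-- Illustrative sketch (not part of the original suite): the fast-parser cases above
-- exercise per-argument type comments, where each parameter carries its own
-- "# type:" comment and the return type comment either spells out every argument
-- type or abbreviates them as "(...)". The function below is an invented example
-- of the "(...)" form.
def scale(values,     # type: list
          factor=2,   # type: int
          ):
    # type: (...) -> list
    return [v * factor for v in values]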
mypy-0.560/test-data/unit/check-flags.test0000644€tŠÔÚ€2›s®0000006217613215007205024620 0ustar jukkaDROPBOX\Domain Users00000000000000[case testUnannotatedFunction] # flags: --disallow-untyped-defs def f(x): pass [out] main:2: error: Function is missing a type annotation [case testUnannotatedArgument] # flags: --disallow-untyped-defs def f(x) -> int: pass [out] main:2: error: Function is missing a type annotation for one or more arguments [case testNoArgumentFunction] # flags: --disallow-untyped-defs def f() -> int: pass [out] [case testUnannotatedReturn] # flags: --disallow-untyped-defs def f(x: int): pass [out] main:2: error: Function is missing a return type annotation [case testUnannotatedReturnWithFastParser] # flags: --disallow-untyped-defs def f(x: int): pass [out] main:2: error: Function is missing a return type annotation [case testLambda] # flags: --disallow-untyped-defs lambda x: x [out] [case testUntypedDef] # flags: --disallow-untyped-defs def f(): 1 + "str" [out] main:2: error: Function is missing a type annotation [case testUntypedAsyncDef] # flags: --disallow-untyped-defs async def f(): # E: Function is missing a type annotation pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncUnannotatedArgument] # flags: --disallow-untyped-defs async def f(x) -> None: # E: Function is missing a type annotation for one or more arguments pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testAsyncUnannotatedReturn] # flags: --disallow-untyped-defs from typing import Any async def f(x: int): # E: Function is missing a return type annotation pass # Make sure explicit Any is allowed. async def g(x: int) -> Any: pass [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [case testDisallowUntypedDefsUntypedDecorator] # flags: --disallow-untyped-decorators def d(p): return p @d # E: Untyped decorator makes function "f" untyped def f(i: int) -> int: return i [case testDisallowUntypedDecoratorsUnresolvedDecorator] # flags: --disallow-untyped-decorators --ignore-missing-imports from nonexistent import d @d # E: Untyped decorator makes function "f" untyped def f(i: int) -> int: return i [case testDisallowUntypedDecoratorUntypedDef] # flags: --disallow-untyped-decorators def d(p): return p @d # no error def f(): pass [case testDisallowUntypedDecoratorsPartialFunction] # flags: --disallow-untyped-decorators def d(p): return p @d # E: Untyped decorator makes function "f" untyped def f(x) -> None: pass @d # E: Untyped decorator makes function "g" untyped def g(x, y: int): pass @d # E: Untyped decorator makes function "h" untyped def h(x: int): pass [case testDisallowUntypedDecoratorsImpreciseDecorator] # flags: --disallow-untyped-decorators from typing import Any def d(p) -> Any: return p @d # no error def f() -> None: pass [case testDisallowUntypedDecoratorsMultipleDecorators] # flags: --disallow-untyped-decorators from typing import Any def d1(p): return p def d2(p): return p def d3(p) -> Any: return p @d1 # E: Untyped decorator makes function "f" untyped @d2 # E: Untyped decorator makes function "f" untyped @d3 # no error @d1 # E: Untyped decorator makes function "f" untyped def f() -> None: pass [case testSubclassingAny] # flags: --disallow-subclassing-any from typing import Any FakeClass = None # type: Any class Foo(FakeClass): pass # E: Class cannot subclass 'FakeClass' (has type 'Any') [out] [case testSubclassingAnyMultipleBaseClasses] # flags: --disallow-subclassing-any from typing import Any FakeClass = None # 
type: Any class ActualClass: pass class Foo(ActualClass, FakeClass): pass # E: Class cannot subclass 'FakeClass' (has type 'Any') [out] [case testSubclassingAnySilentImports] # flags: --disallow-subclassing-any --follow-imports=skip # cmd: mypy -m main [file main.py] from ignored_module import BaseClass class Foo(BaseClass): pass [file ignored_module.py] class BaseClass: pass [out] tmp/main.py:2: error: Class cannot subclass 'BaseClass' (has type 'Any') [case testSubclassingAnySilentImports2] # flags: --disallow-subclassing-any --follow-imports=skip # cmd: mypy -m main [file main.py] import ignored_module class Foo(ignored_module.BaseClass): pass [file ignored_module.py] class BaseClass: pass [out] tmp/main.py:2: error: Class cannot subclass 'BaseClass' (has type 'Any') [case testWarnNoReturnIgnoresTrivialFunctions] # flags: --warn-no-return def f() -> int: pass def g() -> int: ... def h() -> int: """with docstring""" pass def i() -> int: """with docstring""" ... def j() -> int: u"""with unicode docstring""" pass def k() -> int: """docstring only""" [case testWarnNoReturnWorksWithAlwaysTrue] # flags: --warn-no-return PY3 = True def f() -> int: if PY3: return 0 else: return 0 [builtins fixtures/bool.pyi] [case testWarnNoReturnWorksWithAlwaysFalse] # flags: --warn-no-return PY2 = False def f() -> int: if PY2: return 0 else: return 0 [builtins fixtures/bool.pyi] [case testWarnNoReturnWorksWithMypyTrue] # flags: --warn-no-return MYPY = False def f() -> int: if MYPY: return 0 else: return 0 [builtins fixtures/bool.pyi] [case testNoReturnDisallowsReturn] # flags: --warn-no-return from mypy_extensions import NoReturn def f() -> NoReturn: if bool(): return 5 # E: Return statement in function which does not return else: return # E: Return statement in function which does not return [builtins fixtures/dict.pyi] [case testNoReturnWithoutImplicitReturn] # flags: --warn-no-return from mypy_extensions import NoReturn def no_return() -> NoReturn: pass def f() -> NoReturn: no_return() [builtins fixtures/dict.pyi] [case testNoReturnDisallowsImplicitReturn] # flags: --warn-no-return from mypy_extensions import NoReturn def f() -> NoReturn: # N: Implicit return in function which does not return non_trivial_function = 1 [builtins fixtures/dict.pyi] [case testNoReturnNoWarnNoReturn] # flags: --warn-no-return from mypy_extensions import NoReturn def no_return() -> NoReturn: pass def f() -> int: if bool(): return 0 else: no_return() [builtins fixtures/dict.pyi] [case testNoReturnInExpr] # flags: --warn-no-return from mypy_extensions import NoReturn def no_return() -> NoReturn: pass def f() -> int: return 0 reveal_type(f() or no_return()) # E: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] [case testNoReturnVariable] # flags: --warn-no-return from mypy_extensions import NoReturn x = 0 # type: NoReturn # E: Incompatible types in assignment (expression has type "int", variable has type "NoReturn") [builtins fixtures/dict.pyi] [case testNoReturnImportFromTyping] from typing import NoReturn def h() -> NoReturn: if bool(): return 5 # E: Return statement in function which does not return else: return # E: Return statement in function which does not return def no_return() -> NoReturn: pass def f() -> NoReturn: no_return() x: NoReturn = 0 # E: Incompatible types in assignment (expression has type "int", variable has type "NoReturn") [builtins fixtures/dict.pyi] [case testShowErrorContextFunction] # flags: --show-error-context def f() -> None: 0 + "" [out] main: note: In function "f": main:3: error: 
Unsupported operand types for + ("int" and "str") [case testShowErrorContextClass] # flags: --show-error-context class A: 0 + "" [out] main: note: In class "A": main:3: error: Unsupported operand types for + ("int" and "str") [case testShowErrorContextMember] # flags: --show-error-context class A: def f(self, x: int) -> None: self.f("") [out] main: note: In member "f" of class "A": main:4: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testShowErrorContextModule] # flags: --show-error-context import m [file m.py] 0 + "" [out] main:2: note: In module imported here: tmp/m.py:1: error: Unsupported operand types for + ("int" and "str") [case testShowErrorContextTopLevel] # flags: --show-error-context def f() -> None: 0 + "" 0 + "" [out] main: note: In function "f": main:3: error: Unsupported operand types for + ("int" and "str") main: note: At top level: main:4: error: Unsupported operand types for + ("int" and "str") [case testShowErrorContextFromHere] # flags: --show-error-context import a [file a.py] import b [file b.py] 0 + "" [out] tmp/a.py:1: note: In module imported here, main:2: note: ... from here: tmp/b.py:1: error: Unsupported operand types for + ("int" and "str") [case testFollowImportsNormal] # flags: --follow-imports=normal from mod import x x + "" [file mod.py] 1 + "" x = 0 [out] tmp/mod.py:1: error: Unsupported operand types for + ("int" and "str") main:3: error: Unsupported operand types for + ("int" and "str") [case testFollowImportsSilent] # flags: --follow-imports=silent from mod import x x + "" # E: Unsupported operand types for + ("int" and "str") [file mod.py] 1 + "" x = 0 [case testFollowImportsSkip] # flags: --follow-imports=skip from mod import x x + "" [file mod.py] this deliberate syntax error will not be reported [out] [case testFollowImportsError] # flags: --follow-imports=error from mod import x x + "" [file mod.py] deliberate syntax error [out] main:2: note: Import of 'mod' ignored main:2: note: (Using --follow-imports=error, module not passed on command line) [case testIgnoreMissingImportsFalse] from mod import x [out] main:1: error: Cannot find module named 'mod' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testIgnoreMissingImportsTrue] # flags: --ignore-missing-imports from mod import x [out] [case testStrictBoolean] # flags: --strict-boolean if True: pass if 'test': # E: Condition must be a boolean pass elif 1: # E: Condition must be a boolean pass def f() -> bool: return True if f: # E: Condition must be a boolean pass if f(): pass class A: def __call__(self) -> bool: return False if A: # E: Condition must be a boolean pass if A(): # E: Condition must be a boolean pass if A()(): pass [builtins fixtures/bool.pyi] [case testStrictBooleanTernary] # flags: --strict-boolean x = 1 if 'test' else 2 # E: Condition must be a boolean y = 1 if not 'test' else 2 [builtins fixtures/bool.pyi] [case testStrictBooleanWhile] # flags: --strict-boolean while 5: # E: Condition must be a boolean pass while False: pass [builtins fixtures/bool.pyi] [case testStrictBooleanComplexTypes] # flags: --strict-boolean from typing import Any, Type, Union x = True # type: Any y = True # type: Union[bool, int] z = int # type: Type[int] if x: pass if y: # E: Condition must be a boolean pass if z: # E: Condition must be a boolean pass [builtins fixtures/bool.pyi] [case testPerFileStrictOptionalBasic] # flags: --config-file tmp/mypy.ini import standard, optional [file standard.py] x = 0 x = None [file 
optional.py] x = 0 x = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [file mypy.ini] [[mypy] strict_optional = False [[mypy-optional] strict_optional = True [case testPerFileStrictOptionalBasicImportStandard] # flags: --config-file tmp/mypy.ini import standard, optional [file standard.py] from typing import Optional def f(x: int) -> None: pass an_int = 0 # type: int optional_int = None # type: Optional[int] f(an_int) # ints can be used as ints f(optional_int) # optional ints can be used as ints in this file [file optional.py] import standard def f(x: int) -> None: pass standard.an_int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") standard.optional_int = None # OK -- explicitly declared as optional f(standard.an_int) # ints can be used as ints f(standard.optional_int) # E: Argument 1 to "f" has incompatible type "None"; expected "int" [file mypy.ini] [[mypy] strict_optional = False [[mypy-optional] strict_optional = True [case testPerFileStrictOptionalBasicImportOptional] # flags: --config-file tmp/mypy.ini import standard, optional [file standard.py] import optional def f(x: int) -> None: pass f(optional.x) # OK -- in non-strict Optional context f(optional.y) # OK -- in non-strict Optional context [file optional.py] from typing import Optional def f(x: int) -> None: pass x = 0 # type: Optional[int] y = None # type: None [file mypy.ini] [[mypy] strict_optional = False [[mypy-optional] strict_optional = True [case testPerFileStrictOptionalListItemImportOptional] # flags: --config-file tmp/mypy.ini import standard, optional [file standard.py] import optional from typing import List def f(x: List[int]) -> None: pass f(optional.x) # OK -- in non-strict Optional context f(optional.y) # OK -- in non-strict Optional context [file optional.py] from typing import Optional, List def f(x: List[int]) -> None: pass x = [] # type: List[Optional[int]] y = [] # type: List[int] [file mypy.ini] [[mypy] strict_optional = False [[mypy-optional] strict_optional = True [builtins fixtures/list.pyi] [case testPerFileStrictOptionalComplicatedList] from typing import Union, Optional, List def f() -> None: x = [] # type: Union[List[Optional[str]], str] [builtins fixtures/list.pyi] [case testPerFileStrictOptionalNoneArguments] # flags: --config-file tmp/mypy.ini import standard, optional [file standard.py] def f(x: int = None) -> None: pass [file optional.py] import standard def f(x: int = None) -> None: pass standard.f(None) [file mypy.ini] [[mypy] strict_optional = False [[mypy-optional] strict_optional = True [case testDisallowImplicitTypesIgnoreMissingTypes] # flags: --ignore-missing-imports --disallow-any-unimported from missing import MyType def f(x: MyType) -> None: # E: Argument 1 to "f" becomes "Any" due to an unfollowed import pass [case testDisallowImplicitTypes] # flags: --disallow-any-unimported from missing import MyType def f(x: MyType) -> None: pass [out] main:2: error: Cannot find module named 'missing' main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:4: error: Argument 1 to "f" becomes "Any" due to an unfollowed import [case testDisallowImplicitAnyVariableDefinition] # flags: --ignore-missing-imports --disallow-any-unimported from missing import Unchecked t: Unchecked = 12 # E: Type of variable becomes "Any" due to an unfollowed import [case testDisallowImplicitAnyGeneric] # flags: --ignore-missing-imports --disallow-any-unimported from missing 
import Unchecked from typing import List def foo(l: List[Unchecked]) -> List[Unchecked]: t = [] # type: List[Unchecked] return l [builtins fixtures/list.pyi] [out] main:5: error: Return type becomes "List[Any]" due to an unfollowed import main:5: error: Argument 1 to "foo" becomes "List[Any]" due to an unfollowed import main:6: error: Type of variable becomes "List[Any]" due to an unfollowed import [case testDisallowImplicitAnyInherit] # flags: --ignore-missing-imports --disallow-any-unimported from missing import Unchecked from typing import List class C(Unchecked): # E: Base type Unchecked becomes "Any" due to an unfollowed import pass class A(List[Unchecked]): # E: Base type becomes "List[Any]" due to an unfollowed import pass [builtins fixtures/list.pyi] [case testDisallowImplicitAnyAlias] # flags: --ignore-missing-imports --disallow-any-unimported from missing import Unchecked from typing import List X = List[Unchecked] def f(x: X) -> None: # E: Argument 1 to "f" becomes "List[Any]" due to an unfollowed import pass [builtins fixtures/list.pyi] [case testDisallowImplicitAnyCast] # flags: --ignore-missing-imports --disallow-any-unimported from missing import Unchecked from typing import List, cast foo = [1, 2, 3] cast(List[Unchecked], foo) # E: Target type of cast becomes "List[Any]" due to an unfollowed import cast(Unchecked, foo) # E: Target type of cast becomes "Any" due to an unfollowed import [builtins fixtures/list.pyi] [case testDisallowImplicitAnyNamedTuple] # flags: --ignore-missing-imports --disallow-any-unimported from typing import List, NamedTuple from missing import Unchecked Point = NamedTuple('Point', [('x', List[Unchecked]), ('y', Unchecked)]) [builtins fixtures/list.pyi] [out] main:5: error: NamedTuple type becomes "Tuple[List[Any], Any]" due to an unfollowed import [case testDisallowImplicitAnyTypeVarConstraints] # flags: --ignore-missing-imports --disallow-any-unimported from typing import List, NamedTuple, TypeVar, Any from missing import Unchecked T = TypeVar('T', Unchecked, List[Unchecked], str) [builtins fixtures/list.pyi] [out] main:5: error: Constraint 1 becomes "Any" due to an unfollowed import main:5: error: Constraint 2 becomes "List[Any]" due to an unfollowed import [case testDisallowImplicitAnyNewType] # flags: --ignore-missing-imports --disallow-any-unimported from typing import NewType, List from missing import Unchecked Baz = NewType('Baz', Unchecked) # E: Argument 2 to NewType(...) must be subclassable (got "Any") Bar = NewType('Bar', List[Unchecked]) # E: Argument 2 to NewType(...) 
becomes "List[Any]" due to an unfollowed import [builtins fixtures/list.pyi] [case testDisallowImplicitAnyCallableAndTuple] # flags: --ignore-missing-imports --disallow-any-unimported from typing import Callable, Tuple from missing import Unchecked def foo(f: Callable[[], Unchecked]) -> Tuple[Unchecked]: return f() [builtins fixtures/list.pyi] [out] main:5: error: Return type becomes "Tuple[Any]" due to an unfollowed import main:5: error: Argument 1 to "foo" becomes "Callable[[], Any]" due to an unfollowed import [case testDisallowImplicitAnySubclassingExplicitAny] # flags: --ignore-missing-imports --disallow-any-unimported --disallow-subclassing-any from typing import Any class C(Any): # E: Class cannot subclass 'Any' (has type 'Any') pass [case testDisallowImplicitAnyVarDeclaration] # flags: --ignore-missing-imports --disallow-any-unimported from missing import Unchecked foo: Unchecked = "" foo = "" x, y = 1, 2 # type: Unchecked, Unchecked [out] main:4: error: Type of variable becomes "Any" due to an unfollowed import main:6: error: A type on this line becomes "Any" due to an unfollowed import [case testDisallowUnimportedAnyTypedDictSimple] # flags: --ignore-missing-imports --disallow-any-unimported from mypy_extensions import TypedDict from x import Unchecked M = TypedDict('M', {'x': str, 'y': Unchecked}) # E: Type of a TypedDict key becomes "Any" due to an unfollowed import def f(m: M) -> M: pass # no error [builtins fixtures/dict.pyi] [case testDisallowUnimportedAnyTypedDictGeneric] # flags: --ignore-missing-imports --disallow-any-unimported from mypy_extensions import TypedDict from typing import List from x import Unchecked M = TypedDict('M', {'x': str, 'y': List[Unchecked]}) # E: Type of a TypedDict key becomes "List[Any]" due to an unfollowed import def f(m: M) -> M: pass # no error [builtins fixtures/dict.pyi] [case testDisallowAnyDecoratedUnannotatedDecorator] # flags: --disallow-any-decorated from typing import Any def d(f): return f @d def f(x: Any) -> Any: # E: Function is untyped after decorator transformation pass @d def h(x): # E: Function is untyped after decorator transformation pass [builtins fixtures/list.pyi] [case testDisallowAnyDecoratedErrorIsReportedOnlyOnce] # flags: --disallow-any-decorated def d(f): return f def d2(f): return f @d @d2 @d def f(x: int) -> None: pass # E: Function is untyped after decorator transformation [case testDisallowAnyDecoratedReturnAny] # flags: --disallow-any-decorated from typing import Any def d(f) -> Any: return f @d def f() -> None: pass # E: Function is untyped after decorator transformation [builtins fixtures/list.pyi] [case testDisallowAnyDecoratedReturnCallable] # flags: --disallow-any-decorated from typing import Any, Callable def d(f) -> Callable[..., None]: return f @d def g(i: int, s: str) -> None: pass # E: Type of decorated function contains type "Any" ("Callable[..., None]") [builtins fixtures/list.pyi] [case testDisallowAnyDecoratedNonexistentDecorator] # flags: --disallow-any-decorated --ignore-missing-imports from nonexistent import d @d def f() -> None: pass # E: Function is untyped after decorator transformation [builtins fixtures/list.pyi] [case testDisallowAnyDecoratedPartlyTypedCallable] # flags: --disallow-any-decorated --ignore-missing-imports from typing import Callable, Any, List def d(f) -> Callable[[int, Any], Any]: pass def d2(f) -> Callable[[int], List[Any]]: pass def d3(f) -> Callable[[Any], List[str]]: pass @d def f(i: int, s: str) -> None: # E: Type of decorated function contains type "Any" 
("Callable[[int, Any], Any]") pass @d2 def g(i: int) -> None: # E: Type of decorated function contains type "Any" ("Callable[[int], List[Any]]") pass @d3 def h(i: int) -> None: # E: Type of decorated function contains type "Any" ("Callable[[Any], List[str]]") pass [builtins fixtures/list.pyi] [case testDisallowAnyDecoratedReturnsCallableNoParams] # flags: --disallow-any-decorated from typing import Callable def d(p) -> Callable[[], int]: return p @d def f(i): return i [builtins fixtures/list.pyi] [case testDisallowAnyDecoratedDecoratorReturnsNonCallable] # flags: --disallow-any-decorated def d(p) -> int: return p(0) @d def f(i): return i [case testDisallowAnyDecoratedUntypedUndecoratedFunction] # flags: --disallow-any-decorated from typing import Callable def f(i): # no error return i [case testDisallowAnyDecoratedTwoDecorators] # flags: --disallow-any-decorated from typing import Callable def typed_dec(f) -> Callable[[], int]: pass def untyped_dec(f): pass @typed_dec @untyped_dec def f(): # no error return i @untyped_dec @typed_dec def g(): # E: Function is untyped after decorator transformation return i [case testDisallowAnyExprSimple] # flags: --disallow-any-expr from typing import Any def f(s): yield s x = f(0) # E: Expression has type "Any" for x in f(0): # E: Expression has type "Any" g(x) # E: Expression has type "Any" def g(x) -> Any: yield x # E: Expression has type "Any" l = [1, 2, 3] l[f(0)] # E: Expression has type "Any" f(l) f(f(0)) # E: Expression has type "Any" [builtins fixtures/list.pyi] [case testDisallowAnyExprUnannotatedFunction] # flags: --disallow-any-expr def g(s): return s g(0) w: int = g(1) [case testDisallowAnyExprExplicitAnyParam] # flags: --disallow-any-expr from typing import Any, List def f(s: Any) -> None: pass def g(s: List[Any]) -> None: pass f(0) # type of list below is inferred with expected type of "List[Any]", so that becomes it's type # instead of List[str] g(['']) # E: Expression type contains "Any" (has type "List[Any]") [builtins fixtures/list.pyi] [case testDisallowAnyExprAllowsAnyInCast] # flags: --disallow-any-expr from typing import Any, cast class Foo: g: Any = 2 z = cast(int, Foo().g) m = cast(Any, Foo().g) # E: Expression has type "Any" k = Foo.g # E: Expression has type "Any" [builtins fixtures/list.pyi] [case testDisallowAnyExprAllowsAnyInVariableAssignmentWithExplicitTypeAnnotation] # flags: --disallow-any-expr from typing import Any class Foo: g: Any = 2 z: int = Foo().g x = Foo().g # type: int m: Any = Foo().g # E: Expression has type "Any" n = Foo().g # type: Any # E: Expression has type "Any" [builtins fixtures/list.pyi] [case testDisallowAnyExprGeneric] # flags: --disallow-any-expr from typing import List l: List = [] l.append(1) # E: Expression type contains "Any" (has type "List[Any]") k = l[0] # E: Expression type contains "Any" (has type "List[Any]") # E: Expression has type "Any" [builtins fixtures/list.pyi] [case testDisallowAnyExprTypeVar] # flags: --disallow-any-expr from typing import TypeVar T = TypeVar('T') # no error def f(t: T) -> T: return t [builtins fixtures/list.pyi] [case testDisallowAnyExprNamedTuple] # flags: --disallow-any-expr from typing import NamedTuple Point = NamedTuple('Point', [('x', int), ('y', int)]) # no error def origin() -> Point: return Point(x=0, y=0) [builtins fixtures/list.pyi] [case testDisallowAnyExprNewType] # flags: --disallow-any-expr from typing import NewType NT = NewType('NT', int) # no error def nt() -> NT: return NT(1) [builtins fixtures/list.pyi] [case testDisallowAnyExprEnum] # flags: 
--disallow-any-expr
from enum import Enum
E = Enum('E', '1, 2, 3')  # no error
def k(s: E) -> None: pass
[builtins fixtures/list.pyi]

[case testDisallowAnyExprTypedDict]
# flags: --disallow-any-expr
from mypy_extensions import TypedDict
Movie = TypedDict('Movie', {'name': str, 'year': int})
def g(m: Movie) -> Movie: return m
[builtins fixtures/dict.pyi]

[case testDisallowIncompleteDefs]
# flags: --disallow-incomplete-defs
def f(i: int):  # E: Function is missing a return type annotation
    pass
def g(i) -> None:  # E: Function is missing a type annotation for one or more arguments
    pass
def h(i: int) -> int:  # no error
    return i
def i() -> None:  # no error
    pass

[case testDisallowIncompleteDefsNoReturn]
# flags: --disallow-incomplete-defs --disallow-untyped-defs
def f(i: int):  # E: Function is missing a return type annotation
    pass

[case testDisallowIncompleteDefsSelf]
# flags: --disallow-incomplete-defs
class C:
    def foo(self) -> None:  # no error
        pass

[case testDisallowIncompleteDefsPartiallyAnnotatedParams]
# flags: --disallow-incomplete-defs
def f(i: int, s): pass
[out]
main:3: error: Function is missing a return type annotation
main:3: error: Function is missing a type annotation for one or more arguments

mypy-0.560/test-data/unit/check-functions.test

-- Test cases for the type checker related to functions, function types and
-- calls.
-- See also check-varargs.test.

-- Callable type basics
-- --------------------

[case testCallingVariableWithFunctionType]
from typing import Callable
f = None  # type: Callable[[A], B]
a, b = None, None  # type: (A, B)
a = f(a)     # E: Incompatible types in assignment (expression has type "B", variable has type "A")
b = f(b)     # E: Argument 1 has incompatible type "B"; expected "A"
b = f()      # E: Too few arguments
b = f(a, a)  # E: Too many arguments
b = f(a)
class A: pass
class B: pass

[case testKeywordOnlyArgumentOrderInsensitivity]
import typing
class A(object):
    def f(self, *, a: int, b: str) -> None: pass
class B(A):
    def f(self, *, b: str, a: int) -> None: pass
class C(A):
    def f(self, *, b: int, a: str) -> None: pass  # E: Signature of "f" incompatible with supertype "A"

[case testPositionalOverridingArgumentNameInsensitivity]
import typing
class A(object):
    def f(self, a: int, b: str) -> None: pass
class B(A):
    def f(self, b: str, a: int) -> None: pass  # E: Argument 1 of "f" incompatible with supertype "A"  # E: Argument 2 of "f" incompatible with supertype "A"
class C(A):
    def f(self, foo: int, bar: str) -> None: pass

[case testPositionalOverridingArgumentNamesCheckedWhenMismatchingPos]
import typing
class A(object):
    def f(self, a: int, b: str) -> None: pass
class B(A):
    def f(self, b: int, a: str) -> None: pass  # E: Signature of "f" incompatible with supertype "A"

[case testSubtypingFunctionTypes]
from typing import Callable
class A: pass
class B(A): pass
f = None  # type: Callable[[B], A]
g = None  # type: Callable[[A], A]  # subtype of f
h = None  # type: Callable[[B], B]  # subtype of f
g = h  # E: Incompatible types in assignment (expression has type "Callable[[B], B]", variable has type "Callable[[A], A]")
h = f  # E: Incompatible types in assignment (expression has type "Callable[[B], A]", variable has type "Callable[[B], B]")
h = g  # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[B], B]")
g = f  # E: Incompatible types in assignment (expression has type "Callable[[B], A]", variable has type "Callable[[A], A]")
f = g
f = h
f =
f g = g h = h [case testSubtypingFunctionsDoubleCorrespondence] def l(x) -> None: ... def r(__, *, x) -> None: ... r = l # E: Incompatible types in assignment (expression has type "Callable[[Any], None]", variable has type "Callable[[Any, NamedArg(Any, 'x')], None]") [case testSubtypingFunctionsRequiredLeftArgNotPresent] def l(x, y) -> None: ... def r(x) -> None: ... r = l # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], None]", variable has type "Callable[[Any], None]") [case testSubtypingFunctionsImplicitNames] from typing import Any def f(a, b): pass def g(c: Any, d: Any) -> Any: pass ff = f gg = g gg = f ff = g [case testSubtypingFunctionsDefaultsNames] from typing import Callable def f(a: int, b: str) -> None: pass f_nonames = None # type: Callable[[int, str], None] def g(a: int, b: str = "") -> None: pass def h(aa: int, b: str = "") -> None: pass ff_nonames = f_nonames ff = f gg = g hh = h ff = gg ff_nonames = ff ff_nonames = f_nonames # reset ff = ff_nonames # E: Incompatible types in assignment (expression has type "Callable[[int, str], None]", variable has type "Callable[[Arg(int, 'a'), Arg(str, 'b')], None]") ff = f # reset gg = ff # E: Incompatible types in assignment (expression has type "Callable[[Arg(int, 'a'), Arg(str, 'b')], None]", variable has type "Callable[[Arg(int, 'a'), DefaultArg(str, 'b')], None]") gg = hh # E: Incompatible types in assignment (expression has type "Callable[[Arg(int, 'aa'), DefaultArg(str, 'b')], None]", variable has type "Callable[[Arg(int, 'a'), DefaultArg(str, 'b')], None]") [case testSubtypingFunctionsArgsKwargs] from typing import Any, Callable def everything(*args: Any, **kwargs: Any) -> None: pass everywhere = None # type: Callable[..., None] def specific_1(a: int, b: str) -> None: pass def specific_2(a: int, *, b: str) -> None: pass ss_1 = specific_1 ss_2 = specific_2 ee_def = everything ee_var = everywhere ss_1 = ee_def ss_1 = specific_1 ss_2 = ee_def ss_2 = specific_2 ee_def = everywhere ee_def = everything ee_var = everything ee_var = everywhere ee_var = specific_1 # The difference between Callable[..., blah] and one with a *args: Any, **kwargs: Any is that the ... goes loosely both ways. 
ee_def = specific_1 # E: Incompatible types in assignment (expression has type "Callable[[int, str], None]", variable has type "Callable[[VarArg(Any), KwArg(Any)], None]") [builtins fixtures/dict.pyi] [case testSubtypingFunctionsDecorated] from typing import Any # untyped decorator def deco(f): pass class A: @deco def f(self) -> Any: pass class B(A): @deco def f(self) -> Any: pass [builtins fixtures/list.pyi] [case testLackOfNames] def f(__a: int, __b: str) -> None: pass def g(a: int, b: str) -> None: pass ff = f gg = g ff = g gg = f # E: Incompatible types in assignment (expression has type "Callable[[int, str], None]", variable has type "Callable[[Arg(int, 'a'), Arg(str, 'b')], None]") [case testLackOfNamesFastparse] def f(__a: int, __b: str) -> None: pass def g(a: int, b: str) -> None: pass ff = f gg = g ff = g gg = f # E: Incompatible types in assignment (expression has type "Callable[[int, str], None]", variable has type "Callable[[Arg(int, 'a'), Arg(str, 'b')], None]") [case testFunctionTypeCompatibilityWithOtherTypes] from typing import Callable f = None # type: Callable[[], None] a, o = None, None # type: (A, object) a = f # E: Incompatible types in assignment (expression has type "Callable[[], None]", variable has type "A") f = a # E: Incompatible types in assignment (expression has type "A", variable has type "Callable[[], None]") f = o # E: Incompatible types in assignment (expression has type "object", variable has type "Callable[[], None]") f = f() # E: Function does not return a value f = f f = None o = f class A: pass [case testFunctionSubtypingWithVoid] from typing import Callable f = None # type: Callable[[], None] g = None # type: Callable[[], object] f = g # E: Incompatible types in assignment (expression has type "Callable[[], object]", variable has type "Callable[[], None]") g = f # OK f = f g = g [case testFunctionSubtypingWithMultipleArgs] from typing import Callable f = None # type: Callable[[A, A], None] g = None # type: Callable[[A, B], None] h = None # type: Callable[[B, B], None] f = g # E: Incompatible types in assignment (expression has type "Callable[[A, B], None]", variable has type "Callable[[A, A], None]") f = h # E: Incompatible types in assignment (expression has type "Callable[[B, B], None]", variable has type "Callable[[A, A], None]") g = h # E: Incompatible types in assignment (expression has type "Callable[[B, B], None]", variable has type "Callable[[A, B], None]") g = f h = f h = g f = f g = g h = h class A: pass class B(A): pass [case testFunctionTypesWithDifferentArgumentCounts] from typing import Callable f = None # type: Callable[[], None] g = None # type: Callable[[A], None] h = None # type: Callable[[A, A], None] f = g # E: Incompatible types in assignment (expression has type "Callable[[A], None]", variable has type "Callable[[], None]") f = h # E: Incompatible types in assignment (expression has type "Callable[[A, A], None]", variable has type "Callable[[], None]") h = f # E: Incompatible types in assignment (expression has type "Callable[[], None]", variable has type "Callable[[A, A], None]") h = g # E: Incompatible types in assignment (expression has type "Callable[[A], None]", variable has type "Callable[[A, A], None]") f = f g = g h = h class A: pass [out] [case testCompatibilityOfSimpleTypeObjectWithStdType] t = None # type: type a = None # type: A a = A # E: Incompatible types in assignment (expression has type "Type[A]", variable has type "A") t = f # E: Incompatible types in assignment (expression has type "Callable[[], None]", 
variable has type "type") t = A class A: def __init__(self, a: 'A') -> None: pass def f() -> None: pass [case testFunctionTypesWithOverloads] from foo import * [file foo.pyi] from typing import Callable, overload f = None # type: Callable[[AA], A] g = None # type: Callable[[B], B] h = None # type: Callable[[A], AA] h = i # E: Incompatible types in assignment (expression has type overloaded function, variable has type "Callable[[A], AA]") f = j f = i g = i g = j class A: pass class AA(A): pass class B: pass @overload def i(x: AA) -> A: pass @overload def i(x: B) -> B: pass @overload def j(x: B) -> B: pass @overload def j(x: A) -> AA: pass [case testOverloadWithThreeItems] from foo import * [file foo.pyi] from typing import Callable, overload g1 = None # type: Callable[[A], A] g2 = None # type: Callable[[B], B] g3 = None # type: Callable[[C], C] g4 = None # type: Callable[[A], B] a, b, c = None, None, None # type: (A, B, C) b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = f(c) # E: Incompatible types in assignment (expression has type "C", variable has type "B") g4 = f # E: Incompatible types in assignment (expression has type overloaded function, variable has type "Callable[[A], B]") g1 = f g2 = f g3 = f a = f(a) b = f(b) c = f(c) class A: pass class B: pass class C: pass @overload def f(x: A) -> A: pass @overload def f(x: B) -> B: pass @overload def f(x: C) -> C: pass [case testInferConstraintsUnequalLengths] from typing import Any, Callable, List def f(fields: List[Callable[[Any], Any]]): pass class C: pass f([C]) # E: List item 0 has incompatible type "Type[C]"; expected "Callable[[Any], Any]" class D: def __init__(self, a, b): pass f([D]) # E: List item 0 has incompatible type "Type[D]"; expected "Callable[[Any], Any]" [builtins fixtures/list.pyi] [case testSubtypingTypeTypeAsCallable] from typing import Callable, Type class A: pass x = None # type: Callable[..., A] y = None # type: Type[A] x = y [case testSubtypingCallableAsTypeType] from typing import Callable, Type class A: pass x = None # type: Callable[..., A] y = None # type: Type[A] y = x # E: Incompatible types in assignment (expression has type "Callable[..., A]", variable has type "Type[A]") -- Default argument values -- ----------------------- [case testCallingFunctionsWithDefaultArgumentValues] a, b = None, None # type: (A, B) a = f() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "Optional[A]" b = f(a, a) # E: Too many arguments for "f" b = f() b = f(a) b = f(AA()) def f(x: 'A' = None) -> 'B': pass class A: pass class AA(A): pass class B: pass [case testDefaultArgumentExpressions] import typing def f(x: 'A' = A()) -> None: b = x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = x # type: A class B: pass class A: pass [out] [case testDefaultArgumentExpressions2] import typing def f(x: 'A' = B()) -> None: # E: Incompatible default for argument "x" (default has type "B", argument has type "A") b = x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = x # type: A class B: pass class A: pass [case testDefaultArgumentExpressionsGeneric] from typing import TypeVar T = TypeVar('T', bound='A') def f(x: T = B()) -> None: # E: Incompatible default for argument "x" (default has type "B", 
argument has type "T") b = x # type: B # E: Incompatible types in assignment (expression has type "T", variable has type "B") a = x # type: A class B: pass class A: pass [case testDefaultArgumentExpressionsPython2] # flags: --python-version 2.7 from typing import Tuple def f(x = B()): # E: Incompatible default for argument "x" (default has type "B", argument has type "A") # type: (A) -> None b = x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = x # type: A class B: pass class A: pass [case testDefaultTupleArgumentExpressionsPython2] # flags: --python-version 2.7 from typing import Tuple def f((x, y) = (A(), B())): # E: Incompatible default for tuple argument 1 (default has type "Tuple[A, B]", argument has type "Tuple[B, B]") # type: (Tuple[B, B]) -> None b = x # type: B a = x # type: A # E: Incompatible types in assignment (expression has type "B", variable has type "A") def g(a, (x, y) = (A(),)): # E: Incompatible default for tuple argument 2 (default has type "Tuple[A]", argument has type "Tuple[B, B]") # type: (int, Tuple[B, B]) -> None pass def h((x, y) = (A(), B(), A())): # E: Incompatible default for tuple argument 1 (default has type "Tuple[A, B, A]", argument has type "Tuple[B, B]") # type: (Tuple[B, B]) -> None pass class B: pass class A: pass [case testDefaultArgumentsWithSubtypes] import typing def f(x: 'B' = A()) -> None: # E: Incompatible default for argument "x" (default has type "A", argument has type "B") pass def g(x: 'A' = B()) -> None: pass class A: pass class B(A): pass [out] [case testMultipleDefaultArgumentExpressions] import typing def f(x: 'A' = B(), y: 'B' = B()) -> None: # E: Incompatible default for argument "x" (default has type "B", argument has type "A") pass def h(x: 'A' = A(), y: 'B' = B()) -> None: pass class A: pass class B: pass [out] [case testMultipleDefaultArgumentExpressions2] import typing def g(x: 'A' = A(), y: 'B' = A()) -> None: # E: Incompatible default for argument "y" (default has type "A", argument has type "B") pass class A: pass class B: pass [out] [case testDefaultArgumentsAndSignatureAsComment] import typing def f(x = 1): # type: (int) -> str pass f() f(1) f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [case testMethodDefaultArgumentsAndSignatureAsComment] import typing class A: def f(self, x = 1): # type: (int) -> str pass A().f() A().f(1) A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" -- Access to method defined as a data attribute -- -------------------------------------------- [case testMethodAsDataAttribute] from typing import Any, Callable class B: pass x = None # type: Any class A: f = x # type: Callable[[A], None] g = x # type: Callable[[A, B], None] a = None # type: A a.f() a.g(B()) a.f(a) # E: Too many arguments a.g() # E: Too few arguments [case testMethodWithInvalidMethodAsDataAttribute] from typing import Any, Callable class B: pass x = None # type: Any class A: f = x # type: Callable[[], None] g = x # type: Callable[[B], None] a = None # type: A a.f() # E: Attribute function "f" with type "Callable[[], None]" does not accept self argument a.g() # E: Invalid self argument "A" to attribute function "g" with type "Callable[[B], None]" [case testMethodWithDynamicallyTypedMethodAsDataAttribute] from typing import Any, Callable class B: pass x = None # type: Any class A: f = x # type: Callable[[Any], Any] a = None # type: A a.f() a.f(a) # E: Too many arguments [case testOverloadedMethodAsDataAttribute] from foo import 
* [file foo.pyi] from typing import overload class B: pass class A: @overload def f(self) -> None: pass @overload def f(self, b: B) -> None: pass g = f a = None # type: A a.g() a.g(B()) a.g(a) # E: No overload variant matches argument types [foo.A] [case testMethodAsDataAttributeInferredFromDynamicallyTypedMethod] class A: def f(self, x): pass g = f a = None # type: A a.g(object()) a.g(a, a) # E: Too many arguments a.g() # E: Too few arguments [case testMethodAsDataAttributeInGenericClass] from typing import TypeVar, Generic t = TypeVar('t') class B: pass class A(Generic[t]): def f(self, x: t) -> None: pass g = f a = None # type: A[B] a.g(B()) a.g(a) # E: Argument 1 has incompatible type "A[B]"; expected "B" [case testInvalidMethodAsDataAttributeInGenericClass] from typing import Any, TypeVar, Generic, Callable t = TypeVar('t') class B: pass class C: pass x = None # type: Any class A(Generic[t]): f = x # type: Callable[[A[B]], None] ab = None # type: A[B] ac = None # type: A[C] ab.f() ac.f() # E: Invalid self argument "A[C]" to attribute function "f" with type "Callable[[A[B]], None]" [case testPartiallyTypedSelfInMethodDataAttribute] from typing import Any, TypeVar, Generic, Callable t = TypeVar('t') class B: pass class C: pass x = None # type: Any class A(Generic[t]): f = x # type: Callable[[A], None] ab = None # type: A[B] ac = None # type: A[C] ab.f() ac.f() [case testCallableDataAttribute] from typing import Callable class A: g = None # type: Callable[[A], None] def __init__(self, f: Callable[[], None]) -> None: self.f = f a = A(None) a.f() a.g() a.f(a) # E: Too many arguments a.g(a) # E: Too many arguments -- Nested functions -- ---------------- [case testSimpleNestedFunction] import typing def f(a: 'A') -> None: def g(b: 'B') -> None: b = a # fail aa = a # type: A # ok b = B() g(a) # fail g(B()) class A: pass class B: pass [out] main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") main:7: error: Argument 1 to "g" has incompatible type "A"; expected "B" [case testReturnAndNestedFunction] import typing def f() -> 'A': def g() -> 'B': return A() # fail return B() return B() # fail return A() class A: pass class B: pass [out] main:4: error: Incompatible return value type (got "A", expected "B") main:6: error: Incompatible return value type (got "B", expected "A") [case testDynamicallyTypedNestedFunction] import typing def f(x: object) -> None: def g(y): pass g() # E: Too few arguments for "g" g(x) [out] [case testNestedFunctionInMethod] import typing class A: def f(self) -> None: def g(x: int) -> None: y = x # type: int a = x # type: A # fail g(2) g(A()) # fail [out] main:6: error: Incompatible types in assignment (expression has type "int", variable has type "A") main:8: error: Argument 1 to "g" has incompatible type "A"; expected "int" [case testMutuallyRecursiveNestedFunctions] def f() -> None: def g() -> None: h(1) h('') # E def h(x: int) -> None: g() g(1) # E [out] main:4: error: Argument 1 to "h" has incompatible type "str"; expected "int" main:7: error: Too many arguments for "g" [case testMutuallyRecursiveDecoratedFunctions] from typing import Callable, Any def dec(f) -> Callable[..., Any]: pass def f() -> None: @dec def g() -> None: h() h.x # E @dec def h(x: int) -> None: g(1) g.x # E [out] main:7: error: "Callable[..., Any]" has no attribute "x" main:11: error: "Callable[..., Any]" has no attribute "x" [case testNestedGenericFunctions] from typing import TypeVar T = TypeVar('T') U = TypeVar('U') def outer(x: T) -> T: def inner(y: U) 
-> T: ... return inner(1) -- Casts -- ----- [case testCastsToAndFromFunctionTypes] from typing import TypeVar, Callable, Any, cast t = TypeVar('t') def f(x: t, f1: Callable[[], None], f2: Callable[[Any], None], o: object) -> None: x = cast(t, f1) f1 = cast(Callable[[], None], x) f1 = cast(Callable[[], None], f2) f1 = cast(Callable[[], None], o) -- Function decorators -- ------------------- [case testTrivialStaticallyTypedFunctionDecorator] from typing import TypeVar t = TypeVar('t') def dec(f: t) -> t: return f @dec def f(x: int) -> None: pass f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [case testTrivialStaticallyTypedMethodDecorator] from typing import TypeVar t = TypeVar('t') def dec(f: t) -> t: return f class A: @dec def f(self, x: int) -> None: pass A().f(1) A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" class B: pass [case testTrivialDecoratedNestedFunction] from typing import TypeVar t = TypeVar('t') def dec(f: t) -> t: return f def g() -> None: @dec def f(x: int) -> None: pass f(1) f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [out] [case testCheckingDecoratedFunction] import typing def dec(f): pass @dec def f(x: 'A') -> None: a = x # type: A x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") class A: pass [out] [case testDecoratorThatSwitchesType] from typing import Callable def dec(x) -> Callable[[], None]: pass @dec def f(y): pass f() f(None) # E: Too many arguments for "f" [case testDecoratorThatSwitchesTypeWithMethod] from typing import Any, Callable def dec(x) -> Callable[[Any], None]: pass class A: @dec def f(self, a, b, c): pass a = None # type: A a.f() a.f(None) # E: Too many arguments for "f" of "A" [case testNestedDecorators] from typing import Any, Callable def dec1(f: Callable[[Any], None]) -> Callable[[], None]: pass def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass @dec1 @dec2 def f(x, y): pass f() f(None) # E: Too many arguments for "f" [case testInvalidDecorator1] from typing import Any, Callable def dec1(f: Callable[[Any], None]) -> Callable[[], None]: pass def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass @dec1 # E: Argument 1 to "dec2" has incompatible type "Callable[[Any], Any]"; expected "Callable[[Any, Any], None]" @dec2 def f(x): pass [case testInvalidDecorator2] from typing import Any, Callable def dec1(f: Callable[[Any, Any], None]) -> Callable[[], None]: pass def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass @dec1 # E: Argument 1 to "dec1" has incompatible type "Callable[[Any], None]"; expected "Callable[[Any, Any], None]" @dec2 def f(x, y): pass [case testNoTypeCheckDecoratorOnMethod1] from typing import no_type_check @no_type_check def foo(x: 'bar', y: {'x': 4}) -> 42: 1 + 'x' [case testNoTypeCheckDecoratorOnMethod2] import typing @typing.no_type_check def foo(x: 's', y: {'x': 4}) -> 42: 1 + 'x' @typing.no_type_check def bar() -> None: 1 + 'x' [case testCallingNoTypeCheckFunction] import typing @typing.no_type_check def foo(x: {1:2}) -> [1]: 1 + 'x' foo() foo(1, 'b') [case testCallingNoTypeCheckFunction2] import typing def f() -> None: foo() @typing.no_type_check def foo(x: {1:2}) -> [1]: 1 + 'x' [case testNoTypeCheckDecoratorSemanticError] import typing @typing.no_type_check def foo(x: {1:2}) -> [1]: x = y -- Forward references to decorated functions -- ----------------------------------------- [case 
testForwardReferenceToDynamicallyTypedDecorator] def f(self) -> None: g() g(1) def dec(f): return f @dec def g(): pass [case testForwardReferenceToDecoratorWithAnyReturn] from typing import Any def f(self) -> None: g() g(1) def dec(f) -> Any: return f @dec def g(): pass [case testForwardReferenceToDecoratorWithIdentityMapping] from typing import TypeVar def f(self) -> None: g() g(1) # E: Too many arguments for "g" h(1).x # E: "str" has no attribute "x" h('') # E: Argument 1 to "h" has incompatible type "str"; expected "int" T = TypeVar('T') def dec(f: T) -> T: return f @dec def g(): pass @dec def h(x: int) -> str: pass [out] [case testForwardReferenceToDynamicallyTypedDecoratedMethod] def f(self) -> None: A().f(1).y A().f() class A: @dec def f(self, x): pass def dec(f): return f [builtins fixtures/staticmethod.pyi] [case testForwardReferenceToStaticallyTypedDecoratedMethod] from typing import TypeVar def f(self) -> None: A().f(1).y # E: "str" has no attribute "y" A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" class A: @dec def f(self, a: int) -> str: return '' T = TypeVar('T') def dec(f: T) -> T: return f [builtins fixtures/staticmethod.pyi] [out] [case testForwardReferenceToDynamicallyTypedProperty] def f(self) -> None: A().x.y class A: @property def x(self): pass [builtins fixtures/property.pyi] [case testForwardReferenceToStaticallyTypedProperty] def f(self) -> None: A().x.y # E: "int" has no attribute "y" class A: @property def x(self) -> int: return 1 [builtins fixtures/property.pyi] [out] [case testForwardReferenceToDynamicallyTypedStaticMethod] def f(self) -> None: A.x(1).y A.x() # E: Too few arguments for "x" class A: @staticmethod def x(x): pass [builtins fixtures/staticmethod.pyi] [out] [case testForwardReferenceToStaticallyTypedStaticMethod] def f(self) -> None: A.x(1).y # E: "str" has no attribute "y" A.x('') # E: Argument 1 to "x" of "A" has incompatible type "str"; expected "int" class A: @staticmethod def x(a: int) -> str: return '' [builtins fixtures/staticmethod.pyi] [out] [case testForwardReferenceToDynamicallyTypedClassMethod] def f(self) -> None: A.x(1).y A.x() # E: Too few arguments for "x" class A: @classmethod def x(cls, a): pass [builtins fixtures/classmethod.pyi] [out] [case testForwardReferenceToStaticallyTypedClassMethod] def f(self) -> None: A.x(1).y # E: "str" has no attribute "y" A.x('') # E: Argument 1 to "x" of "A" has incompatible type "str"; expected "int" class A: @classmethod def x(cls, x: int) -> str: return '' [builtins fixtures/classmethod.pyi] [out] [case testForwardReferenceToDecoratedFunctionUsingMemberExpr] import m def f(self) -> None: g(1).x # E: "str" has no attribute "x" @m.dec def g(x: int) -> str: pass [file m.py] from typing import TypeVar T = TypeVar('T') def dec(f: T) -> T: return f [out] [case testForwardReferenceToFunctionWithMultipleDecorators] def f(self) -> None: g() g(1) def dec(f): return f @dec @dec2 def g(): pass def dec2(f): return f [case testForwardReferenceToDynamicallyTypedDecoratedStaticMethod] def f(self) -> None: A().f(1).y A().f() A().g(1).y A().g() class A: @dec @staticmethod def f(self, x): pass @staticmethod @dec def g(self, x): pass def dec(f): return f [builtins fixtures/staticmethod.pyi] [case testForwardRefereceToDecoratedFunctionWithCallExpressionDecorator] def f(self) -> None: g() g(1) @dec(1) def g(): pass def dec(f): pass -- Decorator functions in import cycles -- ------------------------------------ [case testDecoratorWithIdentityTypeInImportCycle] import a [file a.py] 
import b from d import dec @dec def f(x: int) -> None: pass b.g(1) # E [file b.py] import a from d import dec @dec def g(x: str) -> None: pass a.f('') [file d.py] from typing import TypeVar T = TypeVar('T') def dec(f: T) -> T: return f [out] tmp/b.py:5: error: Argument 1 to "f" has incompatible type "str"; expected "int" tmp/a.py:5: error: Argument 1 to "g" has incompatible type "int"; expected "str" [case testDecoratorWithNoAnnotationInImportCycle] import a [file a.py] import b from d import dec @dec def f(x: int) -> None: pass b.g(1, z=4) [file b.py] import a from d import dec @dec def g(x: str) -> None: pass a.f('', y=2) [file d.py] def dec(f): return f [case testDecoratorWithFixedReturnTypeInImportCycle] import a [file a.py] import b from d import dec @dec def f(x: int) -> str: pass b.g(1)() [file b.py] import a from d import dec @dec def g(x: int) -> str: pass a.f(1)() [file d.py] from typing import Callable def dec(f: Callable[[int], str]) -> Callable[[int], str]: return f [out] tmp/b.py:5: error: "str" not callable tmp/a.py:5: error: "str" not callable [case testDecoratorWithCallAndFixedReturnTypeInImportCycle] import a [file a.py] import b from d import dec @dec() def f(x: int) -> str: pass b.g(1)() [file b.py] import a from d import dec @dec() def g(x: int) -> str: pass a.f(1)() [file d.py] from typing import Callable def dec() -> Callable[[Callable[[int], str]], Callable[[int], str]]: pass [out] tmp/b.py:5: error: "str" not callable tmp/a.py:5: error: "str" not callable [case testDecoratorWithCallAndFixedReturnTypeInImportCycleAndDecoratorArgs] import a [file a.py] import b from d import dec @dec(1) def f(x: int) -> str: pass b.g(1)() [file b.py] import a from d import dec @dec(1) def g(x: int) -> str: pass a.f(1)() [file d.py] from typing import Callable def dec(x: str) -> Callable[[Callable[[int], str]], Callable[[int], str]]: pass [out] tmp/b.py:3: error: Argument 1 to "dec" has incompatible type "int"; expected "str" tmp/b.py:5: error: "str" not callable tmp/a.py:3: error: Argument 1 to "dec" has incompatible type "int"; expected "str" tmp/a.py:5: error: "str" not callable [case testUndefinedDecoratorInImportCycle] # cmd: mypy -m foo.base [file foo/__init__.py] import foo.base class Derived(foo.base.Base): def method(self) -> None: pass [file foo/base.py] import foo class Base: @decorator def method(self) -> None: pass [out] tmp/foo/base.py:3: error: Name 'decorator' is not defined -- Conditional function definition -- ------------------------------- [case testTypeCheckBodyOfConditionalFunction] from typing import Any x = None # type: Any if x: def f(x: int) -> None: x = 1 x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [out] [case testCallConditionalFunction] from typing import Any x = None # type: Any if x: def f(x: int) -> None: pass f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [case testConditionalFunctionDefinitionWithIfElse] from typing import Any x = None # type: Any if x: def f(x: int) -> None: x = 'x' # fail x = 1 else: def f(x: int) -> None: x + 'x' # fail x = 1 f(1) f('x') # fail [out] main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:9: error: Unsupported operand types for + ("int" and "str") main:12: error: Argument 1 to "f" has incompatible type "str"; expected "int" [case testNestedConditionalFunctionDefinitionWithIfElse] from typing import Any x 
= None # type: Any def top() -> None: if x: def f(x: int) -> None: x = 'x' # fail x = 1 else: def f(x: int) -> None: x + 'x' # fail x = 1 f(1) f('x') # fail [out] main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:10: error: Unsupported operand types for + ("int" and "str") main:13: error: Argument 1 to "f" has incompatible type "str"; expected "int" [case testUnconditionalRedefinitionOfConditionalFunction] from typing import Any x = None # type: Any if x: def f(): pass def f(): pass # E: Name 'f' already defined on line 4 [case testIncompatibleConditionalFunctionDefinition] from typing import Any x = None # type: Any if x: def f(x: int) -> None: pass else: def f(x): pass # E: All conditional function variants must have identical signatures [case testIncompatibleConditionalFunctionDefinition2] from typing import Any x = None # type: Any if x: def f(x: int) -> None: pass else: def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures [case testIncompatibleConditionalFunctionDefinition3] from typing import Any x = None # type: Any if x: def f(x: int) -> None: pass else: def f(x: int = 0) -> None: pass # E: All conditional function variants must have identical signatures [case testConditionalFunctionDefinitionUsingDecorator1] from typing import Callable def dec(f) -> Callable[[int], None]: pass x = int() if x: @dec def f(): pass else: def f(x: int) -> None: pass [case testConditionalFunctionDefinitionUsingDecorator2] from typing import Callable def dec(f) -> Callable[[int], None]: pass x = int() if x: @dec def f(): pass else: def f(x: str) -> None: pass # E: Incompatible redefinition (redefinition with type "Callable[[str], None]", original type "Callable[[int], None]") [case testConditionalFunctionDefinitionUsingDecorator3] from typing import Callable def dec(f) -> Callable[[int], None]: pass x = int() if x: def f(x: int) -> None: pass else: # TODO: This should be okay. @dec # E: Name 'f' already defined def f(): pass [case testConditionalFunctionDefinitionUsingDecorator4] from typing import Callable def dec(f) -> Callable[[int], None]: pass x = int() if x: def f(x: str) -> None: pass else: # TODO: We should report an incompatible redefinition. @dec # E: Name 'f' already defined def f(): pass [case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1] from typing import Any def f(x: str) -> None: pass x = None # type: Any if x: def f(x: int) -> None: pass # E: All conditional function variants must have identical signatures [case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1] from typing import Any def f(x: int) -> None: pass # N: "f" defined here x = None # type: Any if x: def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures f(x=1) # The first definition takes precedence. 
f(y=1) # E: Unexpected keyword argument "y" for "f" [case testRedefineFunctionDefinedAsVariable] def g(): pass f = g if g(): def f(): pass f() f(1) # E: Too many arguments [case testRedefineFunctionDefinedAsVariableInitializedToNone] def g(): pass f = None if g(): def f(): pass f() f(1) # E: Too many arguments for "f" [case testRedefineNestedFunctionDefinedAsVariableInitializedToNone] def g() -> None: f = None if object(): def f(x: int) -> None: pass f() # E: Too few arguments for "f" f(1) f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [out] [case testRedefineFunctionDefinedAsVariableWithInvalidSignature] def g(): pass f = g if g(): def f(x): pass # E: Incompatible redefinition (redefinition with type "Callable[[Any], Any]", original type "Callable[[], Any]") [case testRedefineFunctionDefinedAsVariableWithVariance1] class B: pass class C(B): pass def g(x: C) -> B: pass f = g if g(C()): def f(x: C) -> C: pass [case testRedefineFunctionDefinedAsVariableWithVariance2] class B: pass class C(B): pass def g(x: C) -> B: pass f = g if g(C()): def f(x: B) -> B: pass [case testRedefineFunctionDefinedAsVariableInitializedToEmptyList] f = [] # E: Need type annotation for variable if object(): def f(): pass # E: Incompatible redefinition f() f(1) [builtins fixtures/list.pyi] [case testDefineConditionallyAsImportedAndDecorated] from typing import Callable def dec(f: Callable[[], None]) -> Callable[[], None]: ... if int(): from m import f else: @dec def f(): yield [file m.py] def f(): pass [case testDefineConditionallyAsImportedAndDecoratedWithInference] if int(): from m import f else: from contextlib import contextmanager @contextmanager def f(): yield [file m.py] from contextlib import contextmanager @contextmanager def f(): yield [typing fixtures/typing-full.pyi] -- Conditional method definition -- ----------------------------- [case testTypeCheckBodyOfConditionalMethod] from typing import Any x = None # type: Any class A: if x: def f(self, x: int) -> None: x = 1 x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [out] [case testCallConditionalMethodInClassBody] from typing import Any x = None # type: Any class A: if x: def f(self, x: int) -> None: pass f(x, 1) f(x, 'x') # E: Argument 2 to "f" of "A" has incompatible type "str"; expected "int" f(x, 1) f(x, 'x') # E: Argument 2 to "f" of "A" has incompatible type "str"; expected "int" [out] [case testCallConditionalMethodViaInstance] from typing import Any x = None # type: Any class A: if x: def f(self, x: int) -> None: pass A().f(1) A().f('x') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testConditionalMethodDefinitionWithIfElse] from typing import Any x = None # type: Any class A: if x: def f(self, x: int) -> None: x = 'x' # fail x = 1 else: def f(self, x: int) -> None: x + 'x' # fail x = 1 A().f(1) A().f('x') # fail [out] main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:10: error: Unsupported operand types for + ("int" and "str") main:13: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testUnconditionalRedefinitionOfConditionalMethod] from typing import Any x = None # type: Any class A: if x: def f(self): pass def f(self): pass # E: Name 'f' already defined [case testIncompatibleConditionalMethodDefinition] from typing import Any x = None # type: Any class A: if x: def f(self, x: int) -> None: pass else: def f(self, x): pass # E: All conditional function 
variants must have identical signatures [out] [case testConditionalFunctionDefinitionInTry] import typing try: def f(x: int) -> None: pass except: def g(x: str) -> None: pass f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" g('x') g(1) # E: Argument 1 to "g" has incompatible type "int"; expected "str" [case testConditionalMethodDefinitionUsingDecorator] from typing import Callable def dec(f) -> Callable[['A', int], None]: pass class A: x = int() if x: @dec def f(self): pass else: def f(self, x: int) -> None: pass -- Callable with specific arg list -- ------------------------------- [case testCallableWithNamedArg] from typing import Callable from mypy_extensions import Arg def a(f: Callable[[Arg(int, 'x')], int]): f(x=4) f(5) f(y=3) # E: Unexpected keyword argument "y" [builtins fixtures/dict.pyi] [case testCallableWithOptionalArg] from typing import Callable from mypy_extensions import DefaultArg def a(f: Callable[[DefaultArg(int, 'x')], int]): f(x=4) f(2) f() f(y=3) # E: Unexpected keyword argument "y" f("foo") # E: Argument 1 has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallableWithNamedArgFromExpr] from typing import Callable from mypy_extensions import Arg F = Callable[[Arg(int, 'x')], int] def a(f: F): f(x=4) f(5) f(y=3) # E: Unexpected keyword argument "y" [builtins fixtures/dict.pyi] [case testCallableWithOptionalArgFromExpr] from typing import Callable from mypy_extensions import DefaultArg F = Callable[[DefaultArg(int, 'x')], int] def a(f: F): f(x=4) f(2) f() f(y=3) # E: Unexpected keyword argument "y" f("foo") # E: Argument 1 has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallableParsingInInheritence] from collections import namedtuple class C(namedtuple('t', 'x')): pass [case testCallableParsingSameName] from typing import Callable def Arg(x, y): pass F = Callable[[Arg(int, 'x')], int] # E: Invalid argument constructor "__main__.Arg" [case testCallableParsingFromExpr] from typing import Callable, List from mypy_extensions import Arg, VarArg, KwArg import mypy_extensions def WrongArg(x, y): return y # Note that for this test, the 'Value of type "int" is not indexable' errors are silly, # and a consequence of Callable being set to an int in the test stub. We can't set it to # something else sensible, because other tests require the stub not have anything # that looks like a function call. F = Callable[[WrongArg(int, 'x')], int] # E: Invalid argument constructor "__main__.WrongArg" G = Callable[[Arg(1, 'x')], int] # E: Invalid type alias # E: Value of type "int" is not indexable H = Callable[[VarArg(int, 'x')], int] # E: VarArg arguments should not have names I = Callable[[VarArg(int)], int] # ok J = Callable[[VarArg(), KwArg()], int] # ok K = Callable[[VarArg(), int], int] # E: Required positional args may not appear after default, named or var args L = Callable[[Arg(name='x', type=int)], int] # ok # I have commented out the following test because I don't know how to expect the "defined here" note part of the error. # M = Callable[[Arg(gnome='x', type=int)], int] E: Invalid type alias E: Unexpected keyword argument "gnome" for "Arg" N = Callable[[Arg(name=None, type=int)], int] # ok O = Callable[[List[Arg(int)]], int] # E: Invalid type alias # E: Value of type "int" is not indexable # E: Type expected within [...] 
# E: The type "Type[List[Any]]" is not generic and not indexable P = Callable[[mypy_extensions.VarArg(int)], int] # ok Q = Callable[[Arg(int, type=int)], int] # E: Invalid type alias # E: Value of type "int" is not indexable # E: "Arg" gets multiple values for keyword argument "type" R = Callable[[Arg(int, 'x', name='y')], int] # E: Invalid type alias # E: Value of type "int" is not indexable # E: "Arg" gets multiple values for keyword argument "name" [builtins fixtures/dict.pyi] [case testCallableParsing] from typing import Callable from mypy_extensions import Arg, VarArg, KwArg def WrongArg(x, y): return y def b(f: Callable[[Arg(1, 'x')], int]): pass # E: invalid type comment or annotation def d(f: Callable[[VarArg(int)], int]): pass # ok def e(f: Callable[[VarArg(), KwArg()], int]): pass # ok def g(f: Callable[[Arg(name='x', type=int)], int]): pass # ok def h(f: Callable[[Arg(gnome='x', type=int)], int]): pass # E: Unexpected argument "gnome" for argument constructor def i(f: Callable[[Arg(name=None, type=int)], int]): pass # ok def j(f: Callable[[Arg(int, 'x', name='y')], int]): pass # E: "Arg" gets multiple values for keyword argument "name" def k(f: Callable[[Arg(int, type=int)], int]): pass # E: "Arg" gets multiple values for keyword argument "type" [builtins fixtures/dict.pyi] [case testCallableTypeAnalysis] from typing import Callable from mypy_extensions import Arg, VarArg as VARG, KwArg import mypy_extensions as ext def WrongArg(x, y): return y def a(f: Callable[[WrongArg(int, 'x')], int]): pass # E: Invalid argument constructor "__main__.WrongArg" def b(f: Callable[[BadArg(int, 'x')], int]): pass # E: Name 'BadArg' is not defined def d(f: Callable[[ext.VarArg(int)], int]): pass # ok def e(f: Callable[[VARG(), ext.KwArg()], int]): pass # ok def g(f: Callable[[ext.Arg(name='x', type=int)], int]): pass # ok def i(f: Callable[[Arg(name=None, type=int)], int]): pass # ok def f1(*args) -> int: pass def f2(*args, **kwargs) -> int: pass d(f1) e(f2) d(f2) e(f1) # E: Argument 1 to "e" has incompatible type "Callable[[VarArg(Any)], int]"; expected "Callable[[VarArg(Any), KwArg(Any)], int]" [builtins fixtures/dict.pyi] [case testCallableWrongTypeType] from typing import Callable from mypy_extensions import Arg def b(f: Callable[[Arg(1, 'x')], int]): pass # E: invalid type comment or annotation [builtins fixtures/dict.pyi] [case testCallableTooManyVarArg] from typing import Callable from mypy_extensions import VarArg def c(f: Callable[[VarArg(int, 'x')], int]): pass # E: VarArg arguments should not have names [builtins fixtures/dict.pyi] [case testCallableFastParseGood] from typing import Callable from mypy_extensions import VarArg, Arg, KwArg def d(f: Callable[[VarArg(int)], int]): pass # ok def e(f: Callable[[VarArg(), KwArg()], int]): pass # ok def g(f: Callable[[Arg(name='x', type=int)], int]): pass # ok def i(f: Callable[[Arg(name=None, type=int)], int]): pass # ok [builtins fixtures/dict.pyi] [case testCallableFastParseBadArgArgName] from typing import Callable from mypy_extensions import Arg def h(f: Callable[[Arg(gnome='x', type=int)], int]): pass # E: Unexpected argument "gnome" for argument constructor [builtins fixtures/dict.pyi] [case testCallableKindsOrdering] from typing import Callable, Any from mypy_extensions import Arg, VarArg, KwArg, DefaultArg, NamedArg def f(f: Callable[[VarArg(), int], int]): pass # E: Required positional args may not appear after default, named or var args def g(f: Callable[[VarArg(), VarArg()], int]): pass # E: Var args may not appear after named or 
var args def h(f: Callable[[KwArg(), KwArg()], int]): pass # E: You may only have one **kwargs argument def i(f: Callable[[DefaultArg(), int], int]): pass # E: Required positional args may not appear after default, named or var args def j(f: Callable[[NamedArg(Any, 'x'), DefaultArg(int, 'y')], int]): pass # E: Positional default args may not appear after named or var args def k(f: Callable[[KwArg(), NamedArg(Any, 'x')], int]): pass # E: A **kwargs argument must be the last argument [builtins fixtures/dict.pyi] [case testCallableDuplicateNames] from typing import Callable from mypy_extensions import Arg, VarArg, KwArg, DefaultArg def f(f: Callable[[Arg(int, 'x'), int, Arg(int, 'x')], int]): pass # E: Duplicate argument 'x' in Callable [builtins fixtures/dict.pyi] [case testCallableWithKeywordOnlyArg] from typing import Callable from mypy_extensions import NamedArg def a(f: Callable[[NamedArg(int, 'x')], int]): f(x=4) f(2) # E: Too many positional arguments f() # E: Missing named argument "x" f(y=3) # E: Unexpected keyword argument "y" f(x="foo") # E: Argument 1 has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallableWithKeywordOnlyOptionalArg] from typing import Callable from mypy_extensions import DefaultNamedArg def a(f: Callable[[DefaultNamedArg(int, 'x')], int]): f(x=4) f(2) # E: Too many positional arguments f() f(y=3) # E: Unexpected keyword argument "y" f(x="foo") # E: Argument 1 has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallableWithKwargs] from typing import Callable from mypy_extensions import KwArg def a(f: Callable[[KwArg(int)], int]): f(x=4) f(2) # E: Too many arguments f() f(y=3) f(x=4, y=3, z=10) f(x="foo") # E: Argument 1 has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallableWithVarArg] from typing import Callable from mypy_extensions import VarArg def a(f: Callable[[VarArg(int)], int]): f(x=4) # E: Unexpected keyword argument "x" f(2) f() f(3, 4, 5) f("a") # E: Argument 1 has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallableArgKindSubtyping] from typing import Callable from mypy_extensions import Arg, DefaultArg int_str_fun = None # type: Callable[[int, str], str] int_opt_str_fun = None # type: Callable[[int, DefaultArg(str, None)], str] int_named_str_fun = None # type: Callable[[int, Arg(str, 's')], str] def isf(ii: int, ss: str) -> str: return ss def iosf(i: int, s: str = "bar") -> str: return s def isf_unnamed(__i: int, __s: str) -> str: return __s int_str_fun = isf int_str_fun = isf_unnamed int_named_str_fun = isf_unnamed # E: Incompatible types in assignment (expression has type "Callable[[int, str], str]", variable has type "Callable[[int, Arg(str, 's')], str]") int_opt_str_fun = iosf int_str_fun = iosf int_opt_str_fun = isf # E: Incompatible types in assignment (expression has type "Callable[[Arg(int, 'ii'), Arg(str, 'ss')], str]", variable has type "Callable[[int, DefaultArg(str)], str]") int_named_str_fun = isf # E: Incompatible types in assignment (expression has type "Callable[[Arg(int, 'ii'), Arg(str, 'ss')], str]", variable has type "Callable[[int, Arg(str, 's')], str]") int_named_str_fun = iosf [builtins fixtures/dict.pyi] -- Callable[..., T] -- ---------------- [case testCallableWithArbitraryArgs] from typing import Callable def f(x: Callable[..., int]) -> None: x() x(1) x(z=1) x() + '' # E: Unsupported operand types for + ("int" and "str") [out] [case testCallableWithArbitraryArgs2] from typing import Callable 
def f(x: Callable[..., int]) -> None: x(*[1], **{'x': 2}) [builtins fixtures/dict.pyi] [case testCastWithCallableAndArbitraryArgs] from typing import Callable, cast f = cast(Callable[..., int], None) f(x=4) + '' # E: Unsupported operand types for + ("int" and "str") [case testCallableWithArbitraryArgsInErrorMessage] from typing import Callable def f(x: Callable[..., int]) -> None: x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[..., int]") [out] [case testCallableWithArbitraryArgsInGenericFunction] from typing import Callable, TypeVar T = TypeVar('T') def f(x: Callable[..., T]) -> T: pass def g(*x: int) -> str: pass x = f(g) x + 1 # E: Unsupported left operand type for + ("str") [builtins fixtures/list.pyi] [case testCallableWithArbitraryArgsSubtyping] from typing import Callable def f(x: Callable[..., int]) -> None: pass def g1(): pass def g2(x, y) -> int: pass def g3(*, y: str) -> int: pass def g4(*, y: int) -> str: pass f(g1) f(g2) f(g3) f(g4) # E: Argument 1 to "f" has incompatible type "Callable[[NamedArg(int, 'y')], str]"; expected "Callable[..., int]" [case testCallableWithArbitraryArgsSubtypingWithGenericFunc] from typing import Callable, TypeVar T = TypeVar('T') def f(x: Callable[..., int]) -> None: pass def g1(x: T) -> int: pass def g2(*x: T) -> int: pass def g3(*x: T) -> T: pass f(g1) f(g2) f(g3) -- (...) -> T -- ---------------- [case testEllipsisWithArbitraryArgsOnBareFunction] def f(x, y, z): # type: (...) -> None pass f(1, "hello", []) f(x=1, y="hello", z=[]) [builtins fixtures/dict.pyi] [case testEllipsisWithArbitraryArgsOnBareFunctionWithDefaults] def f(x, y=1, z="hey"): # type: (...) -> None pass f(1, "hello", []) f(x=1, y="hello", z=[]) [builtins fixtures/dict.pyi] [case testEllipsisWithArbitraryArgsOnBareFunctionWithKwargs] from typing import Dict def f(x, **kwargs): # type: (...) -> None success_dict_type = kwargs # type: Dict[str, str] failure_dict_type = kwargs # type: Dict[int, str] # E: Incompatible types in assignment (expression has type "Dict[str, Any]", variable has type "Dict[int, str]") f(1, thing_in_kwargs=["hey"]) [builtins fixtures/dict.pyi] [out] [case testEllipsisWithArbitraryArgsOnBareFunctionWithVarargs] from typing import Tuple, Any def f(x, *args): # type: (...) -> None success_tuple_type = args # type: Tuple[Any, ...] fail_tuple_type = args # type: None # E: Incompatible types in assignment (expression has type "Tuple[Any, ...]", variable has type "None") f(1, "hello") [builtins fixtures/tuple.pyi] [out] [case testEllipsisWithArbitraryArgsOnInstanceMethod] class A: def f(self, x, y, z): # type: (...) -> None pass [case testEllipsisWithArbitraryArgsOnClassMethod] class A: @classmethod def f(cls, x, y, z): # type: (...) -> None pass [builtins fixtures/classmethod.pyi] [case testEllipsisWithArbitraryArgsOnStaticMethod] class A: @staticmethod def f(x, y, z): # type: (...) -> None pass [builtins fixtures/staticmethod.pyi] [case testEllipsisWithSomethingAfterItFails] def f(x, y, z): # type: (..., int) -> None pass [out] main:1: error: Ellipses cannot accompany other argument types in function type signature. [case testEllipsisWithSomethingBeforeItFails] def f(x, y, z): # type: (int, ...) -> None pass [out] main:1: error: Ellipses cannot accompany other argument types in function type signature. 
[case testRejectCovariantArgument] from typing import TypeVar, Generic t = TypeVar('t', covariant=True) class A(Generic[t]): def foo(self, x: t) -> None: return None [builtins fixtures/bool.pyi] [out] main:5: error: Cannot use a covariant type variable as a parameter [case testRejectCovariantArgumentSplitLine] from typing import TypeVar, Generic t = TypeVar('t', covariant=True) class A(Generic[t]): def foo(self, x: t) -> None: return None [builtins fixtures/bool.pyi] [out] main:6: error: Cannot use a covariant type variable as a parameter [case testRejectCovariantArgumentInLambda] from typing import TypeVar, Generic, Callable t = TypeVar('t', covariant=True) class Thing(Generic[t]): def chain(self, func: Callable[[t], None]) -> None: pass def end(self) -> None: return self.chain( # Note that lambda args have no line numbers lambda _: None) [builtins fixtures/bool.pyi] [out] main:8: error: Cannot use a covariant type variable as a parameter [case testRejectCovariantArgumentInLambdaSplitLine] from typing import TypeVar, Generic, Callable [case testRejectContravariantReturnType] from typing import TypeVar, Generic t = TypeVar('t', contravariant=True) class A(Generic[t]): def foo(self) -> t: return None [builtins fixtures/bool.pyi] [out] main:5: error: Cannot use a contravariant type variable as return type [case testAcceptCovariantReturnType] from typing import TypeVar, Generic t = TypeVar('t', covariant=True) class A(Generic[t]): def foo(self) -> t: return None [builtins fixtures/bool.pyi] [case testAcceptContravariantArgument] from typing import TypeVar, Generic t = TypeVar('t', contravariant=True) class A(Generic[t]): def foo(self, x: t) -> None: return None [builtins fixtures/bool.pyi] -- Redefining functions -- -------------------- [case testRedefineFunction] from typing import Any def f(x) -> Any: pass def g(x, y): pass def h(x): pass def j(y) -> Any: pass f = h f = j # E: Incompatible types in assignment (expression has type "Callable[[Arg(Any, 'y')], Any]", variable has type "Callable[[Arg(Any, 'x')], Any]") f = g # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[Any], Any]") [case testRedefineFunction2] def f() -> None: pass def f() -> None: pass # E: Name 'f' already defined on line 1 -- Special cases -- ------------- [case testFunctionDefinitionWithForStatement] for _ in [1]: def f(): pass else: def g(): pass f() g() [builtins fixtures/list.pyi] [case testFunctionDefinitionWithWhileStatement] while bool(): def f(): pass else: def g(): pass f() g() [builtins fixtures/bool.pyi] [case testBareCallable] from typing import Callable, Any def foo(f: Callable) -> bool: return f() def f1() -> bool: return False foo(f1) [builtins fixtures/bool.pyi] [case testFunctionNestedWithinWith] from typing import Any a = 1 # type: Any with a: def f() -> None: pass f(1) # E: Too many arguments for "f" [case testNameForDecoratorMethod] from typing import Callable class A: def f(self) -> None: # In particular, test that the error message contains "g" of "A". 
self.g() # E: Too few arguments for "g" of "A" self.g(1) @dec def g(self, x: str) -> None: pass def dec(f: Callable[[A, str], None]) -> Callable[[A, int], None]: pass [out] [case testUnknownFunctionNotCallable] def f() -> None: pass def g(x: int) -> None: pass h = f if bool() else g reveal_type(h) # E: Revealed type is 'builtins.function' h(7) # E: Cannot call function of unknown type [builtins fixtures/bool.pyi] -- Positional-only arguments -- ------------------------- [case testPositionalOnlyArg] def f(__a: int) -> None: pass f(1) f(__a=1) # E: Unexpected keyword argument "__a" for "f" [builtins fixtures/bool.pyi] [out] main:1: note: "f" defined here [case testPositionalOnlyArgFastparse] def f(__a: int) -> None: pass f(1) f(__a=1) # E: Unexpected keyword argument "__a" for "f" [builtins fixtures/bool.pyi] [out] main:3: note: "f" defined here [case testMagicMethodPositionalOnlyArg] class A(object): def __eq__(self, other) -> bool: return True # We are all equal. # N: "__eq__" of "A" defined here a = A() a.__eq__(a) a.__eq__(other=a) # E: Unexpected keyword argument "other" for "__eq__" of "A" [builtins fixtures/bool.pyi] [case testMagicMethodPositionalOnlyArgFastparse] class A(object): def __eq__(self, other) -> bool: return True # We are all equal. # N: "__eq__" of "A" defined here a = A() a.__eq__(a) a.__eq__(other=a) # E: Unexpected keyword argument "other" for "__eq__" of "A" [builtins fixtures/bool.pyi] [case testTupleArguments] # flags: --python-version 2.7 def f(a, (b, c), d): pass [case testTupleArgumentsFastparse] # flags: --python-version 2.7 def f(a, (b, c), d): pass -- Type variable shenanagins -- ------------------------- [case testGenericFunctionTypeDecl] from typing import Callable, TypeVar T = TypeVar('T') f: Callable[[T], T] reveal_type(f) # E: Revealed type is 'def [T] (T`-1) -> T`-1' def g(__x: T) -> T: pass f = g reveal_type(f) # E: Revealed type is 'def [T] (T`-1) -> T`-1' i = f(3) reveal_type(i) # E: Revealed type is 'builtins.int*' [case testFunctionReturningGenericFunction] from typing import Callable, TypeVar T = TypeVar('T') def deco() -> Callable[[T], T]: pass reveal_type(deco) # E: Revealed type is 'def () -> def [T] (T`-1) -> T`-1' f = deco() reveal_type(f) # E: Revealed type is 'def [T] (T`-1) -> T`-1' i = f(3) reveal_type(i) # E: Revealed type is 'builtins.int*' [case testFunctionReturningGenericFunctionPartialBinding] from typing import Callable, TypeVar T = TypeVar('T') U = TypeVar('U') def deco(x: U) -> Callable[[T, U], T]: pass reveal_type(deco) # E: Revealed type is 'def [U] (x: U`-1) -> def [T] (T`-2, U`-1) -> T`-2' f = deco("foo") reveal_type(f) # E: Revealed type is 'def [T] (T`-2, builtins.str*) -> T`-2' i = f(3, "eggs") reveal_type(i) # E: Revealed type is 'builtins.int*' [case testFunctionReturningGenericFunctionTwoLevelBinding] from typing import Callable, TypeVar T = TypeVar('T') R = TypeVar('R') def deco() -> Callable[[T], Callable[[T, R], R]]: pass f = deco() reveal_type(f) # E: Revealed type is 'def [T] (T`-1) -> def [R] (T`-1, R`-2) -> R`-2' g = f(3) reveal_type(g) # E: Revealed type is 'def [R] (builtins.int*, R`-2) -> R`-2' s = g(4, "foo") reveal_type(s) # E: Revealed type is 'builtins.str*' [case testGenericFunctionReturnAsDecorator] from typing import Callable, TypeVar T = TypeVar('T') def deco(__i: int) -> Callable[[T], T]: pass @deco(3) def lol(x: int) -> str: ... 
reveal_type(lol) # E: Revealed type is 'def (x: builtins.int) -> builtins.str' s = lol(4) reveal_type(s) # E: Revealed type is 'builtins.str' [case testGenericFunctionOnReturnTypeOnly] from typing import TypeVar, List T = TypeVar('T') def make_list() -> List[T]: pass l: List[int] = make_list() bad = make_list() # E: Need type annotation for variable [builtins fixtures/list.pyi] [case testAnonymousArgumentError] def foo(__b: int, x: int, y: int) -> int: pass foo(x=2, y=2) # E: Missing positional argument foo(y=2) # E: Missing positional arguments [case testReturnTypeLineNumberWithDecorator] def dec(f): pass @dec def test(a: str) -> (str,): # E: Invalid tuple literal type # N: Suggestion: Is there a spurious trailing comma? return None [case testReturnTypeLineNumberNewLine] def fn(a: str ) -> badtype: # E: Name 'badtype' is not defined pass [case testArgumentTypeLineNumberWithDecorator] def dec(f): pass @dec def some_method(self: badtype): pass # E: Name 'badtype' is not defined [case TestArgumentTypeLineNumberNewline] def fn( a: badtype) -> None: # E: Name 'badtype' is not defined pass [case testInferredTypeSubTypeOfReturnType] from typing import Union, Dict, List def f() -> List[Union[str, int]]: x = ['a'] return x # E: Incompatible return value type (got "List[str]", expected "List[Union[str, int]]") \ # N: Perhaps you need a type annotation for "x"? Suggestion: "List[Union[str, int]]" def g() -> Dict[str, Union[str, int]]: x = {'a': 'a'} return x # E: Incompatible return value type (got "Dict[str, str]", expected "Dict[str, Union[str, int]]") \ # N: Perhaps you need a type annotation for "x"? Suggestion: "Dict[str, Union[str, int]]" def h() -> Dict[Union[str, int], str]: x = {'a': 'a'} return x # E: Incompatible return value type (got "Dict[str, str]", expected "Dict[Union[str, int], str]") \ # N: Perhaps you need a type annotation for "x"? Suggestion: "Dict[Union[str, int], str]" def i() -> List[Union[int, float]]: x: List[int] = [1] return x # E: Incompatible return value type (got "List[int]", expected "List[Union[int, float]]") \ # N: Perhaps you need a type annotation for "x"? 
Suggestion: "List[Union[int, float]]" [builtins fixtures/dict.pyi] [case testInferredTypeNotSubTypeOfReturnType] from typing import Union, List def f() -> List[Union[int, float]]: x = ['a'] return x # E: Incompatible return value type (got "List[str]", expected "List[Union[int, float]]") def g() -> List[Union[str, int]]: x = ('a', 2) return x # E: Incompatible return value type (got "Tuple[str, int]", expected "List[Union[str, int]]") [builtins fixtures/list.pyi] [case testInferredTypeIsObjectMismatch] from typing import Union, Dict, List def f() -> Dict[str, Union[str, int]]: x = {'a': 'a', 'b': 2} return x # E: Incompatible return value type (got "Dict[str, object]", expected "Dict[str, Union[str, int]]") def g() -> Dict[str, Union[str, int]]: x: Dict[str, Union[str, int]] = {'a': 'a', 'b': 2} return x def h() -> List[Union[str, int]]: x = ['a', 2] return x # E: Incompatible return value type (got "List[object]", expected "List[Union[str, int]]") def i() -> List[Union[str, int]]: x: List[Union[str, int]] = ['a', 2] return x [builtins fixtures/dict.pyi] [case testLambdaSemanal] f = lambda: xyz [out] main:1: error: Name 'xyz' is not defined [case testLambdaTypeCheck] f = lambda: 1 + '1' [out] main:1: error: Unsupported operand types for + ("int" and "str") [case testLambdaTypeInference] f = lambda: 5 reveal_type(f) [out] main:2: error: Revealed type is 'def () -> builtins.int' mypy-0.560/test-data/unit/check-generic-subtyping.test0000644€tŠÔÚ€2›s®0000004657213215007205027164 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for the type checker related to subtyping and inheritance with -- generics. -- Subtyping + inheritance -- ----------------------- [case testSubtypingAndInheritingNonGenericTypeFromGenericType] from typing import TypeVar, Generic T = TypeVar('T') ac = None # type: A[C] ad = None # type: A[D] b = None # type: B b = ad # E: Incompatible types in assignment (expression has type "A[D]", variable has type "B") ad = b # E: Incompatible types in assignment (expression has type "B", variable has type "A[D]") b = ac # E: Incompatible types in assignment (expression has type "A[C]", variable has type "B") b = b ac = b class C: pass class A(Generic[T]): pass class B(A[C]): pass class D: pass [case testSubtypingAndInheritingGenericTypeFromNonGenericType] from typing import TypeVar, Generic T = TypeVar('T') a = None # type: A bc = None # type: B[C] bd = None # type: B[D] bc = bd # E: Incompatible types in assignment (expression has type "B[D]", variable has type "B[C]") bd = bc # E: Incompatible types in assignment (expression has type "B[C]", variable has type "B[D]") bc = a # E: Incompatible types in assignment (expression has type "A", variable has type "B[C]") bd = a # E: Incompatible types in assignment (expression has type "A", variable has type "B[D]") a = bc a = bd class A: pass class B(A, Generic[T]): pass class C: pass class D: pass [case testSubtypingAndInheritingGenericTypeFromGenericType] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') ac = None # type: A[C] ad = None # type: A[D] bcc = None # type: B[C, C] bdc = None # type: B[D, C] ad = bcc # E: Incompatible types in assignment (expression has type "B[C, C]", variable has type "A[D]") ad = bdc # E: Incompatible types in assignment (expression has type "B[D, C]", variable has type "A[D]") bcc = ac # E: Incompatible types in assignment (expression has type "A[C]", variable has type "B[C, C]") bdc = ac # E: Incompatible types in assignment (expression has type "A[C]", variable has type 
"B[D, C]") bcc = bcc bdc = bdc ac = bcc ac = bdc class A(Generic[T]): pass class B(A[S], Generic[T, S]): pass class C: pass class D: pass [case testSubtypingAndInheritingGenericTypeFromGenericTypeAcrossHierarchy] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') X = TypeVar('X') Y = TypeVar('Y') ae = None # type: A[A[E]] af = None # type: A[A[F]] cef = None # type: C[E, F] cff = None # type: C[F, F] cfe = None # type: C[F, E] ae = cef # E: Incompatible types in assignment (expression has type "C[E, F]", variable has type "A[A[E]]") af = cfe # E: Incompatible types in assignment (expression has type "C[F, E]", variable has type "A[A[F]]") ae = cfe af = cef af = cff class A(Generic[T]): pass class B(A[S], Generic[T, S]): pass class C(B[A[X], A[Y]], Generic[X, Y]): pass class E: pass class F: pass [case testIncludingBaseClassTwice] from typing import TypeVar, Generic t = TypeVar('t') class I(Generic[t]): pass class A(I[C], I[object]): pass # E: Duplicate base class "I" class C: pass -- Accessing inherited generic members -- ----------------------------------- [case testAccessingMethodInheritedFromGenericType] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') b = None # type: B[C, D] c, d = None, None # type: (C, D) b.f(c) # E: Argument 1 to "f" of "A" has incompatible type "C"; expected "D" b.f(d) class A(Generic[T]): def f(self, a: T) -> None: pass class B(A[S], Generic[T, S]): pass class C: pass class D: pass [case testAccessingMethodInheritedFromGenericTypeInNonGenericType] from typing import TypeVar, Generic T = TypeVar('T') b, c, d = None, None, None # type: (B, C, D) b.f(c) # E: Argument 1 to "f" of "A" has incompatible type "C"; expected "D" b.f(d) class C: pass class D: pass class A(Generic[T]): def f(self, a: T) -> None: pass class B(A[D]): pass [case testAccessingMemberVarInheritedFromGenericType] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class A(Generic[T]): def __init__(self, a: T) -> None: self.a = a b = None # type: B[C, D] c, d = None, None # type: (C, D) b.a = c # E: Incompatible types in assignment (expression has type "C", variable has type "D") b.a = d class B(A[S], Generic[T, S]): pass class C: pass class D: pass -- Overriding with generic types -- ----------------------------- [case testOverridingMethodInSimpleTypeInheritingGenericType] from typing import TypeVar, Generic T = TypeVar('T') class B(Generic[T]): def f(self, a: T) -> None: pass def g(self, a: T) -> None: pass class C: pass class D: pass class A(B[C]): def f(self, a: D) -> None: pass \ # E: Argument 1 of "f" incompatible with supertype "B" def g(self, a: C) -> None: pass [out] [case testOverridingMethodInGenericTypeInheritingSimpleType] from typing import TypeVar, Generic T = TypeVar('T') class C: pass class B: def f(self, a: C) -> None: pass def g(self, a: C) -> None: pass class A(B, Generic[T]): def f(self, a: T) -> None: pass \ # E: Argument 1 of "f" incompatible with supertype "B" def g(self, a: 'C') -> None: pass [out] [case testOverridingMethodInGenericTypeInheritingGenericType] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class B(Generic[T]): def f(self, a: T) -> None: pass def g(self, a: T) -> None: pass class A(B[S], Generic[T, S]): def f(self, a: T) -> None: pass \ # E: Argument 1 of "f" incompatible with supertype "B" def g(self, a: S) -> None: pass [out] [case testOverridingMethodInMultilevelHierarchyOfGenericTypes] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') U = TypeVar('U') V = 
TypeVar('V') class D: pass class C(Generic[T, U, V]): def f(self, a: V) -> None: pass def g(self, a: V) -> None: pass class B(C[D, D, T], Generic[T]): pass class A(B[S], Generic[T, S]): def f(self, a: T) -> None: pass \ # E: Argument 1 of "f" incompatible with supertype "C" def g(self, a: S) -> None: pass [out] [case testOverrideGenericMethodInNonGenericClass] from typing import TypeVar T = TypeVar('T') S = TypeVar('S') class A: def f(self, x: T, y: S) -> None: pass class B(A): def f(self, x: S, y: T) -> None: pass class C(A): # Okay, because T = object allows any type for the arguments. def f(self, x: T, y: T) -> None: pass [case testOverrideGenericMethodInNonGenericClassLists] from typing import TypeVar, List T = TypeVar('T') S = TypeVar('S') class A: def f(self, x: List[T], y: List[S]) -> None: pass class B(A): def f(self, x: List[S], y: List[T]) -> None: pass class C(A): def f(self, x: List[T], y: List[T]) -> None: pass # E: Signature of "f" incompatible with supertype "A" [builtins fixtures/list.pyi] [out] [case testOverrideGenericMethodInNonGenericClassGeneralize] from typing import TypeVar T = TypeVar('T') T1 = TypeVar('T1', bound=str) S = TypeVar('S') class A: def f(self, x: int, y: S) -> None: pass class B(A): def f(self, x: T, y: S) -> None: pass class C(A): def f(self, x: T, y: str) -> None: pass class D(A): def f(self, x: T1, y: S) -> None: pass # TODO: This error could be more specific. [out] main:12: error: Argument 2 of "f" incompatible with supertype "A" main:14: error: Signature of "f" incompatible with supertype "A" -- Inheritance from generic types with implicit dynamic supertype -- -------------------------------------------------------------- [case testInheritanceFromGenericWithImplicitDynamicAndSubtyping] from typing import TypeVar, Generic T = TypeVar('T') a = None # type: A bc = None # type: B[C] bd = None # type: B[D] a = bc # E: Incompatible types in assignment (expression has type "B[C]", variable has type "A") bc = a bd = a class B(Generic[T]): pass class A(B): pass class C: pass class D: pass [out] [case testInheritanceFromGenericWithImplicitDynamicAndExternalAccess] from typing import TypeVar, Generic T = TypeVar('T') class B(Generic[T]): def f(self, a: 'B[T]') -> None: pass def __init__(self, x: 'B[T]') -> None: self.x = x class A(B): pass class C: pass a = None # type: A c = None # type: C bc = None # type: B[C] a.x = c # E: Incompatible types in assignment (expression has type "C", variable has type "B[Any]") a.f(c) # E: Argument 1 to "f" of "B" has incompatible type "C"; expected "B[Any]" a.x = bc a.f(bc) [out] [case testInheritanceFromGenericWithImplicitDynamic] from typing import TypeVar, Generic T = TypeVar('T') a = None # type: A c = None # type: C bc = None # type: B[C] class B(Generic[T]): def f(self, a: 'B[T]') -> None: pass def __init__(self, x: 'B[T]') -> None: self.x = x class A(B): def g(self) -> None: self.x = c # E: Incompatible types in assignment (expression has type "C", variable has type "B[Any]") self.f(c) # E: Argument 1 to "f" of "B" has incompatible type "C"; expected "B[Any]" self.x = bc self.f(bc) class C: pass [out] [case testInheritanceFromGenericWithImplicitDynamicAndOverriding] from typing import TypeVar, Generic, Tuple T = TypeVar('T') class B(Generic[T]): def f(self, a: T, b: 'Tuple[T, B[T]]') -> None: pass class A(B): def f(self, a, b): pass [builtins fixtures/tuple.pyi] [out] -- Inheritance from generic types and super expressions -- ---------------------------------------------------- [case 
testSuperExpressionsWhenInheritingFromGenericType] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class B(Generic[T]): def f(self, a: T) -> None: pass class A(B[S], Generic[T, S]): def g(self, t: T, s: S) -> None: super().f(t) # E: Argument 1 to "f" of "B" has incompatible type "T"; expected "S" super().f(s) [out] [case testSuperExpressionsWhenInheritingFromGenericTypeAndDeepHierarchy] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') U = TypeVar('U') V = TypeVar('V') class C(Generic[T, U, V]): def f(self, a: V) -> None: pass class D: pass class B(C[D, D, T], Generic[T]): pass class A(B[S], Generic[T, S]): def g(self, t: T, s: S) -> None: super().f(t) # E: Argument 1 to "f" of "C" has incompatible type "T"; expected "S" super().f(s) [out] -- Type of inherited constructor -- ----------------------------- [case testInheritedConstructor] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, x: T) -> None: pass class B(A[T], Generic[T]): pass class C(A[int]): pass class D(A[A[T]], Generic[T]): pass B(1) C(1) C('a') # E: Argument 1 to "C" has incompatible type "str"; expected "int" D(A(1)) D(1) # E: Argument 1 to "D" has incompatible type "int"; expected "A[]" [case testInheritedConstructor2] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') Z = TypeVar('Z') class A(Generic[T, U]): def __init__(self, x: T, y: U, z: Z) -> None: pass class B(A[int, T], Generic[T]): pass class C(B[A[T, str]], Generic[T, U]): pass # C[T, U] <: B[A[T, str]] <: A[int, A[T, str]] C(1, A(1, 'a', 0), 'z') C(1, A('1', 'a', 0), 'z') C('1', A(1, 'a', 0), 'z') # E: Argument 1 to "C" has incompatible type "str"; expected "int" C(1, A(1, 1, 0), 'z') # E: Argument 2 to "A" has incompatible type "int"; expected "str" -- Subtyping with a generic abstract base class -- -------------------------------------------- [case testSubtypingWithGenericTypeSubclassingGenericAbstractClass] from typing import TypeVar, Generic from abc import abstractmethod T = TypeVar('T') S = TypeVar('S') acd = None # type: A[C, D] adc = None # type: A[D, C] ic = None # type: I[C] id = None # type: I[D] ic = acd # E: Incompatible types in assignment (expression has type "A[C, D]", variable has type "I[C]") id = adc # E: Incompatible types in assignment (expression has type "A[D, C]", variable has type "I[D]") adc = ic # E: Incompatible types in assignment (expression has type "I[C]", variable has type "A[D, C]") ic = adc id = acd class I(Generic[T]): @abstractmethod def f(self): pass class A(I[S], Generic[T, S]): pass class C: pass class D: pass [case testSubtypingWithTypeImplementingGenericABCViaInheritance] from typing import TypeVar, Generic S = TypeVar('S') a, b = None, None # type: (A, B) ic, id, ie = None, None, None # type: (I[C], I[D], I[E]) class I(Generic[S]): pass class B(I[C]): pass class A(B): pass ie = a # E: Incompatible types in assignment (expression has type "A", variable has type "I[E]") a = ic # E: Incompatible types in assignment (expression has type "I[C]", variable has type "A") a = id # E: Incompatible types in assignment (expression has type "I[D]", variable has type "A") a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") id = a # E: Incompatible types in assignment (expression has type "A", variable has type "I[D]") ic = a b = a class C: pass class D: pass class E: pass [out] [case testSubtypingWithTypeImplementingGenericABCViaInheritance2-skip] from typing import TypeVar, Generic T = 
TypeVar('T') class I(Generic[T]): pass class A(I[C]): pass class B(A, I[D]): pass # Fail class C: pass class D: pass [out] main:5: error: Class "B" has base "I" duplicated inconsistently [case testSubtypingAndABCExtension] from typing import TypeVar, Generic from abc import abstractmethod, ABCMeta t = TypeVar('t') a, i, j = None, None, None # type: (A[object], I[object], J[object]) (ii, jj) = (i, j) ii = a jj = a jj = i a = i # E: Incompatible types in assignment (expression has type "I[object]", variable has type "A[object]") a = j # E: Incompatible types in assignment (expression has type "J[object]", variable has type "A[object]") class J(Generic[t]): pass class X(metaclass=ABCMeta): pass class I(X, J[t], Generic[t]): pass class A(I[t], Generic[t]): pass -- Subclassing a generic ABC -- ------------------------- [case testSubclassingGenericABC1] from typing import TypeVar, Generic from abc import abstractmethod T = TypeVar('T') class I(Generic[T]): @abstractmethod def f(self, a: T) -> None: pass @abstractmethod def g(self, a: T) -> None: pass class A(I[C]): def f(self, a: 'D') -> None: pass \ # E: Argument 1 of "f" incompatible with supertype "I" def g(self, a: 'C') -> None: pass class C: pass class D: pass [out] -- Extending a generic ABC with deep type hierarchy -- ------------------------------------------------ [case testSubclassingGenericABCWithDeepHierarchy] from typing import Any, TypeVar, Generic from abc import abstractmethod T = TypeVar('T') a = None # type: A ic, id = None, None # type: (I[C], I[D]) id = a # Fail ic = a class I(Generic[T]): @abstractmethod def f(self, a: T, b: T) -> None: pass @abstractmethod def g(self, a: T, b: 'D') -> None: pass class B(I[C]): def f(self, a: 'C', b: 'C') -> None: pass def g(self, a: 'C', b: Any) -> None: pass class A(B): def g(self, a: 'C', b: 'C') -> None: pass \ # E: Argument 2 of "g" incompatible with supertype "I" def f(self, a: 'C', b: 'C') -> None: pass class C: pass class D: pass [out] main:7: error: Incompatible types in assignment (expression has type "A", variable has type "I[D]") [case testSubclassingGenericABCWithDeepHierarchy2] from typing import Any, TypeVar, Generic from abc import abstractmethod T = TypeVar('T') class I(Generic[T]): @abstractmethod def f(self, a: T, b: T) -> None: pass class B(I[C]): def f(self, a: 'C', b: Any) -> None: pass class A(B): def f(self, a: 'C', b: 'D') -> None: pass \ # E: Argument 2 of "f" incompatible with supertype "I" class C: pass class D: pass [out] -- Implicit Any types and subclassing generic ABC -- ---------------------------------------------- [case testSubclassingGenericABCWithImplicitAny] from typing import Any, TypeVar, Generic from abc import abstractmethod T = TypeVar('T') a = None # type: Any ic = None # type: I[C] id = None # type: I[D] ic = a id = a class I(Generic[T]): @abstractmethod def f(self, a: T) -> None: pass class A(I): def f(self, a): pass class C: pass class D: pass [case testSubclassingGenericABCWithImplicitAnyAndDeepHierarchy] from typing import Any, TypeVar, Generic from abc import abstractmethod T = TypeVar('T') a = None # type: Any ic = None # type: I[C] id = None # type: I[D] ic = a id = a class I(Generic[T]): @abstractmethod def f(self, a: T, b: T) -> None: pass class B(I): def f(self, a, b): pass class A(B): def f(self, a: 'C', b: 'D') -> None: pass class C: pass class D: pass [case testImplementingGenericABCWithImplicitAnyAndDeepHierarchy2] from typing import Any, TypeVar, Generic from abc import abstractmethod T = TypeVar('T') a = None # type: Any jc = None 
# type: J[C] jd = None # type: J[D] jc = a jd = a class J(Generic[T]): @abstractmethod def f(self, a: T, b: T) -> None: pass class I(J): @abstractmethod def f(self, a, b): pass class A(I): def f(self, a: 'C', b: 'D') -> None: pass class C: pass class D: pass -- Accessing generic ABC members -- ----------------------------- [case testAccessingGenericABCMembers] from typing import TypeVar, Generic from abc import abstractmethod T = TypeVar('T') class I(Generic[T]): @abstractmethod def f(self, a: T) -> None: pass class A: pass class B: pass a, b = None, None # type: (A, B) ia = None # type: I[A] ia.f(b) # E: Argument 1 to "f" of "I" has incompatible type "B"; expected "A" ia.f(a) [case testAccessingInheritedGenericABCMembers] from typing import TypeVar, Generic from abc import abstractmethod T = TypeVar('T') class J(Generic[T]): @abstractmethod def f(self, a: T) -> None: pass class I(J[T], Generic[T]): pass class A: pass class B: pass a, b = None, None # type: (A, B) ia = None # type: I[A] ia.f(b) # E: Argument 1 to "f" of "J" has incompatible type "B"; expected "A" ia.f(a) -- Misc -- ---- [case testMultipleAssignmentAndGenericSubtyping] from typing import Iterable n, s = None, None # type: int, str class Nums(Iterable[int]): def __iter__(self): pass def __next__(self): pass n, n = Nums() s, s = Nums() # E: Incompatible types in assignment (expression has type "int", variable has type "str") [builtins fixtures/for.pyi] [out] -- Variance -- -------- [case testCovariant] from typing import TypeVar, Generic T = TypeVar('T', covariant=True) class G(Generic[T]): pass class A: pass class B(A): pass class C(B): pass a = None # type: G[A] b = None # type: G[B] c = None # type: G[C] b = a # E: Incompatible types in assignment (expression has type "G[A]", variable has type "G[B]") b = c [builtins fixtures/bool.pyi] [out] [case testContravariant] from typing import TypeVar, Generic T = TypeVar('T', contravariant=True) class G(Generic[T]): pass class A: pass class B(A): pass class C(B): pass a = None # type: G[A] b = None # type: G[B] c = None # type: G[C] b = a b = c # E: Incompatible types in assignment (expression has type "G[C]", variable has type "G[B]") [builtins fixtures/bool.pyi] [out] [case testInvariant] from typing import TypeVar, Generic T = TypeVar('T') # invariant (default) class G(Generic[T]): pass class A: pass class B(A): pass class C(B): pass a = None # type: G[A] b = None # type: G[B] c = None # type: G[C] b = a # E: Incompatible types in assignment (expression has type "G[A]", variable has type "G[B]") b = c # E: Incompatible types in assignment (expression has type "G[C]", variable has type "G[B]") [builtins fixtures/bool.pyi] [out] [case testTypeVarSubtypeUnion] from typing import Union, TypeVar, Generic class U: pass class W: pass T = TypeVar('T', bound=Union[U, W]) class Y(Generic[T]): def __init__(self) -> None: pass def f(self) -> T: return U() # E: Incompatible return value type (got "U", expected "T") [out] mypy-0.560/test-data/unit/check-generics.test0000644€tŠÔÚ€2›s®0000014052413215007205025315 0ustar jukkaDROPBOX\Domain Users00000000000000-- Simple generic types -- -------------------- [case testGenericMethodReturnType] from typing import TypeVar, Generic T = TypeVar('T') a, b, c = None, None, None # type: (A[B], B, C) c = a.f() # Fail b = a.f() class A(Generic[T]): def f(self) -> T: pass class B: pass class C: pass [out] main:4: error: Incompatible types in assignment (expression has type "B", variable has type "C") [case testGenericMethodArgument] from typing import 
TypeVar, Generic T = TypeVar('T') a.f(c) # Fail a.f(b) a = None # type: A[B] b = None # type: B c = None # type: C class A(Generic[T]): def f(self, a: T) -> None: pass class B: pass class C: pass [out] main:3: error: Argument 1 to "f" of "A" has incompatible type "C"; expected "B" [case testGenericMemberVariable] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, v: T) -> None: self.v = v a, b, c = None, None, None # type: (A[B], B, C) a.v = c # Fail a.v = b class B: pass class C: pass [out] main:8: error: Incompatible types in assignment (expression has type "C", variable has type "B") [case testGenericMemberVariable] from typing import TypeVar, Generic T = TypeVar('T') a, b, c = None, None, None # type: (A[B], B, C) a.v = c # Fail a.v = b class A(Generic[T]): v = None # type: T class B: pass class C: pass [out] main:4: error: Incompatible types in assignment (expression has type "C", variable has type "B") [case testSimpleGenericSubtyping] from typing import TypeVar, Generic T = TypeVar('T') b, bb, c = None, None, None # type: (A[B], A[B], A[C]) c = b # Fail b = c # Fail b = b b = bb class A(Generic[T]): pass class B: pass class C(B): pass [out] main:4: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[C]") main:5: error: Incompatible types in assignment (expression has type "A[C]", variable has type "A[B]") [case testGenericTypeCompatibilityWithAny] from typing import Any, TypeVar, Generic T = TypeVar('T') b, c, d = None, None, None # type: (A[B], A[C], A[Any]) b = d c = d d = b d = c class A(Generic[T]): pass class B: pass class C(B): pass [out] [case testTypeVariableAsTypeArgument] from typing import TypeVar, Generic T = TypeVar('T') a = None # type: A[B] b = None # type: A[B] c = None # type: A[C] a.v = c # Fail c = a.v # Fail a.v = b b = a.v class A(Generic[T]): v = None # type: A[T] class B: pass class C: pass [out] main:7: error: Incompatible types in assignment (expression has type "A[C]", variable has type "A[B]") main:8: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[C]") [case testMultipleGenericTypeParametersWithMemberVars] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[B, C] s = None # type: B t = None # type: C t = a.s # Fail s = a.t # Fail s = a.s t = a.t class A(Generic[S, T]): s = None # type: S t = None # type: T class B: pass class C: pass [out] main:8: error: Incompatible types in assignment (expression has type "B", variable has type "C") main:9: error: Incompatible types in assignment (expression has type "C", variable has type "B") [case testMultipleGenericTypeParametersWithMethods] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[B, C] s = None # type: B t = None # type: C a.f(s, s) # Fail a.f(t, t) # Fail a.f(s, t) class A(Generic[S, T]): def f(self, s: S, t: T) -> None: pass class B: pass class C: pass [out] main:8: error: Argument 2 to "f" of "A" has incompatible type "B"; expected "C" main:9: error: Argument 1 to "f" of "A" has incompatible type "C"; expected "B" [case testMultipleGenericTypeParametersAndSubtyping] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') bc = None # type: A[B, C] bb = None # type: A[B, B] cb = None # type: A[C, B] bb = bc # Fail bb = cb # Fail bc = bb # Fail bb = bb bc = bc class A(Generic[S, T]): s = None # type: S t = None # type: T class B: pass class C(B):pass [out] main:8: error: Incompatible types in 
assignment (expression has type "A[B, C]", variable has type "A[B, B]") main:9: error: Incompatible types in assignment (expression has type "A[C, B]", variable has type "A[B, B]") main:10: error: Incompatible types in assignment (expression has type "A[B, B]", variable has type "A[B, C]") -- Simple generic type bodies -- -------------------------- [case testGenericTypeBody1] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): a = None # type: T def f(self, b: T) -> T: self.f(x) # Fail d = self # type: A[B] # Fail self.a = self.f(self.a) return self.a c = self # type: A[T] x = None # type: B class B: pass [out] main:7: error: Argument 1 to "f" of "A" has incompatible type "B"; expected "T" main:8: error: Incompatible types in assignment (expression has type "A[T]", variable has type "A[B]") [case testGenericTypeBodyWithMultipleVariables] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') class A(Generic[S, T]): def f(self) -> None: s = None # type: S t = None # type: T s = t # Fail t = s # Fail a = self # type: A[S, B] # Fail b = self # type: A[T, T] # Fail c = self # type: A[S, T] t = t class B: pass [out] main:8: error: Incompatible types in assignment (expression has type "T", variable has type "S") main:9: error: Incompatible types in assignment (expression has type "S", variable has type "T") main:10: error: Incompatible types in assignment (expression has type "A[S, T]", variable has type "A[S, B]") main:11: error: Incompatible types in assignment (expression has type "A[S, T]", variable has type "A[T, T]") [case testCompatibilityOfNoneWithTypeVar] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def f(self) -> None: a = None # type: T a = None [out] [case testCompatibilityOfTypeVarWithObject] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def f(self) -> T: a = object() # type: T # Fail a = object() # Fail b = self.f() # type: object b = self.f() return None [out] main:5: error: Incompatible types in assignment (expression has type "object", variable has type "T") main:6: error: Incompatible types in assignment (expression has type "object", variable has type "T") -- Operations with generic types -- ----------------------------- [case testGenericOperations] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[B, C] b = None # type: B c = None # type: C b = a + b # Fail c = a + c # Fail c = a[c] # Fail b = a[b] # Fail c = a + b b = a[c] class A(Generic[S, T]): def __add__(self, a: S) -> T: pass def __getitem__(self, i: T) -> S: pass class B: pass class C: pass [out] main:8: error: Incompatible types in assignment (expression has type "C", variable has type "B") main:9: error: Unsupported operand types for + ("A[B, C]" and "C") main:10: error: Incompatible types in assignment (expression has type "B", variable has type "C") main:11: error: Invalid index type "B" for "A[B, C]"; expected type "C" [case testOperatorAssignmentWithIndexLvalue1] from typing import TypeVar, Generic T = TypeVar('T') b = None # type: B c = None # type: C ac = None # type: A[C] ac[b] += b # Fail ac[c] += c # Fail ac[b] += c ac[b] = ac[b] + c class A(Generic[T]): def __getitem__(self, i: 'B') -> T: pass def __setitem__(self, i: 'B', v: T) -> None: pass class B: pass class C: def __add__(self, o: 'C') -> 'C': pass [out] main:7: error: Unsupported operand types for + ("C" and "B") main:8: error: Invalid index type "C" for "A[C]"; expected type "B" [case 
testOperatorAssignmentWithIndexLvalue2] from typing import TypeVar, Generic T = TypeVar('T') b = None # type: B c = None # type: C ac = None # type: A[C] ac[b] += c # Fail ac[c] += c # Fail ac[b] = ac[b] + c # Fail class A(Generic[T]): def __getitem__(self, i: 'B') -> T: pass def __setitem__(self, i: 'C', v: T) -> None: pass class B: pass class C: def __add__(self, o: 'C') -> 'C': pass [out] main:7: error: Invalid index type "B" for "A[C]"; expected type "C" main:8: error: Invalid index type "C" for "A[C]"; expected type "B" main:9: error: Invalid index type "B" for "A[C]"; expected type "C" -- Nested generic types -- -------------------- [case testNestedGenericTypes] from typing import TypeVar, Generic T = TypeVar('T') aab = None # type: A[A[B]] aac = None # type: A[A[C]] ab = None # type: A[B] ac = None # type: A[C] ac = aab.x # Fail ac.y = aab # Fail ab = aab.x ac = aac.x ab.y = aab ac.y = aac class A(Generic[T]): x = None # type: T y = None # type: A[A[T]] class B: pass class C: pass [out] main:8: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[C]") main:9: error: Incompatible types in assignment (expression has type "A[A[B]]", variable has type "A[A[C]]") -- Generic functions -- ----------------- [case testTypeCheckingGenericFunctionBody] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') class A: pass class p(Generic[T, S]): def __init__(self, t: T, a: S) -> None: pass def f(s: S, t: T) -> p[T, A]: a = t # type: S # E: Incompatible types in assignment (expression has type "T", variable has type "S") s = t # E: Incompatible types in assignment (expression has type "T", variable has type "S") p_s_a = None # type: p[S, A] if s: return p_s_a # E: Incompatible return value type (got "p[S, A]", expected "p[T, A]") b = t # type: T c = s # type: S p_t_a = None # type: p[T, A] return p_t_a [out] [case testTypeCheckingGenericMethodBody] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class p(Generic[T, S]): def __init__(self, t: T, a: S) -> None: pass class A(Generic[T]): def f(self, s: S, t: T) -> p[S, T]: s = t # E: Incompatible types in assignment (expression has type "T", variable has type "S") p_s_s = None # type: p[S, S] if s: return p_s_s # E: Incompatible return value type (got "p[S, S]", expected "p[S, T]") p_t_t = None # type: p[T, T] if t: return p_t_t # E: Incompatible return value type (got "p[T, T]", expected "p[S, T]") t = t s = s p_s_t = None # type: p[S, T] return p_s_t [out] [case testProhibitTypeApplicationToGenericFunctions] from typing import TypeVar T = TypeVar('T') def f(x: T) -> T: pass y = f[int] # E: Type application is only supported for generic classes [out] -- Generic types in expressions -- ---------------------------- [case testTypeApplicationArgs] from typing import TypeVar, Generic T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: ... 
Node[int]() # E: Too few arguments for "Node" Node[int](1, 1, 1) # E: Too many arguments for "Node" [out] [case testTypeApplicationTvars] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): pass A[int]() # E: Type application has too few types (2 expected) A[int, str, int]() # E: Type application has too many types (2 expected) [out] [case testInvalidTypeApplicationType] a = None # type: A class A: pass a[A]() # E: Value of type "A" is not indexable A[A]() # E: The type "Type[A]" is not generic and not indexable [out] [case testTypeApplicationArgTypes] from typing import TypeVar, Generic T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: ... Node[int](1) Node[int]('a') # E: Argument 1 to "Node" has incompatible type "str"; expected "int" class Dummy(Generic[T]): def meth(self, x: T) -> None: ... def methout(self) -> T: ... Dummy[int]().meth(1) Dummy[int]().meth('a') # E: Argument 1 to "meth" of "Dummy" has incompatible type "str"; expected "int" reveal_type(Dummy[int]()) # E: Revealed type is '__main__.Dummy[builtins.int*]' reveal_type(Dummy[int]().methout()) # E: Revealed type is 'builtins.int*' [out] [case testTypeApplicationArgTypesSubclasses] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class C(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... class D(C[int, T]): ... D[str](1, 'a') D[str](1, 1) # E: Argument 2 to "D" has incompatible type "int"; expected "str" class E(D[str]): ... E(1, 'a') E(1, 1) # E: Argument 2 to "E" has incompatible type "int"; expected "str" [out] [case testTypeApplicationAlias] from typing import TypeVar, Generic T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: ... Alias = Node Alias[int](1) Alias[int]("a") # E: Argument 1 to "Node" has incompatible type "str"; expected "int" [out] [case testTypeApplicationCrash] type[int] # this was crashing, see #2302 (comment) # E: The type "Type[type]" is not generic and not indexable [out] -- Generic type aliases -- -------------------- [case testGenericTypeAliasesBasic] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... IntNode = Node[int, S] IntIntNode = Node[int, int] SameNode = Node[T, T] n = Node(1, 1) # type: IntIntNode n1 = Node(1, 'a') # type: IntIntNode # E: Argument 2 to "Node" has incompatible type "str"; expected "int" m = Node(1, 1) # type: IntNode m1 = Node('x', 1) # type: IntNode # E: Argument 1 to "Node" has incompatible type "str"; expected "int" m2 = Node(1, 1) # type: IntNode[str] # E: Argument 2 to "Node" has incompatible type "int"; expected "str" s = Node(1, 1) # type: SameNode[int] reveal_type(s) # E: Revealed type is '__main__.Node[builtins.int, builtins.int]' s1 = Node(1, 'x') # type: SameNode[int] # E: Argument 2 to "Node" has incompatible type "str"; expected "int" [out] [case testGenericTypeAliasesBasic2] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... 
IntNode = Node[int, S] IntIntNode = Node[int, int] SameNode = Node[T, T] def output_bad() -> IntNode[str]: return Node(1, 1) # Eroor - bad return type, see out def input(x: IntNode[str]) -> None: pass input(Node(1, 's')) input(Node(1, 1)) # E: Argument 2 to "Node" has incompatible type "int"; expected "str" def output() -> IntNode[str]: return Node(1, 'x') reveal_type(output()) # E: Revealed type is '__main__.Node[builtins.int, builtins.str]' def func(x: IntNode[T]) -> IntNode[T]: return x reveal_type(func) # E: Revealed type is 'def [T] (x: __main__.Node[builtins.int, T`-1]) -> __main__.Node[builtins.int, T`-1]' func(1) # E: Argument 1 to "func" has incompatible type "int"; expected "Node[int, ]" func(Node('x', 1)) # E: Argument 1 to "Node" has incompatible type "str"; expected "int" reveal_type(func(Node(1, 'x'))) # E: Revealed type is '__main__.Node[builtins.int, builtins.str*]' def func2(x: SameNode[T]) -> SameNode[T]: return x reveal_type(func2) # E: Revealed type is 'def [T] (x: __main__.Node[T`-1, T`-1]) -> __main__.Node[T`-1, T`-1]' func2(Node(1, 'x')) # E: Cannot infer type argument 1 of "func2" y = func2(Node('x', 'x')) reveal_type(y) # E: Revealed type is '__main__.Node[builtins.str*, builtins.str*]' def wrap(x: T) -> IntNode[T]: return Node(1, x) z = None # type: str reveal_type(wrap(z)) # E: Revealed type is '__main__.Node[builtins.int, builtins.str*]' [out] main:13: error: Argument 2 to "Node" has incompatible type "int"; expected "str" [case testGenericTypeAliasesWrongAliases] # flags: --show-column-numbers --python-version 3.6 from typing import TypeVar, Generic, List, Callable, Tuple, Union T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... A = Node[T] # Error B = Node[T, T] C = Node[T, T, T] # Error D = Node[T, S] E = Node[Node[T, T], List[T]] F = Node[List[T, T], S] # Error G = Callable[..., List[T, T]] # Error H = Union[int, Tuple[T, Node[T]]] # Error h: H # Error h1: H[int, str] # Two errors here, wrong number of args for H, and for Node x = None # type: D[int, str] reveal_type(x) y = None # type: E[int] reveal_type(y) X = T # Error [builtins fixtures/list.pyi] [out] main:9:4: error: "Node" expects 2 type arguments, but 1 given main:11:4: error: "Node" expects 2 type arguments, but 3 given main:15:9: error: "list" expects 1 type argument, but 2 given main:16:18: error: "list" expects 1 type argument, but 2 given main:17:24: error: "Node" expects 2 type arguments, but 1 given main:18:3: error: "Node" expects 2 type arguments, but 1 given main:19:4: error: Bad number of arguments for type alias, expected: 1, given: 2 main:19:4: error: "Node" expects 2 type arguments, but 1 given main:22:0: error: Revealed type is '__main__.Node[builtins.int, builtins.str]' main:24:0: error: Revealed type is '__main__.Node[__main__.Node[builtins.int, builtins.int], builtins.list[builtins.int]]' main:26:4: error: Type variable "__main__.T" is invalid as target for type alias [case testGenericTypeAliasesForAliases] from typing import TypeVar, Generic, List, Union T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: pass ListedNode = Node[List[T], List[S]] Second = ListedNode[int, T] Third = Union[int, Second[str]] def f2(x: T) -> Second[T]: return Node([1], [x]) reveal_type(f2('a')) # E: Revealed type is '__main__.Node[builtins.list[builtins.int], builtins.list[builtins.str*]]' def f3() -> Third: return Node([1], ['x']) reveal_type(f3()) # E: Revealed type is 'Union[builtins.int, 
__main__.Node[builtins.list[builtins.int], builtins.list[builtins.str]]]' [builtins fixtures/list.pyi] [case testGenericTypeAliasesAny] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: self.x = x self.y = y IntNode = Node[int, S] AnyNode = Node[S, T] def output() -> IntNode[str]: return Node(1, 'x') x = output() # type: IntNode # This is OK (implicit Any) y = None # type: IntNode y.x = 1 y.x = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int") y.y = 1 # Both are OK (implicit Any) y.y = 'x' z = Node(1, 'x') # type: AnyNode reveal_type(z) # E: Revealed type is '__main__.Node[Any, Any]' [out] [case testGenericTypeAliasesAcessingMethods] from typing import TypeVar, Generic, List T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: self.x = x def meth(self) -> T: return self.x ListedNode = Node[List[T]] l = None # type: ListedNode[int] l.x.append(1) l.meth().append(1) reveal_type(l.meth()) # E: Revealed type is 'builtins.list*[builtins.int]' l.meth().append('x') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" ListedNode[str]([]).x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "List[str]") [builtins fixtures/list.pyi] [case testGenericTypeAliasesSubclassing] from typing import TypeVar, Generic, Tuple, List T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: ... TupledNode = Node[Tuple[T, T]] class D(TupledNode[T]): ... class L(List[TupledNode[T]]): ... def f_bad(x: T) -> D[T]: return D(1) # Error, see out L[int]().append(Node((1, 1))) L[int]().append(5) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "Node[Tuple[int, int]]" x = D((1, 1)) # type: D[int] y = D(5) # type: D[int] # E: Argument 1 to "D" has incompatible type "int"; expected "Tuple[int, int]" def f(x: T) -> D[T]: return D((x, x)) reveal_type(f('a')) # E: Revealed type is '__main__.D[builtins.str*]' [builtins fixtures/list.pyi] [out] main:15: error: Argument 1 to "D" has incompatible type "int"; expected "Tuple[T, T]" [case testGenericTypeAliasesSubclassingBad] from typing import TypeVar, Generic, Tuple, Union T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: ... TupledNode = Node[Tuple[T, T]] UNode = Union[int, Node[T]] class C(TupledNode): ... # Same as TupledNode[Any] class D(TupledNode[T]): ... class E(Generic[T], UNode[T]): ... 
# E: Invalid base class reveal_type(D((1, 1))) # E: Revealed type is '__main__.D[builtins.int*]' [builtins fixtures/list.pyi] [case testGenericTypeAliasesUnion] from typing import TypeVar, Generic, Union, Any T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: self.x = x UNode = Union[int, Node[T]] x = 1 # type: UNode[int] x + 1 # E: Unsupported left operand type for + (some union) if not isinstance(x, Node): x + 1 if not isinstance(x, int): x.x = 1 x.x = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int") def f(x: T) -> UNode[T]: if 1: return Node(x) else: return 1 reveal_type(f(1)) # E: Revealed type is 'Union[builtins.int, __main__.Node[builtins.int*]]' TNode = Union[T, Node[int]] s = 1 # type: TNode[str] # E: Incompatible types in assignment (expression has type "int", variable has type "Union[str, Node[int]]") if not isinstance(s, str): s.x = 1 z = None # type: TNode # Same as TNode[Any] z.x z.foo() # E: Item "Node[int]" of "Union[Any, Node[int]]" has no attribute "foo" [builtins fixtures/isinstance.pyi] [case testGenericTypeAliasesTuple] from typing import TypeVar, Tuple T = TypeVar('T') SameTP = Tuple[T, T] IntTP = Tuple[int, T] def f1(x: T) -> SameTP[T]: return x, x a, b, c = f1(1) # E: Need more than 2 values to unpack (3 expected) x, y = f1(1) reveal_type(x) # E: Revealed type is 'builtins.int' def f2(x: IntTP[T]) -> IntTP[T]: return x f2((1, 2, 3)) # E: Argument 1 to "f2" has incompatible type "Tuple[int, int, int]"; expected "Tuple[int, ]" reveal_type(f2((1, 'x'))) # E: Revealed type is 'Tuple[builtins.int, builtins.str*]' [builtins fixtures/for.pyi] [case testGenericTypeAliasesCallable] from typing import TypeVar, Generic, Callable T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: ... 
BadC = Callable[T] # E: Please use "Callable[[], ]" or "Callable" C = Callable[..., T] C2 = Callable[[T, T], Node[T]] def make_cb(x: T) -> C[T]: return lambda *args: x reveal_type(make_cb(1)) # E: Revealed type is 'def (*Any, **Any) -> builtins.int*' def use_cb(arg: T, cb: C2[T]) -> Node[T]: return cb(arg, arg) use_cb(1, 1) # E: Argument 2 to "use_cb" has incompatible type "int"; expected "Callable[[int, int], Node[int]]" my_cb = None # type: C2[int] use_cb('x', my_cb) # E: Argument 2 to "use_cb" has incompatible type "Callable[[int, int], Node[int]]"; expected "Callable[[str, str], Node[str]]" reveal_type(use_cb(1, my_cb)) # E: Revealed type is '__main__.Node[builtins.int]' [out] [case testGenericTypeAliasesPEPBasedExample] from typing import TypeVar, List, Tuple T = TypeVar('T', int, bool) Vec = List[Tuple[T, T]] vec = [] # type: Vec[bool] vec.append('x') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "Tuple[bool, bool]" reveal_type(vec[0]) # E: Revealed type is 'Tuple[builtins.bool, builtins.bool]' def fun1(v: Vec[T]) -> T: return v[0][0] def fun2(v: Vec[T], scale: T) -> Vec[T]: return v reveal_type(fun1([(1, 1)])) # E: Revealed type is 'builtins.int*' fun1(1) # E: Argument 1 to "fun1" has incompatible type "int"; expected "List[Tuple[int, int]]" fun1([(1, 'x')]) # E: Cannot infer type argument 1 of "fun1" reveal_type(fun2([(1, 1)], 1)) # E: Revealed type is 'builtins.list[Tuple[builtins.int*, builtins.int*]]' fun2([('x', 'x')], 'x') # E: Value of type variable "T" of "fun2" cannot be "str" [builtins fixtures/list.pyi] [case testGenericTypeAliasesImporting] from typing import TypeVar from a import Node, TupledNode T = TypeVar('T') n = None # type: TupledNode[int] n.x = 1 n.y = (1, 1) n.y = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "Tuple[int, int]") def f(x: Node[T, T]) -> TupledNode[T]: return Node(x.x, (x.x, x.x)) f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Node[, ]" f(Node(1, 'x')) # E: Cannot infer type argument 1 of "f" reveal_type(Node('x', 'x')) # E: Revealed type is 'a.Node[builtins.str*, builtins.str*]' [file a.py] from typing import TypeVar, Generic, Tuple T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: self.x = x self.y = y TupledNode = Node[T, Tuple[T, T]] [builtins fixtures/list.pyi] [case testGenericTypeAliasesImportingWithoutTypeVar] from typing import Tuple from lib import Transform def int_tf(m: int) -> Transform[int, str]: def transform(i: int, pos: int) -> Tuple[int, str]: pass return transform var: Transform[int, str] reveal_type(var) # E: Revealed type is 'def (builtins.int, builtins.int) -> Tuple[builtins.int, builtins.str]' [file lib.py] from typing import Callable, TypeVar, Tuple T = TypeVar('T') R = TypeVar('R') Transform = Callable[[T, int], Tuple[T, R]] [out] [case testGenericTypeAliasesImportingWithoutTypeVarError] from a import Alias x: Alias[int, str] # E: Bad number of arguments for type alias, expected: 1, given: 2 reveal_type(x) # E: Revealed type is 'builtins.list[builtins.list[Any]]' [file a.py] from typing import TypeVar, List T = TypeVar('T') Alias = List[List[T]] [builtins fixtures/list.pyi] [out] [case testGenericAliasWithTypeVarsFromDifferentModules] from mod import Alias, TypeVar S = TypeVar('S') NewAlias = Alias[int, int, S, S] class C: pass x: NewAlias[str] reveal_type(x) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.int, builtins.str, builtins.str]]' y: Alias[int, str, C, C] 
reveal_type(y) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str, __main__.C, __main__.C]]' [file mod.py] from typing import TypeVar, List, Tuple import a import b T = TypeVar('T') Alias = List[Tuple[T, a.T, b.T, b.B.T]] # alias_tvars here will be ['T', 'a.T', 'b.T', 'b.B.T'] [file a.py] from typing import TypeVar T = TypeVar('T') [file b.py] from typing import TypeVar T = TypeVar('T') class B: T = TypeVar('T') [builtins fixtures/list.pyi] [out] [case testTypeAliasesResultingInPlainInstance] from typing import Optional, Union O = Optional[int] U = Union[int] x: O y: U reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]' reveal_type(y) # E: Revealed type is 'builtins.int' U[int] # E: Bad number of arguments for type alias, expected: 0, given: 1 O[int] # E: Bad number of arguments for type alias, expected: 0, given: 1 [out] [case testAliasesInClassBodyNormalVsSubscripted] from typing import Union, Type, Iterable class A: pass class B(A): pass class C: a = A # This is a variable b = Union[int, str] # This is an alias c: Type[object] = Iterable[int] # This is however also a variable a = B b = int # E: Cannot assign multiple types to name "b" without an explicit "Type[...]" annotation \ # E: Incompatible types in assignment (expression has type "Type[int]", variable has type "Type alias to Union") c = int def f(self, x: a) -> None: pass # E: Invalid type "__main__.C.a" def g(self, x: b) -> None: pass def h(self, x: c) -> None: pass # E: Invalid type "__main__.C.c" x: b reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' [out] [case testGenericTypeAliasesRuntimeExpressionsInstance] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... IntNode = Node[int, T] IntNode[int](1, 1) IntNode[int](1, 'a') # E: Argument 2 to "Node" has incompatible type "str"; expected "int" SameNode = Node[T, T] ff = SameNode[T](1, 1) # E: Need type annotation for variable a = SameNode(1, 'x') reveal_type(a) # E: Revealed type is '__main__.Node[Any, Any]' b = SameNode[int](1, 1) reveal_type(b) # E: Revealed type is '__main__.Node[builtins.int*, builtins.int*]' SameNode[int](1, 'x') # E: Argument 2 to "Node" has incompatible type "str"; expected "int" [out] [case testGenericTypeAliasesRuntimeExpressionsOther] from typing import TypeVar, Union, Tuple, Callable, Any T = TypeVar('T') CA = Callable[[T], int] TA = Tuple[T, int] UA = Union[T, int] cs = CA[str] + 1 # E: Unsupported left operand type for + ("Type alias to Callable") reveal_type(cs) # E: Revealed type is 'Any' ts = TA[str]() # E: "Type alias to Tuple" not callable reveal_type(ts) # E: Revealed type is 'Any' us = UA[str].x # E: "Type alias to Union" has no attribute "x" reveal_type(us) # E: Revealed type is 'Any' [out] [case testGenericTypeAliasesTypeVarBinding] from typing import TypeVar, Generic, List T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... class B(Generic[T, S]): def __init__(self, x: List[T], y: List[S]) -> None: ... 
SameA = A[T, T] SameB = B[T, T] class C(Generic[T]): a = None # type: SameA[T] b = SameB[T]([], []) reveal_type(C[int]().a) # E: Revealed type is '__main__.A[builtins.int*, builtins.int*]' reveal_type(C[str]().b) # E: Revealed type is '__main__.B[builtins.str*, builtins.str*]' [builtins fixtures/list.pyi] [case testGenericTypeAliasesTypeVarConstraints] # flags: --show-column-numbers from typing import TypeVar, Generic T = TypeVar('T', int, list) S = TypeVar('S', int, list) class A(Generic[T, S]): def __init__(self, x: T, y: S) -> None: ... BadA = A[str, T] # One error here SameA = A[T, T] x = None # type: SameA[int] y = None # type: SameA[str] # Two errors here, for both args of A [builtins fixtures/list.pyi] [out] main:9:7: error: Value of type variable "T" of "A" cannot be "str" main:13: error: Value of type variable "T" of "A" cannot be "str" main:13: error: Value of type variable "S" of "A" cannot be "str" [case testGenericTypeAliasesIgnoredPotentialAlias] class A: ... Bad = A[int] # type: ignore reveal_type(Bad) # E: Revealed type is 'Any' [out] [case testNoSubscriptionOfBuiltinAliases] from typing import List, TypeVar list[int]() # E: "list" is not subscriptable ListAlias = List def fun() -> ListAlias[int]: pass reveal_type(fun()) # E: Revealed type is 'builtins.list[builtins.int]' BuiltinAlias = list BuiltinAlias[int]() # E: "list" is not subscriptable #check that error is reported only once, and type is still stored T = TypeVar('T') BadGenList = list[T] # E: "list" is not subscriptable reveal_type(BadGenList[int]()) # E: Revealed type is 'builtins.list[builtins.int*]' reveal_type(BadGenList()) # E: Revealed type is 'builtins.list[Any]' [builtins fixtures/list.pyi] [out] [case testImportedTypeAliasInRuntimeContext] from m import Alias n = Alias[int]([1]) reveal_type(n) # E: Revealed type is 'm.Node[builtins.list*[builtins.int]]' bad = Alias[str]([1]) # E: List item 0 has incompatible type "int"; expected "str" n2 = Alias([1]) # Same as Node[List[Any]] reveal_type(n2) # E: Revealed type is 'm.Node[builtins.list*[Any]]' [file m.py] from typing import TypeVar, Generic, List T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: self.x = x Alias = Node[List[T]] [builtins fixtures/list.pyi] [out] -- Simplified declaration of generics -- ---------------------------------- [case testSimplifiedGenericSimple] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class B(Generic[T]): def b(self) -> T: ... class C(Generic[T]): def c(self) -> T: ... class D(B[T], C[S]): ... reveal_type(D[str, int]().b()) # E: Revealed type is 'builtins.str*' reveal_type(D[str, int]().c()) # E: Revealed type is 'builtins.int*' [builtins fixtures/list.pyi] [out] [case testSimplifiedGenericCallable] from typing import TypeVar, Generic, Callable T = TypeVar('T') S = TypeVar('S') class B(Generic[T]): def b(self) -> T: ... class D(B[Callable[[T], S]]): ... 
reveal_type(D[str, int]().b()) # E: Revealed type is 'def (builtins.str*) -> builtins.int*' [builtins fixtures/list.pyi] [out] [case testSimplifiedGenericComplex] from typing import TypeVar, Generic, Tuple T = TypeVar('T') S = TypeVar('S') U = TypeVar('U') class A(Generic[T, S]): pass class B(Generic[T, S]): def m(self) -> Tuple[T, S]: pass class C(A[S, B[T, int]], B[U, A[int, T]]): pass c = C[object, int, str]() reveal_type(c.m()) # E: Revealed type is 'Tuple[builtins.str*, __main__.A*[builtins.int, builtins.int*]]' [builtins fixtures/tuple.pyi] [out] [case testSimplifiedGenericOrder] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class B(Generic[T]): def b(self) -> T: ... class C(Generic[T]): def c(self) -> T: ... class D(B[T], C[S], Generic[S, T]): ... reveal_type(D[str, int]().b()) # E: Revealed type is 'builtins.int*' reveal_type(D[str, int]().c()) # E: Revealed type is 'builtins.str*' [builtins fixtures/list.pyi] [out] [case testSimplifiedGenericDuplicate] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T, T]): # E: Duplicate type variables in Generic[...] or Protocol[...] pass a = A[int]() [builtins fixtures/list.pyi] [out] [case testSimplifiedGenericNotAll] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class A(Generic[T]): pass class B(Generic[T]): pass class C(A[T], B[S], Generic[T]): # E: If Generic[...] or Protocol[...] is present it should list all type variables pass c = C[int, str]() [builtins fixtures/list.pyi] [out] [case testSimplifiedGenericInvalid] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass class B(A[S]): # E: Name 'S' is not defined pass [builtins fixtures/list.pyi] [out] -- Multiple assignment with lists -- ------------------------------ [case testMultipleAssignmentWithLists] from typing import List class A: pass class B: pass class B2(B): pass a = None # type: A b = None # type: B b2 = None # type: B2 list_a = [a] list_b = [b] list_b2 = [b2] a, b = list_a # E: Incompatible types in assignment (expression has type "A", variable has type "B") b, a = list_a # E: Incompatible types in assignment (expression has type "A", variable has type "B") b2, b2 = list_b # E: Incompatible types in assignment (expression has type "B", variable has type "B2") a, a = list_a b, b2, b = list_b2 [builtins fixtures/for.pyi] [case testMultipleAssignmentWithListsInInitialization] from typing import List class A: pass list_object = [object()] list_a = [A()] a, b = list_object # type: (A, object) # E: Incompatible types in assignment (expression has type "object", variable has type "A") c, d = list_object # type: (object, A) # E: Incompatible types in assignment (expression has type "object", variable has type "A") e, f = list_a # type: (A, object) [builtins fixtures/for.pyi] [case testMultipleAssignmentWithListAndIndexing] from typing import List a = None # type: List[A] b = None # type: List[int] a[1], b[1] = a # E: Incompatible types in assignment (expression has type "A", target has type "int") a[1], a[2] = a class A: pass [file builtins.py] from typing import TypeVar, Generic, Iterable T = TypeVar('T') class object: pass class list(Iterable[T]): def __setitem__(self, x: int, v: T) -> None: pass class int: pass class type: pass class tuple: pass class function: pass class str: pass [case testMultipleAssignmentWithIterable] from typing import Iterable, TypeVar a = None # type: int b = None # type: str T = TypeVar('T') def f(x: T) -> Iterable[T]: pass a, b = f(a) # E: Incompatible types in 
assignment (expression has type "int", variable has type "str") b, b = f(a) # E: Incompatible types in assignment (expression has type "int", variable has type "str") a, a = f(a) b, b = f(b) [builtins fixtures/for.pyi] -- Error messages -- -------------- [case testErrorWithLongGenericTypeName] from typing import TypeVar, Generic B = TypeVar('B') C = TypeVar('C') D = TypeVar('D') E = TypeVar('E') F = TypeVar('F') G = TypeVar('G') H = TypeVar('H') I = TypeVar('I') J = TypeVar('J') K = TypeVar('K') L = TypeVar('L') M = TypeVar('M') N = TypeVar('N') O = TypeVar('O') P = TypeVar('P') Q = TypeVar('Q') R = TypeVar('R') S = TypeVar('S') T = TypeVar('T') U = TypeVar('U') V = TypeVar('V') W = TypeVar('W') X = TypeVar('X') Y = TypeVar('Y') Z = TypeVar('Z') class OO: pass a = None # type: A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object] f(a) # E: Argument 1 to "f" has incompatible type "A[...]"; expected "OO" def f(a: OO) -> None: pass class A(Generic[B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z]): pass [case testErrorWithShorterGenericTypeName] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[object, B] f(a) # E: Argument 1 to "f" has incompatible type "A[object, B]"; expected "B" def f(a: 'B') -> None: pass class A(Generic[S, T]): pass class B: pass [case testErrorWithShorterGenericTypeName2] from typing import Callable, TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[object, Callable[[], None]] f(a) # E: Argument 1 to "f" has incompatible type "A[object, Callable[[], None]]"; expected "B" def f(a: 'B') -> None: pass class A(Generic[S, T]): pass class B: pass -- Overloads + generics -- -------------------- [case testGenericArgumentInOverload] from foo import * [file foo.pyi] from typing import overload, List class A: pass class B: pass a, b = None, None # type: (A, B) @overload def f(a: List[A]) -> A: pass @overload def f(a: B) -> B: pass b = f([a]) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = f([b]) # E: List item 0 has incompatible type "B"; expected "A" a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = f([a]) b = f(b) [builtins fixtures/list.pyi] [case testGenericFunctionAsOverloadItem] from foo import * [file foo.pyi] from typing import overload, TypeVar, List T = TypeVar('T') class A: pass class B: pass @overload def f(a: B) -> B: pass @overload def f(a: List[T]) -> T: pass a, b = None, None # type: (A, B) b = f([a]) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = f([b]) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = f([a]) b = f([b]) b = f(b) [builtins fixtures/list.pyi] -- Type variable scoping -- --------------------- [case testLocalTypeVariable] from typing import TypeVar def f() -> None: T = TypeVar('T') def g(x: T) -> T: pass a = g(1) a = 1 a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [out] [case testClassLevelTypeVariable] from typing import TypeVar class A: T = TypeVar('T') def g(self, x: T) -> T: pass a = A().g(1) a = 1 a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") 
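-- The two scoping cases above show that a TypeVar defined inside a function or a
-- class body still only parameterizes the generic callable that uses it, so each
-- call site infers its own concrete type. Below is a minimal additional sketch of
-- the same behaviour (the case name and the helper "g" are hypothetical, not part
-- of the original suite; the expected messages reuse wording already asserted in
-- the neighbouring cases).
[case testLocalTypeVariableInferencePerCallSketch]
from typing import TypeVar
def f() -> None:
    # T is only visible while checking f; g is still a generic function.
    T = TypeVar('T')
    def g(x: T) -> T: pass
    a = g(1)   # T is inferred as int for this call
    a = ''  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
    b = g('x')  # and as str for this one
    b = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
[out]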
[case testGenericInnerClass] from typing import TypeVar, Generic T = TypeVar('T') class A: class B(Generic[T]): def meth(self) -> T: ... B[int]() reveal_type(B[int]().meth) # E: Revealed type is 'def () -> builtins.int*' A.B[int]() reveal_type(A.B[int]().meth) # E: Revealed type is 'def () -> builtins.int*' [case testGenericClassInnerFunctionTypeVariable] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, a: T) -> None: self.a = a def f(self, n: int) -> None: def g(a: T): self.a = a g(self.a) g(n) # E: Argument 1 to "g" has incompatible type "int"; expected "T" [case testFunctionInGenericInnerClassTypeVariable] from typing import TypeVar, Generic T = TypeVar('T') class Outer(Generic[T]): class Inner: x: T # E: Invalid type "__main__.T" def f(self, x: T) -> T: ... # E: Type variable 'T' is bound by an outer class def g(self) -> None: y: T # E: Invalid type "__main__.T" -- Callable subtyping with generic functions -- ----------------------------------------- [case testSubtypingWithGenericFunctions] from typing import TypeVar A = TypeVar('A') B = TypeVar('B') def f1(x: A) -> A: ... def f2(x: A) -> B: ... def f3(x: B) -> B: ... def f4(x: int) -> A: ... y1 = f1 y1 = f1 y1 = f2 y1 = f3 y1 = f4 # E: Incompatible types in assignment (expression has type "Callable[[int], A]", variable has type "Callable[[A], A]") y2 = f2 y2 = f2 y2 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[A], B]") y2 = f3 # E: Incompatible types in assignment (expression has type "Callable[[B], B]", variable has type "Callable[[A], B]") y2 = f4 # E: Incompatible types in assignment (expression has type "Callable[[int], A]", variable has type "Callable[[A], B]") y3 = f3 y3 = f3 y3 = f1 y3 = f2 y3 = f4 # E: Incompatible types in assignment (expression has type "Callable[[int], A]", variable has type "Callable[[B], B]") y4 = f4 y4 = f4 y4 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[int], A]") y4 = f2 y4 = f3 # E: Incompatible types in assignment (expression has type "Callable[[B], B]", variable has type "Callable[[int], A]") [case testSubtypingWithGenericInnerFunctions] from typing import TypeVar A = TypeVar('A') B = TypeVar('B') T = TypeVar('T') def outer(t: T) -> None: def f1(x: A) -> A: ... def f2(x: A) -> B: ... def f3(x: T) -> A: ... def f4(x: A) -> T: ... def f5(x: T) -> T: ... 
y1 = f1 y1 = f2 y1 = f3 # E: Incompatible types in assignment (expression has type "Callable[[T], A]", variable has type "Callable[[A], A]") y1 = f4 # E: Incompatible types in assignment (expression has type "Callable[[A], T]", variable has type "Callable[[A], A]") y1 = f5 # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[A], A]") y2 = f2 y2 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[A], B]") y3 = f3 y3 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[T], A]") y3 = f2 y3 = f4 # E: Incompatible types in assignment (expression has type "Callable[[A], T]", variable has type "Callable[[T], A]") y3 = f5 # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[T], A]") y4 = f4 y4 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[A], T]") y4 = f2 y4 = f3 # E: Incompatible types in assignment (expression has type "Callable[[T], A]", variable has type "Callable[[A], T]") y4 = f5 # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[A], T]") y5 = f5 y5 = f1 y5 = f2 y5 = f3 y5 = f4 [out] [case testSubtypingWithGenericFunctionUsingTypevarWithValues] from typing import TypeVar, Callable T = TypeVar('T', int, str) def f(x: T) -> T: pass def g1(f: Callable[[str], str]) -> None: pass g1(f) def g2(f: Callable[[int], int]) -> None: pass g2(f) def g3(f: Callable[[object], object]) -> None: pass g3(f) # E: Argument 1 to "g3" has incompatible type "Callable[[T], T]"; \ expected "Callable[[object], object]" [case testSubtypingWithGenericFunctionUsingTypevarWithValues2-skip] from typing import TypeVar, Callable T = TypeVar('T', int, str) def f(x: T) -> T: pass g = f g = f --Operations on type variable types -- --------------------------------- [case testTypeVariableTypeEquality] from typing import TypeVar T = TypeVar('T') def f(a: T, b: T) -> T: a.__ne__(b) if a == b: return a else: return b [builtins fixtures/ops.pyi] [case testTypeVariableTypeIs] from typing import TypeVar T = TypeVar('T') def f(a: T, b: T) -> T: if a is b or a is 1: return a else: return b [builtins fixtures/ops.pyi] [case testTypeVariableTypeLessThan] from typing import TypeVar T = TypeVar('T') def f(a: T, b: T) -> T: if a < b: return a else: return b [builtins fixtures/ops.pyi] [out] main:4: error: Unsupported left operand type for < ("T") -- Subtyping generic callables -- --------------------------- [case testSubtypingGenericTypeObject] from typing import Callable, Generic, TypeVar T = TypeVar('T') class C(Generic[T]): def __init__(self) -> None: pass x = C # type: Callable[[], C[int]] y = C # type: Callable[[], int] # E: Incompatible types in assignment (expression has type "Type[C[Any]]", variable has type "Callable[[], int]") -- Special cases -- ------------- [case testIdentityHigherOrderFunction] from typing import Callable, TypeVar A = TypeVar('A') B = TypeVar('B') def square(n: int) -> int: return n def id(f: Callable[[A], B]) -> Callable[[A], B]: return f g = id(square) g(1) g('x') # E: Argument 1 has incompatible type "str"; expected "int" [case testIdentityHigherOrderFunction2] from typing import Callable, TypeVar A = TypeVar('A') def voidify(n: int) -> None: pass def identity(f: Callable[[A], None]) -> Callable[[A], None]: return f identity(voidify)(3) [case 
testIdentityHigherOrderFunction3] from typing import Callable, TypeVar A = TypeVar('A') B = TypeVar('B') def fn(n: B) -> None: pass def identity(f: A) -> A: return f identity(fn) identity(fn)('x') [case testTypeVariableUnionAndCallableInTypeInference] from typing import Union, Callable, TypeVar T = TypeVar('T') def f(x: T, y: Union[T, Callable[[T], None]]) -> None: pass f('', '') [case testGenericFunctionsWithUnalignedIds] from typing import TypeVar A = TypeVar('A') B = TypeVar('B') def f1(x: int, y: A) -> A: ... def f2(x: int, y: A) -> B: ... def f3(x: A, y: B) -> B: ... g = f1 g = f2 g = f3 [case testTypeVariableWithContainerAndTuple] from typing import TypeVar, Container T = TypeVar('T') def f(x: Container[T]) -> T: ... reveal_type(f((1, 2))) # E: Revealed type is 'builtins.int*' [case testClassMethodInGenericClassWithGenericConstructorArg] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, a: T) -> None: pass @classmethod def f(cls) -> None: pass [builtins fixtures/classmethod.pyi] [case testClassMethodInClassWithGenericConstructor] from typing import TypeVar, Generic T = TypeVar('T') class A: def __init__(self, a: T) -> None: pass @classmethod def f(cls) -> None: pass [builtins fixtures/classmethod.pyi] [case testGenericOperatorMethodOverlapping] from typing import TypeVar, Generic, Tuple T = TypeVar('T') T2 = TypeVar('T2') S = TypeVar('S', bound=str) S2 = TypeVar('S2', bound=str) class G(Generic[T]): pass class A: def __or__(self, x: G[T]) -> G[T]: pass def __ior__(self, x: G[T2]) -> G[T2]: pass class B: def __or__(self, x: G[T]) -> G[T]: pass def __ior__(self, x: G[S]) -> G[S]: pass \ # E: Signatures of "__ior__" and "__or__" are incompatible class C: def __or__(self, x: G[S]) -> G[S]: pass def __ior__(self, x: G[S2]) -> G[S2]: pass [case testGenericOperatorMethodOverlapping2] from typing import TypeVar, Generic, Tuple X = TypeVar('X') T = TypeVar('T', int, str) T2 = TypeVar('T2', int, str) S = TypeVar('S', float, str) S2 = TypeVar('S2', float, str) class G(Generic[X]): pass class A: def __or__(self, x: G[T]) -> G[T]: pass def __ior__(self, x: G[T2]) -> G[T2]: pass class B: def __or__(self, x: G[T]) -> G[T]: pass def __ior__(self, x: G[S]) -> G[S]: pass \ # E: Signatures of "__ior__" and "__or__" are incompatible class C: def __or__(self, x: G[S]) -> G[S]: pass def __ior__(self, x: G[S2]) -> G[S2]: pass class D: def __or__(self, x: G[X]) -> G[X]: pass def __ior__(self, x: G[S2]) -> G[S2]: pass \ # E: Signatures of "__ior__" and "__or__" are incompatible [case testConstraintInferenceForAnyAgainstTypeT] from typing import Type, Any, TypeVar T = TypeVar('T') def f(c: Type[T]) -> T: ... 
x: Any reveal_type(f(x)) # E: Revealed type is 'Any' [case testQualifiedTypeVariableName] import b def f(x: b.T) -> b.T: return x reveal_type(f) reveal_type(b.g) [file b.py] from typing import TypeVar T = TypeVar('T') def g(x: T) -> T: return x [out] main:3: error: Revealed type is 'def [b.T] (x: b.T`-1) -> b.T`-1' main:4: error: Revealed type is 'def [T] (x: T`-1) -> T`-1' [case testPartiallyQualifiedTypeVariableName] from p import b def f(x: b.T) -> b.T: return x reveal_type(f) reveal_type(b.g) [file p/__init__.py] [file p/b.py] from typing import TypeVar T = TypeVar('T') def g(x: T) -> T: return x [out] main:3: error: Revealed type is 'def [b.T] (x: b.T`-1) -> b.T`-1' main:4: error: Revealed type is 'def [T] (x: T`-1) -> T`-1' mypy-0.560/test-data/unit/check-ignore.test0000644€tŠÔÚ€2›s®0000001042713215007205024777 0ustar jukkaDROPBOX\Domain Users00000000000000[case testIgnoreTypeError] x = 1 x() # type: ignore x() # E: "int" not callable [case testIgnoreUndefinedName] x = 1 y # type: ignore z # E: Name 'z' is not defined [case testIgnoreImportError] import xyz_m # type: ignore xyz_m.foo 1() # E: "int" not callable [case testIgnoreImportFromError] from xyz_m import a, b # type: ignore a.foo b() 1() # E: "int" not callable [case testIgnoreImportFromErrorMultiline] from xyz_m import ( # type: ignore a, b ) a.foo b() 1() # E: "int" not callable [case testIgnoreImportAllError] from xyz_m import * # type: ignore x # E: Name 'x' is not defined 1() # E: "int" not callable [case testIgnoreImportBadModule] import m # type: ignore from m import a # type: ignore [file m.py] + [out] tmp/m.py:1: error: invalid syntax [case testIgnoreAppliesOnlyToMissing] import a # type: ignore import b # type: ignore reveal_type(a.foo) # E: Revealed type is 'Any' reveal_type(b.foo) # E: Revealed type is 'builtins.int' a.bar() b.bar() # E: Module has no attribute "bar" [file b.py] foo = 3 [builtins fixtures/module_all.pyi] [out] [case testIgnoreImportStarFromBadModule] from m import * # type: ignore [file m.py] + [out] tmp/m.py:1: error: invalid syntax [case testIgnoreAssignmentTypeError] x = 1 x = '' # type: ignore x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testIgnoreInvalidOverride] class A: def f(self) -> int: pass class B(A): def f(self) -> str: pass # type: ignore [case testIgnoreMissingModuleAttribute] import m m.x = object # type: ignore m.f() # type: ignore m.y # E: Module has no attribute "y" [file m.py] [builtins fixtures/module.pyi] [case testIgnoreTypeInferenceError] x = [] # type: ignore y = x x.append(1) [builtins fixtures/list.pyi] [case testIgnoreTypeInferenceError2] def f() -> None: pass x = f() # type: ignore y = x x = 1 [builtins fixtures/list.pyi] [case testIgnoreTypeInferenceErrorAndMultipleAssignment] x, y = [], [] # type: ignore z = x z = y [builtins fixtures/list.pyi] [case testIgnoreSomeStarImportErrors] from m1 import * from m2 import * # type: ignore # We should still import things that don't conflict. y() # E: "str" not callable z() # E: "int" not callable x() # E: "int" not callable [file m1.py] x = 1 y = '' [file m2.py] x = '' z = 1 [case testIgnoredModuleDefinesBaseClass1] from m import B # type: ignore class C(B): def f(self) -> None: self.f(1) # E: Too many arguments for "f" of "C" self.g(1) [out] [case testIgnoredModuleDefinesBaseClass2] import m # type: ignore class C(m.B): def f(self) -> None: ... 
c = C() c.f(1) # E: Too many arguments for "f" of "C" c.g(1) c.x = 1 [out] [case testIgnoredModuleDefinesBaseClassAndClassAttribute] import m # type: ignore class C(m.B): @staticmethod def f() -> None: pass C.f(1) # E: Too many arguments for "f" of "C" C.g(1) C.x = 1 [builtins fixtures/staticmethod.pyi] [out] [case testIgnoredModuleDefinesBaseClassWithInheritance1] from m import B # type: ignore class C: pass class D(C, B): def f(self) -> None: self.f(1) # E: Too many arguments for "f" of "D" self.g(1) [out] [case testIgnoredModuleDefinesBaseClassWithInheritance2] from m import B # type: ignore class C(B): pass class D(C): def f(self) -> None: self.f(1) # E: Too many arguments for "f" of "D" self.g(1) [out] [case testIgnoreWithFollowingIndentedComment] if 1: # type: ignore # blah pass [out] [case testIgnoreTooManyTypeArguments] from typing import TypeVar, Generic T = TypeVar('T') U = TypeVar('U') class Base(Generic[T, U]): pass class PartialBase(Base[T, int], Generic[T]): pass class Child(PartialBase[str, int]): # type: ignore pass def foo(x: Base[str, int]) -> None: pass foo(Child()) def bar(x: Base[str, str]) -> None: pass bar(Child()) [out] main:19: error: Argument 1 to "bar" has incompatible type "Child"; expected "Base[str, str]" [case testTypeIgnoreLineNumberWithinFile] import m pass # type: ignore m.f(kw=1) [file m.py] pass def f() -> None: pass [out] main:3: error: Unexpected keyword argument "kw" for "f" tmp/m.py:2: note: "f" defined here [case testIgnoreUnexpectedKeywordArgument] import m m.f(kw=1) # type: ignore [file m.py] def f() -> None: pass [out] [case testCannotIgnoreBlockingError] yield # type: ignore # E: 'yield' outside function mypy-0.560/test-data/unit/check-incomplete-fixture.test0000644€tŠÔÚ€2›s®0000000675213215007205027345 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for reporting errors when a test case uses a fixture with -- missing definitions. At least in the most common cases this should not -- result in an uncaught exception. These tests make sure that this behavior -- does not regress. -- -- NOTE: These tests do NOT test behavior of mypy outside tests. [case testVariableUndefinedUsingDefaultFixture] import m # This used to cause a crash since types.ModuleType is not available # by default. We fall back to 'object' now. m.x # E: "object" has no attribute "x" [file m.py] [case testListMissingFromStubs] from typing import List def f(x: List[int]) -> None: pass [out] main:1: error: Name '__builtins__.list' is not defined main:1: note: Maybe your test fixture does not define "typing.List"? main:1: note: Consider adding [builtins fixtures/list.pyi] to your test description [case testDictMissingFromStubs] from typing import Dict def f(x: Dict[int]) -> None: pass [out] main:1: error: Name '__builtins__.dict' is not defined main:1: note: Maybe your test fixture does not define "typing.Dict"? main:1: note: Consider adding [builtins fixtures/dict.pyi] to your test description [case testSetMissingFromStubs] from typing import Set def f(x: Set[int]) -> None: pass [out] main:1: error: Name '__builtins__.set' is not defined main:1: note: Maybe your test fixture does not define "typing.Set"? main:1: note: Consider adding [builtins fixtures/set.pyi] to your test description [case testBoolMissingFromStubs] x: bool [out] main:1: error: Name 'bool' is not defined main:1: note: Maybe your test fixture does not define "builtins.bool"? 
main:1: note: Consider adding [builtins fixtures/bool.pyi] to your test description [case testBaseExceptionMissingFromStubs] e: BaseException [out] main:1: error: Name 'BaseException' is not defined main:1: note: Maybe your test fixture does not define "builtins.BaseException"? main:1: note: Consider adding [builtins fixtures/exception.pyi] to your test description [case testExceptionMissingFromStubs] e: Exception [out] main:1: error: Name 'Exception' is not defined main:1: note: Maybe your test fixture does not define "builtins.Exception"? main:1: note: Consider adding [builtins fixtures/exception.pyi] to your test description [case testIsinstanceMissingFromStubs] if isinstance(1, int): pass [out] main:1: error: Name 'isinstance' is not defined main:1: note: Maybe your test fixture does not define "builtins.isinstance"? main:1: note: Consider adding [builtins fixtures/isinstancelist.pyi] to your test description [case testInvalidTupleDefinitionFromStubs] from typing import Tuple x: Tuple[int, ...] x[0] for y in x: pass [out] -- These errors are pretty bad, but keeping this test anyway to -- avoid things getting worse. main:2: error: "tuple" expects no type arguments, but 1 given main:3: error: Value of type "tuple" is not indexable main:4: error: Iterable expected main:4: error: "tuple" has no attribute "__iter__" [case testClassmethodMissingFromStubs] class A: @classmethod def f(cls): pass [out] main:2: error: Name 'classmethod' is not defined main:2: note: Maybe your test fixture does not define "builtins.classmethod"? main:2: note: Consider adding [builtins fixtures/classmethod.pyi] to your test description [case testPropertyMissingFromStubs] class A: @property def f(self): pass [out] main:2: error: Name 'property' is not defined main:2: note: Maybe your test fixture does not define "builtins.property"? main:2: note: Consider adding [builtins fixtures/property.pyi] to your test description mypy-0.560/test-data/unit/check-incremental.test0000644€tŠÔÚ€2›s®0000017646213215007205026031 0ustar jukkaDROPBOX\Domain Users00000000000000-- Checks for incremental mode (see testcheck.py). -- Each test is run at least twice, once with a cold cache, once with a warm cache. -- Before the tests are run again, in step N any *.py.N files are copied to -- *.py. There are at least two runs; more as long as there are *.py.N files. -- -- You can add an empty section like `[delete mod.py.2]` to delete `mod.py` -- before the second run. -- -- Errors expected in the first run should be in the `[out1]` section, and -- errors expected in the second run should be in the `[out2]` section, and so on. -- If a section is omitted, it is expected there are no errors on that run. -- The number of runs is determined by the highest N in all [outN] sections, but -- there are always at least two runs. (Note that [out] is equivalent to [out1].) -- -- The list of modules to be checked can be specified using -- # cmd: mypy -m mod1 mod2 mod3 -- To check a different list on the second run, use -- # cmd2: mypy -m mod1 mod3 -- (and cmd3 for the third run, and so on). -- -- Extra command line flags may be specified using -- # flags: --some-flag -- If the second run requires different flags, those can be specified using -- # flags2: --another-flag -- (and flags3 for the third run, and so on). -- -- Any files that we expect to be rechecked should be annotated in the [rechecked] -- annotation, and any files expect to be stale (aka have a modified interface) -- should be annotated in the [stale] annotation. 
Note that a file that ends up -- producing an error has its caches deleted and is marked stale automatically. -- Such files don't need to be included in [stale ...] list. -- -- The test suite will automatically assume that __main__ is stale and rechecked in -- all cases so we can avoid constantly having to annotate it. The list of -- rechecked/stale files can be in any arbitrary order, or can be left empty -- if no files should be rechecked/stale. -- -- There are additional incremental mode test cases in check-serialize.test. [case testIncrementalEmpty] [rechecked] [stale] [case testIncrementalBasics] import m [file m.py] def foo(): pass [file m.py.2] def foo() -> None: pass [rechecked m] [stale m] [case testIncrementalError] import m [file m.py] def foo() -> None: pass [file m.py.2] def foo() -> None: bar() [rechecked m] [stale] [out2] tmp/m.py:2: error: Name 'bar' is not defined [case testIncrementalSimpleImportSequence] import mod1 mod1.func1() [file mod1.py] import mod2 def func1() -> None: mod2.func2() [file mod2.py] import mod3 def func2() -> None: mod3.func3() [file mod3.py] def func3() -> None: pass [rechecked] [stale] [case testIncrementalInternalChangeOnly] import mod1 mod1.func1() [file mod1.py] import mod2 def func1() -> None: mod2.func2() [file mod2.py] import mod3 def func2() -> None: mod3.func3() [file mod3.py] def func3() -> None: pass [file mod3.py.2] def func3() -> None: 3 + 2 [rechecked mod3] [stale] [case testIncrementalImportGone] import mod1 [file mod1.py] from mod2 import A def func1() -> A: pass [file mod2.py] class A: pass [file mod1.py.2] def func1() -> A: pass [rechecked mod1] [stale] [out2] tmp/mod1.py:1: error: Name 'A' is not defined [case testIncrementalCallable] import mod1 [file mod1.py] from typing import Callable from mypy_extensions import Arg def func1() -> Callable[[Arg(int, 'x')], int]: pass [file mod1.py.2] from typing import Callable from mypy_extensions import Arg def func1() -> Callable[[Arg(int, 'x')], int]: ... 
[rechecked mod1] [stale] [builtins fixtures/dict.pyi] [case testIncrementalSameNameChange] import mod1 [file mod1.py] from mod2 import A def func1() -> A: pass [file mod2.py] class A: pass [file mod2.py.2] class Parent: pass class A(Parent): pass [rechecked mod1, mod2] [stale mod2] [case testIncrementalPartialInterfaceChange] import mod1 mod1.func1() [file mod1.py] import mod2 def func1() -> None: mod2.func2() [file mod2.py] import mod3 def func2() -> None: mod3.func3() [file mod3.py] def func3() -> None: pass [file mod3.py.2] def func3() -> int: return 2 [rechecked mod2, mod3] [stale mod3] [case testIncrementalInternalFunctionDefinitionChange] import mod1 [file mod1.py] import mod2 def accepts_int(a: int) -> int: return a accepts_int(mod2.foo()) [file mod2.py] def foo() -> int: def inner() -> int: return 42 return inner() [file mod2.py.2] def foo() -> int: def inner2() -> str: return "foo" return inner2() [rechecked mod1, mod2] [stale] [out2] tmp/mod2.py:4: error: Incompatible return value type (got "str", expected "int") [case testIncrementalInternalScramble] import mod1 [file mod1.py] import mod2 mod2.foo() [file mod2.py] def baz() -> int: return 3 def bar() -> int: return baz() def foo() -> int: return bar() [file mod2.py.2] def foo() -> int: return baz() def bar() -> int: return bar() def baz() -> int: return 42 [rechecked mod2] [stale] [case testIncrementalMethodInterfaceChange] import mod1 [file mod1.py] import mod2 [file mod2.py] class Foo: def bar(self, a: str) -> str: return "a" [file mod2.py.2] class Foo: def bar(self, a: float) -> str: return "a" [rechecked mod1, mod2] [stale mod2] [case testIncrementalBaseClassChange] import mod1 [file mod1.py] from mod2 import Child Child().good_method() [file mod2.py] class Good: def good_method(self) -> int: return 1 class Bad: pass class Child(Good): pass [file mod2.py.2] class Good: def good_method(self) -> int: return 1 class Bad: pass class Child(Bad): pass [rechecked mod1, mod2] [stale mod2] [out2] tmp/mod1.py:2: error: "Child" has no attribute "good_method" [case testIncrementalCascadingChange] import mod1 [file mod1.py] from mod2 import A def accepts_int(a: int) -> None: pass accepts_int(A) [file mod2.py] from mod3 import B A = B [file mod3.py] from mod4 import C B = C [file mod4.py] C = 3 [file mod4.py.2] C = "A" [rechecked mod1, mod2, mod3, mod4] [stale mod2, mod3, mod4] [out2] tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int" [case testIncrementalBrokenCascade] import mod1 [file mod1.py] import mod2 def accept_int(a: int) -> int: return a accept_int(mod2.mod3.mod4.const) [file mod2.py] import mod3 [file mod3.py] import mod4 [file mod4.py] const = 3 [file mod3.py.2] # Import to mod4 is gone! [rechecked mod1, mod2, mod3] [stale mod3] [builtins fixtures/module.pyi] [out2] tmp/mod1.py:3: error: Module has no attribute "mod4" [case testIncrementalLongBrokenCascade] import mod1 [file mod1.py] import mod2 def accept_int(a: int) -> int: return a accept_int(mod2.mod3.mod4.mod5.mod6.mod7.const) [file mod2.py] import mod3 [file mod3.py] import mod4 [file mod4.py] import mod5 [file mod5.py] import mod6 [file mod6.py] import mod7 [file mod7.py] const = 3 [file mod6.py.2] # Import to mod7 is gone! 
[rechecked mod1, mod5, mod6] [stale mod6] [builtins fixtures/module.pyi] [out2] tmp/mod1.py:3: error: Module has no attribute "mod7" [case testIncrementalNestedBrokenCascade] import mod1 [file mod1.py] import mod2 def accept_int(a: int) -> int: return a accept_int(mod2.mod3.mod4.const) [file mod2/__init__.py] import mod2.mod3 as mod3 [file mod2/mod3/__init__.py] import mod2.mod3.mod4 as mod4 [file mod2/mod3/__init__.py.2] # Import is gone! [file mod2/mod3/mod4.py] const = 3 [rechecked mod1, mod2, mod2.mod3] [stale mod2.mod3] [builtins fixtures/module.pyi] [out2] tmp/mod1.py:3: error: Module has no attribute "mod4" [case testIncrementalNestedBrokenCascadeWithType1] import mod1, mod2.mod3.mod5 [file mod1.py] import mod2 def accept_int(x: int) -> None: pass def produce() -> mod2.CustomType: return mod2.CustomType() a = produce() accept_int(a.foo()) [file mod2/__init__.py] from mod2.mod3 import CustomType [file mod2/mod3/__init__.py] from mod2.mod3.mod4 import CustomType [file mod2/mod3/__init__.py.2] # Import a different class that also happens to be called 'CustomType' from mod2.mod3.mod5 import CustomType def produce() -> CustomType: return CustomType() [file mod2/mod3/mod4.py] class CustomType: def foo(self) -> int: return 1 [file mod2/mod3/mod5.py] class CustomType: def foo(self) -> str: return "a" [rechecked mod1, mod2, mod2.mod3] [stale mod2, mod2.mod3] [builtins fixtures/module.pyi] [out1] [out2] tmp/mod1.py:6: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" [case testIncrementalNestedBrokenCascadeWithType2] import mod1, mod2.mod3.mod5 [file mod1.py] from mod2 import produce def accept_int(x: int) -> None: pass a = produce() accept_int(a.foo()) [file mod2/__init__.py] from mod2.mod3 import produce [file mod2/mod3/__init__.py] from mod2.mod3.mod4 import CustomType def produce() -> CustomType: return CustomType() [file mod2/mod3/__init__.py.2] # Import a different class that also happens to be called 'CustomType' from mod2.mod3.mod5 import CustomType def produce() -> CustomType: return CustomType() [file mod2/mod3/mod4.py] class CustomType: def foo(self) -> int: return 1 [file mod2/mod3/mod5.py] class CustomType: def foo(self) -> str: return "a" [rechecked mod1, mod2, mod2.mod3] [stale mod2.mod3] [builtins fixtures/module.pyi] [out1] [out2] tmp/mod1.py:4: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" [case testIncrementalRemoteChange] import mod1 [file mod1.py] import mod2 def accepts_int(a: int) -> None: pass accepts_int(mod2.mod3.mod4.const) [file mod2.py] import mod3 [file mod3.py] import mod4 [file mod4.py] const = 3 [file mod4.py.2] const = "foo" [rechecked mod1, mod3, mod4] [stale mod4] [out2] tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int" [case testIncrementalBadChange] import mod1 [file mod1.py] from mod2 import func2 def func1() -> int: return func2() [file mod2.py] def func2() -> int: return 1 [file mod2.py.2] def func2() -> str: return "foo" [rechecked mod1, mod2] [stale mod2] [out2] tmp/mod1.py:4: error: Incompatible return value type (got "str", expected "int") [case testIncrementalBadChangeWithSave] import mod0 [file mod0.py] import mod1 A = mod1.func2() [file mod1.py] from mod2 import func2 def func1() -> int: return func2() [file mod2.py] def func2() -> int: return 1 [file mod2.py.2] def func2() -> str: return "foo" [rechecked mod0, mod1, mod2] [stale mod2] [out2] tmp/mod1.py:4: error: Incompatible return value type (got "str", expected "int") [case 
testIncrementalOkChangeWithSave] import mod0 [file mod0.py] import mod1 A = mod1.func2() [file mod1.py] from mod2 import func2 def func1() -> int: func2() return 1 [file mod2.py] def func2() -> int: return 1 [file mod2.py.2] def func2() -> str: return "foo" [rechecked mod0, mod1, mod2] [stale mod0, mod2] [out2] [case testIncrementalWithComplexDictExpression] import mod1 [file mod1.py] import mod1_private [file mod1_private.py] my_dict = { 'a': [1, 2, 3], 'b': [4, 5, 6] } [file mod1_private.py.2] my_dict = { 'a': [1, 2, 3], 'b': [4, 5, 'a'] } [rechecked mod1, mod1_private] [stale mod1_private] [builtins fixtures/dict.pyi] [case testIncrementalWithComplexConstantExpressionNoAnnotation] import mod1 [file mod1.py] import mod1_private [file mod1_private.py] def foobar() -> int: return 1 def baz() -> int: return 2 const = 1 + foobar() [file mod1_private.py.2] def foobar() -> int: return 1 def baz() -> int: return 2 const = 1 + baz() [rechecked mod1_private] [stale] [case testIncrementalWithComplexConstantExpressionWithAnnotation] import mod1 [file mod1.py] import mod1_private [file mod1_private.py] def foobar() -> int: return 1 def baz() -> int: return 2 const = 1 + foobar() # type: int [file mod1_private.py.2] def foobar() -> int: return 1 def baz() -> int: return 2 const = 1 + baz() # type: int [rechecked mod1_private] [stale] [case testIncrementalSmall] import mod1 [file mod1.py] import mod1_private def accepts_int(a: int) -> None: pass accepts_int(mod1_private.some_func(12)) [file mod1_private.py] def some_func(a: int) -> int: return 1 [file mod1_private.py.2] def some_func(a: int) -> str: return "a" [rechecked mod1, mod1_private] [stale mod1_private] [builtins fixtures/ops.pyi] [out2] tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int" [case testIncrementalWithDecorators] import mod1 [file mod1.py] import mod1_private def accepts_int(a: int) -> None: pass accepts_int(mod1_private.some_func(12)) [file mod1_private.py] from typing import Callable def multiply(f: Callable[[int], int]) -> Callable[[int], int]: return lambda a: f(a) * 10 def stringify(f: Callable[[int], int]) -> Callable[[int], str]: return lambda a: str(f(a)) @multiply def some_func(a: int) -> int: return a + 2 [file mod1_private.py.2] from typing import Callable def multiply(f: Callable[[int], int]) -> Callable[[int], int]: return lambda a: f(a) * 10 def stringify(f: Callable[[int], int]) -> Callable[[int], str]: return lambda a: str(f(a)) @stringify def some_func(a: int) -> int: return a + 2 [rechecked mod1, mod1_private] [stale mod1_private] [builtins fixtures/ops.pyi] [out2] tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int" [case testIncrementalChangingClassAttributes] import mod1 [file mod1.py] import mod2 mod2.Foo.A [file mod2.py] class Foo: A = 3 [file mod2.py.2] class Foo: A = "hello" [rechecked mod1, mod2] [stale mod2] [case testIncrementalChangingFields] import mod1 [file mod1.py] import mod2 f = mod2.Foo() f.A [file mod2.py] class Foo: def __init__(self) -> None: self.A = 3 [file mod2.py.2] class Foo: def __init__(self) -> None: self.A = "hello" [rechecked mod1, mod2] [stale mod2] [out2] [case testIncrementalChangingFieldsWithAssignment] import mod1 [file mod1.py] import mod2 f = mod2.Foo() B = f.A [file mod2.py] class Foo: def __init__(self) -> None: self.A = 3 [file mod2.py.2] class Foo: def __init__(self) -> None: self.A = "hello" [rechecked mod1, mod2] [stale mod1, mod2] [case testIncrementalCheckingChangingFields] import mod1 
[file mod1.py] import mod2 def accept_int(a: int) -> int: return a f = mod2.Foo() accept_int(f.A) [file mod2.py] class Foo: def __init__(self) -> None: self.A = 3 [file mod2.py.2] class Foo: def __init__(self) -> None: self.A = "hello" [rechecked mod1, mod2] [stale mod2] [out2] tmp/mod1.py:4: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" [case testIncrementalNestedClassDefinition] import mod1 [file mod1.py] import mod2 b = mod2.Foo.Bar() b.attr [file mod2.py] class Foo: class Bar: attr = 3 [file mod2.py.2] class Foo: class Bar: attr = "foo" [rechecked mod1, mod2] [stale mod2] [case testIncrementalSimpleBranchingModules] import mod1 import mod2 [file mod1.py] def func() -> None: pass [file mod2.py] def func() -> None: pass [file mod1.py.2] def func() -> int: return 1 [rechecked mod1] [stale mod1] [case testIncrementalSubmoduleImport] from parent.childA import Foo def func1() -> Foo: return Foo() [file parent/__init__.py] from parent.childA import Foo from parent.childB import Bar __all__ = ['Foo', 'Bar'] [file parent/childA.py] import parent class Foo: def test(self) -> int: return parent.Bar().test() [file parent/childB.py] class Bar: def test(self) -> int: return 3 [builtins fixtures/module_all.pyi] [rechecked] [stale] [case testIncrementalSubmoduleWithAttr] import mod.child x = mod.child.Foo() x.bar() [file mod/__init__.py] [file mod/child.py] class Foo: def bar(self) -> None: pass [builtins fixtures/module.pyi] [rechecked] [stale] [case testIncrementalNestedSubmoduleImportFromWithAttr] from mod1.mod2 import mod3 def accept_int(a: int) -> None: pass accept_int(mod3.val3) [file mod1/__init__.py] val1 = 1 [file mod1/mod2/__init__.py] val2 = 1 [file mod1/mod2/mod3.py] val3 = 1 [builtins fixtures/module.pyi] [rechecked] [stale] [case testIncrementalNestedSubmoduleWithAttr] import mod1.mod2.mod3 def accept_int(a: int) -> None: pass accept_int(mod1.mod2.mod3.val3) accept_int(mod1.mod2.val2) accept_int(mod1.val1) [file mod1/__init__.py] val1 = 1 [file mod1/mod2/__init__.py] val2 = 1 [file mod1/mod2/mod3.py] val3 = 1 [builtins fixtures/module.pyi] [rechecked] [stale] [case testIncrementalSubmoduleParentWithImportFrom] import parent [file parent/__init__.py] from parent import a [file parent/a.py] val = 3 [builtins fixtures/args.pyi] [stale] [case testIncrementalSubmoduleParentBackreference] import parent [file parent/__init__.py] from parent import a [file parent/a.py] import parent.b [file parent/b.py] [builtins fixtures/args.pyi] [stale] [case testIncrementalSubmoduleParentBackreferenceComplex] import parent [file parent/__init__.py] import parent.a [file parent/a.py] import parent.b import parent.c [file parent/b.py] import parent.a [file parent/c.py] import parent.a [builtins fixtures/args.pyi] [stale] [case testIncrementalReferenceNewFileWithImportFrom] from parent import a [file parent/__init__.py] [file parent/a.py] [file parent/a.py.2] from parent import b [file parent/b.py.2] [stale parent, parent.a, parent.b] [case testIncrementalReferenceExistingFileWithImportFrom] from parent import a, b [file parent/__init__.py] [file parent/a.py] [file parent/b.py] [file parent/a.py.2] from parent import b [stale parent.a] [case testIncrementalWithTypeIgnoreOnDirectImport] import a, b [file a.py] import b # type: ignore [file b.py] import c [file c.py] [stale] [case testIncrementalWithTypeIgnoreOnImportFrom] import a, b [file a.py] from b import something # type: ignore [file b.py] import c something = 3 [file c.py] [stale] [case testIncrementalWithPartialTypeIgnore] 
import a # type: ignore import a.b [file a/__init__.py] [file a/b.py] [stale] [case testIncrementalAnyIsDifferentFromIgnore] import b [file b.py] from typing import Any import a.b [file b.py.2] from typing import Any a = 3 # type: Any import a.b [file a/__init__.py] [file a/b.py] [rechecked b] [stale] [out2] tmp/b.py:4: error: Name 'a' already defined [case testIncrementalSilentImportsAndImportsInClass] # flags: --ignore-missing-imports class MyObject(object): from bar import FooBar [stale] [case testIncrementalSameFileSize] import m [file m.py] def foo(a: int) -> None: pass def bar(a: str) -> None: pass foo(3) [file m.py.2] def foo(a: int) -> None: pass def bar(a: str) -> None: pass bar(3) [rechecked m] [stale] [out2] tmp/m.py:4: error: Argument 1 to "bar" has incompatible type "int"; expected "str" [case testIncrementalUnsilencingModule] # cmd: mypy -m main package.subpackage.mod2 # cmd2: mypy -m main package.subpackage.mod1 # flags: --follow-imports=skip [file main.py] from package.subpackage.mod1 import Class def handle(c: Class) -> None: c.some_attribute [file package/__init__.py] # empty [file package/subpackage/__init__.py] # empty [file package/subpackage/mod1.py] import collections # Any previously unloaded package works here class Class: pass [file package/subpackage/mod2.py] # empty [builtins fixtures/args.pyi] [rechecked collections, main, package.subpackage.mod1] [stale collections, package.subpackage.mod1] [out2] tmp/main.py:4: error: "Class" has no attribute "some_attribute" [case testIncrementalWithIgnores] import foo # type: ignore [builtins fixtures/module.pyi] [stale] [case testIncrementalWithSilentImportsAndIgnore] # cmd: mypy -m main b # cmd2: mypy -m main c c.submodule # flags: --follow-imports=skip [file main.py] import a # type: ignore import b import c a.A().foo() b.B().foo() c.C().foo() [file b.py] class B: def foo(self) -> None: pass [file b.py.2] [file c/__init__.py] class C: pass [file c/submodule.py] val = 3 # type: int val = "foo" [builtins fixtures/module_all.pyi] [rechecked main, c, c.submodule] [stale c] [out2] tmp/c/submodule.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int") tmp/main.py:7: error: "C" has no attribute "foo" [case testIncrementalRemoteError] import m m.C().foo().bar() [file m.py] import n class C: def foo(self) -> n.A: pass [file n.py] class A: def bar(self): pass [file n.py.2] class A: pass [rechecked m, n] [stale n] [out2] main:2: error: "A" has no attribute "bar" [case testIncrementalRemoteErrorFixed] import m m.C().foo().bar() [file m.py] import n class C: def foo(self) -> n.A: pass [file n.py] class A: pass [file n.py.2] class A: def bar(self): pass [rechecked m, n] [stale n] [out1] main:2: error: "A" has no attribute "bar" [case testIncrementalChangedError] import m [file m.py] import n def accept_int(x: int) -> None: pass accept_int(n.foo) [file n.py] foo = "hello" reveal_type(foo) [file n.py.2] foo = 3.14 reveal_type(foo) [rechecked m, n] [stale] [out1] tmp/n.py:2: error: Revealed type is 'builtins.str' tmp/m.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" [out2] tmp/n.py:2: error: Revealed type is 'builtins.float' tmp/m.py:3: error: Argument 1 to "accept_int" has incompatible type "float"; expected "int" [case testIncrementalReplacingImports] import good, bad, client [file good.py] def foo(a: int) -> None: pass [file bad.py] def foo(a: str) -> None: pass [file client.py] import good import bad from good import foo foo(3) [file client.py.2] import 
good import bad from bad import foo foo(3) [rechecked client] [stale] [out2] tmp/client.py:4: error: Argument 1 to "foo" has incompatible type "int"; expected "str" [case testIncrementalChangingAlias] import m1, m2, m3, m4, m5 [file m1.py] from m2 import A def accepts_int(x: int) -> None: pass accepts_int(A()) [file m2.py] from m3 import A [file m3.py] from m4 import B A = B [file m3.py.2] from m5 import C A = C [file m4.py] def B() -> int: return 42 [file m5.py] def C() -> str: return "hello" [rechecked m1, m2, m3] [stale m3] [out2] tmp/m1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int" [case testIncrementalStoresAliasTypeVars] import a [file mod.py] from typing import TypeVar, Union T = TypeVar('T') Alias = Union[int, T] x: Alias[str] [file a.py] from mod import Alias, x [file a.py.2] from mod import Alias, x reveal_type(x) y: Alias[int] reveal_type(y) [out2] tmp/a.py:3: error: Revealed type is 'Union[builtins.int, builtins.str]' tmp/a.py:5: error: Revealed type is 'Union[builtins.int, builtins.int]' [case testIncrementalSilentImportsWithBlatantError] # cmd: mypy -m main # flags: --follow-imports=skip [file main.py] from evil import Hello [file main.py.2] from evil import Hello reveal_type(Hello()) [file evil.py] def accept_int(x: int) -> None: pass accept_int("not an int") [rechecked main] [stale] [out2] tmp/main.py:2: error: Revealed type is 'Any' [case testIncrementalImportIsNewlySilenced] # cmd: mypy -m main foo # cmd2: mypy -m main # flags: --follow-imports=skip [file main.py] from foo import bar def accept_int(x: int) -> None: pass accept_int(bar) [file foo.py] bar = 3 [file foo.py.2] # Empty! [rechecked main] [stale main] [case testIncrementalSilencedModuleNoLongerCausesError] # cmd: mypy -m main evil # cmd2: mypy -m main # flags: --follow-imports=skip [file main.py] from evil import bar def accept_int(x: int) -> None: pass accept_int(bar) reveal_type(bar) [file evil.py] bar = "str" [rechecked main] [stale] [out1] tmp/main.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" tmp/main.py:4: error: Revealed type is 'builtins.str' [out2] tmp/main.py:4: error: Revealed type is 'Any' [case testIncrementalFixedBugCausesPropagation] import mod1 [file mod1.py] from mod2 import A val = A().makeB().makeC().foo() reveal_type(val) [file mod2.py] from mod3 import B class A: def makeB(self) -> B: return B() [file mod3.py] from mod4 import C class B: def makeC(self) -> C: val = 3 # type: int val = "str" # deliberately triggering error return C() [file mod3.py.2] from mod4 import C class B: def makeC(self) -> C: return C() [file mod4.py] class C: def foo(self) -> int: return 1 [rechecked mod3, mod2, mod1] [stale mod3, mod2] [out1] tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") tmp/mod1.py:3: error: Revealed type is 'builtins.int' [out2] tmp/mod1.py:3: error: Revealed type is 'builtins.int' [case testIncrementalIncidentalChangeWithBugCausesPropagation] import mod1 [file mod1.py] from mod2 import A val = A().makeB().makeC().foo() reveal_type(val) [file mod2.py] from mod3 import B class A: def makeB(self) -> B: return B() [file mod3.py] from mod4 import C class B: def makeC(self) -> C: val = 3 # type: int val = "str" # deliberately triggering error return C() [file mod4.py] class C: def foo(self) -> int: return 1 [file mod4.py.2] class C: def foo(self) -> str: return 'a' [rechecked mod4, mod3, mod2, mod1] [stale mod4] [out1] tmp/mod3.py:5: error: Incompatible types in 
assignment (expression has type "str", variable has type "int") tmp/mod1.py:3: error: Revealed type is 'builtins.int' [out2] tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") tmp/mod1.py:3: error: Revealed type is 'builtins.str' [case testIncrementalIncidentalChangeWithBugFixCausesPropagation] import mod1 [file mod1.py] from mod2 import A val = A().makeB().makeC().foo() reveal_type(val) [file mod2.py] from mod3 import B class A: def makeB(self) -> B: return B() [file mod3.py] from mod4 import C class B: def makeC(self) -> C: val = 3 # type: int val = "str" # deliberately triggering error return C() [file mod3.py.2] from mod4 import C class B: def makeC(self) -> C: return C() [file mod4.py] class C: def foo(self) -> int: return 1 [file mod4.py.2] class C: def foo(self) -> str: return 'a' [rechecked mod4, mod3, mod2, mod1] [stale mod4, mod3, mod2] [out1] tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") tmp/mod1.py:3: error: Revealed type is 'builtins.int' [out2] tmp/mod1.py:3: error: Revealed type is 'builtins.str' [case testIncrementalSilentImportsWithInnerImports] # cmd: mypy -m main foo # flags: --ignore-missing-imports [file main.py] from foo import MyClass m = MyClass() [file main.py.2] from foo import MyClass m = MyClass() reveal_type(m.val) [file foo.py] class MyClass: def __init__(self) -> None: import unrelated self.val = unrelated.test() [rechecked main] [stale] [out2] tmp/main.py:3: error: Revealed type is 'Any' [case testIncrementalSilentImportsWithInnerImportsAndNewFile] # cmd: mypy -m main foo # cmd2: mypy -m main foo unrelated # flags: --follow-imports=skip [file main.py] from foo import MyClass m = MyClass() [file main.py.2] from foo import MyClass m = MyClass() reveal_type(m.val) [file foo.py] class MyClass: def __init__(self) -> None: import unrelated self.val = unrelated.test() [file unrelated.py] def test() -> str: return "foo" [rechecked main, foo, unrelated] [stale foo, unrelated] [out2] tmp/main.py:3: error: Revealed type is 'builtins.str' [case testIncrementalWorksWithNestedClasses] import foo [file foo.py] class MyClass: class NestedClass: pass class_attr = NestedClass() [rechecked] [stale] [case testIncrementalWorksWithBasicProtocols] import a [file a.py] from b import P x: int y: P[int] x = y.meth() class C: def meth(self) -> int: pass y = C() [file a.py.2] from b import P x: str y: P[str] x = y.meth() class C: def meth(self) -> str: pass y = C() [file b.py] from typing import Protocol, TypeVar T = TypeVar('T', covariant=True) class P(Protocol[T]): def meth(self) -> T: pass [case testIncrementalSwitchFromNominalToStructural] import a [file a.py] from b import B, fun class C(B): def x(self) -> int: pass def y(self) -> int: pass fun(C()) [file b.py] from typing import Protocol class B: def x(self) -> float: pass def fun(arg: B) -> None: arg.x() [file b.py.2] from typing import Protocol class B(Protocol): def x(self) -> float: pass def fun(arg: B) -> None: arg.x() [file a.py.3] from b import fun class C: def x(self) -> int: pass def y(self) -> int: pass fun(C()) [out1] [out2] [out3] [case testIncrementalSwitchFromStructuralToNominal] import a [file a.py] from b import fun class C: def x(self) -> int: pass def y(self) -> int: pass fun(C()) [file b.py] from typing import Protocol class B(Protocol): def x(self) -> float: pass def fun(arg: B) -> None: arg.x() [file b.py.2] from typing import Protocol class B: def x(self) -> float: pass def fun(arg: B) -> 
None: arg.x() [out1] [out2] tmp/a.py:5: error: Argument 1 to "fun" has incompatible type "C"; expected "B" [case testIncrementalWorksWithNamedTuple] import foo [file foo.py] from mid import MyTuple def accept_int(x: int) -> None: pass accept_int(MyTuple(1, "b", "c").a) [file mid.py] from bar import MyTuple [file bar.py] from typing import NamedTuple MyTuple = NamedTuple('MyTuple', [ ('a', int), ('b', str), ('c', str) ]) [file bar.py.2] from typing import NamedTuple MyTuple = NamedTuple('MyTuple', [ ('b', int), # a and b are swapped ('a', str), ('c', str) ]) [rechecked bar, mid, foo] [stale bar] [out2] tmp/foo.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" [case testIncrementalWorksWithNestedNamedTuple] import foo [file foo.py] from mid import Outer def accept_int(x: int) -> None: pass accept_int(Outer.MyTuple(1, "b", "c").a) [file mid.py] from bar import Outer [file bar.py] from typing import NamedTuple class Outer: MyTuple = NamedTuple('MyTuple', [ ('a', int), ('b', str), ('c', str) ]) [file bar.py.2] from typing import NamedTuple class Outer: MyTuple = NamedTuple('MyTuple', [ ('b', int), # a and b are swapped ('a', str), ('c', str) ]) [rechecked bar, mid, foo] [stale bar] [out2] tmp/foo.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" [case testIncrementalPartialSubmoduleUpdate] # cmd: mypy -m a # cmd2: mypy -m a a.c # flags: --follow-imports=skip [file a/__init__.py] from .b import B from .c import C [file a/b.py] class B: pass [file a/c.py] class C: pass [file a/c.py.2] class C: pass pass [rechecked a, a.c] [stale a, a.c] [out] [case testIncrementalNestedClassRef] import top [file top.py] from funcs import callee from classes import Outer def caller(a: Outer.Inner) -> None: callee(a) [file funcs.py] from classes import Outer def callee(a: Outer.Inner) -> None: pass [file classes.py] class Outer: class Inner: pass [file top.py.2] from funcs import callee from classes import Outer def caller(a: Outer.Inner) -> int: callee(a) return 0 [case testIncrementalLoadsParentAfterChild] # cmd: mypy -m r.s [file r/__init__.py] from . import s [file r/m.py] class R: pass [file r/s.py] from . import m R = m.R a = None # type: R [file r/s.py.2] from . 
import m R = m.R a = None # type: R [case testIncrementalBaseClassAttributeConflict] class A: pass class B: pass class X: attr = None # type: A class Y: attr = None # type: B class Z(X, Y): pass [stale] [out] main:8: error: Definition of "attr" in base class "X" is incompatible with definition in base class "Y" [out2] main:8: error: Definition of "attr" in base class "X" is incompatible with definition in base class "Y" [case testIncrementalFollowImportsSilent] # flags: --follow-imports=silent import a [file a.py] x = 0 [file a.py.2] x = 0 x + '' [case testIncrementalFollowImportsSkip] # flags: --follow-imports=skip import a reveal_type(a.x) [file a.py] / [file a.py.2] // [out] main:3: error: Revealed type is 'Any' [out2] main:3: error: Revealed type is 'Any' [case testIncrementalFollowImportsError] # flags: --follow-imports=error import a [file a.py] / [file a.py.2] // [out1] main:2: note: Import of 'a' ignored main:2: note: (Using --follow-imports=error, module not passed on command line) [out2] main:2: note: Import of 'a' ignored main:2: note: (Using --follow-imports=error, module not passed on command line) [case testIncrementalFollowImportsVariable] # flags: --config-file tmp/mypy.ini import a reveal_type(a.x) [file a.py] x = 0 [file mypy.ini] [[mypy] follow_imports = normal [file mypy.ini.2] [[mypy] follow_imports = skip [out1] main:3: error: Revealed type is 'builtins.int' [out2] main:3: error: Revealed type is 'Any' [case testIncrementalNamedTupleInMethod] from ntcrash import nope [file ntcrash.py] from typing import NamedTuple class C: def f(self) -> None: A = NamedTuple('A', [('x', int), ('y', int)]) [out1] main:1: error: Module 'ntcrash' has no attribute 'nope' [out2] main:1: error: Module 'ntcrash' has no attribute 'nope' [case testIncrementalNamedTupleInMethod2] from ntcrash import nope [file ntcrash.py] from typing import NamedTuple class C: class D: def f(self) -> None: A = NamedTuple('A', [('x', int), ('y', int)]) [out1] main:1: error: Module 'ntcrash' has no attribute 'nope' [out2] main:1: error: Module 'ntcrash' has no attribute 'nope' [case testIncrementalNamedTupleInMethod3] from ntcrash import nope [file ntcrash.py] from typing import NamedTuple class C: def a(self): class D: def f(self) -> None: A = NamedTuple('A', [('x', int), ('y', int)]) [out1] main:1: error: Module 'ntcrash' has no attribute 'nope' [out2] main:1: error: Module 'ntcrash' has no attribute 'nope' [case testIncrementalTypedDictInMethod] from tdcrash import nope [file tdcrash.py] from mypy_extensions import TypedDict class C: def f(self) -> None: A = TypedDict('A', {'x': int, 'y': int}) [builtins fixtures/dict.pyi] [out1] main:1: error: Module 'tdcrash' has no attribute 'nope' [out2] main:1: error: Module 'tdcrash' has no attribute 'nope' [case testIncrementalTypedDictInMethod2] from tdcrash import nope [file tdcrash.py] from mypy_extensions import TypedDict class C: class D: def f(self) -> None: A = TypedDict('A', {'x': int, 'y': int}) [builtins fixtures/dict.pyi] [out1] main:1: error: Module 'tdcrash' has no attribute 'nope' [out2] main:1: error: Module 'tdcrash' has no attribute 'nope' [case testIncrementalTypedDictInMethod3] from tdcrash import nope [file tdcrash.py] from mypy_extensions import TypedDict class C: def a(self): class D: def f(self) -> None: A = TypedDict('A', {'x': int, 'y': int}) [builtins fixtures/dict.pyi] [out1] main:1: error: Module 'tdcrash' has no attribute 'nope' [out2] main:1: error: Module 'tdcrash' has no attribute 'nope' [case testIncrementalInnerClassAttrInMethod] import 
crash nonexisting [file crash.py] class C: def f(self) -> None: class A: pass self.a = A() [out1] main:2: error: Name 'nonexisting' is not defined [out2] main:2: error: Name 'nonexisting' is not defined [case testIncrementalInnerClassAttrInMethodReveal] import crash reveal_type(crash.C().a) reveal_type(crash.D().a) [file crash.py] from typing import TypeVar, Generic T = TypeVar('T') class C: def f(self) -> None: class A: pass self.a = A() reveal_type(C().a) class D: def f(self) -> None: class A: def g(self) -> None: class B(Generic[T]): pass self.b = B[int]() self.a = A().b reveal_type(D().a) [out1] tmp/crash.py:8: error: Revealed type is 'crash.A@5' tmp/crash.py:17: error: Revealed type is 'crash.B@13[builtins.int*]' main:2: error: Revealed type is 'crash.A@5' main:3: error: Revealed type is 'crash.B@13[builtins.int*]' [out2] tmp/crash.py:8: error: Revealed type is 'crash.A@5' tmp/crash.py:17: error: Revealed type is 'crash.B@13[builtins.int*]' main:2: error: Revealed type is 'crash.A@5' main:3: error: Revealed type is 'crash.B@13[builtins.int*]' [case testGenericMethodRestoreMetaLevel] from typing import Dict d = {} # type: Dict[str, int] g = d.get # This should not crash: see https://github.com/python/mypy/issues/2804 [builtins fixtures/dict.pyi] [case testGenericMethodRestoreMetaLevel2] from typing import TypeVar T = TypeVar('T') class D: def m(self, x: T) -> T: return x g = D().m # This should not crash: see https://github.com/python/mypy/issues/2804 [builtins fixtures/dict.pyi] [case testGenericMethodRestoreMetaLevel3] from typing import TypeVar T = TypeVar('T') class C: def m(self, x: T) -> T: return x class D(C): def __init__(self) -> None: self.d = super().m # This should not crash: see https://github.com/python/mypy/issues/2804 [builtins fixtures/dict.pyi] [case testIncrementalPerFileFlags] # flags: --config-file tmp/mypy.ini import a [file a.py] pass [file mypy.ini] [[mypy] warn_no_return = False [[mypy-a] warn_no_return = True [rechecked] [case testIncrementalClassVar] from typing import ClassVar class A: x = None # type: ClassVar A().x = 0 [out1] main:4: error: Cannot assign to class variable "x" via instance [out2] main:4: error: Cannot assign to class variable "x" via instance [case testIncrementalClassVarGone] import m m.A().x = 0 [file m.py] from typing import ClassVar class A: x = None # type: ClassVar[int] [file m.py.2] class A: x = None # type: int [out1] main:2: error: Cannot assign to class variable "x" via instance [case testCachingClassVar] import b [file a.py] from typing import ClassVar class A: x = None # type: ClassVar[int] [file b.py] import a [file b.py.2] import a a.A().x = 0 [out2] tmp/b.py:2: error: Cannot assign to class variable "x" via instance [case testSerializeTypedDict] import b reveal_type(b.x) y: b.A reveal_type(y) [file b.py] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': str}) x: A [builtins fixtures/dict.pyi] [out1] main:2: error: Revealed type is 'TypedDict('b.A', {'x': builtins.int, 'y': builtins.str})' main:4: error: Revealed type is 'TypedDict('b.A', {'x': builtins.int, 'y': builtins.str})' [out2] main:2: error: Revealed type is 'TypedDict('b.A', {'x': builtins.int, 'y': builtins.str})' main:4: error: Revealed type is 'TypedDict('b.A', {'x': builtins.int, 'y': builtins.str})' [case testSerializeMetaclass] import b reveal_type(b.A.f()) m: b.M = b.A reveal_type(b.a.f()) [file b.py] from typing import Type class M(type): def f(cls) -> int: return 0 class A(metaclass=M): pass a: Type[A] [out] main:2: error: Revealed 
type is 'builtins.int' main:4: error: Revealed type is 'builtins.int' [out2] main:2: error: Revealed type is 'builtins.int' main:4: error: Revealed type is 'builtins.int' [case testSerializeMetaclassInImportCycle1] import b import c reveal_type(b.A.f()) m: c.M = b.A reveal_type(b.a.f()) [file b.py] from typing import Type from c import M class A(metaclass=M): pass a: Type[A] [file c.py] class M(type): def f(cls) -> int: return 0 [out] main:3: error: Revealed type is 'builtins.int' main:5: error: Revealed type is 'builtins.int' [out2] main:3: error: Revealed type is 'builtins.int' main:5: error: Revealed type is 'builtins.int' -- TODO: Add another test for metaclass in import cycle (reversed from the above test). -- This currently does not work. [case testDeleteFile] import n [file n.py] import m [file m.py] x = 1 [delete m.py.2] [rechecked n] [stale] [out2] tmp/n.py:1: error: Cannot find module named 'm' tmp/n.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testDeleteFileWithinCycle] import a [file a.py] import b [file b.py] import c [file c.py] import a [file a.py.2] import c [delete b.py.2] [rechecked a, c] [stale a] [out2] [case testThreePassesBasic] import m [file m.py] def foo(): pass [file m.py.2] def foo() -> None: pass [file m.py.3] def foo(): pass [rechecked m] [stale m] [rechecked2 m] [stale2 m] [out3] [case testThreePassesErrorInThirdPass] import m [file m.py] def foo(): pass [file m.py.2] def foo() -> None: pass [file m.py.3] def foo() -> int: return '' [rechecked m] [stale m] [rechecked2 m] [stale2] [out3] tmp/m.py:2: error: Incompatible return value type (got "str", expected "int") [case testThreePassesThirdPassFixesError] import n [file n.py] import m x = m.foo(1) [file m.py] def foo(x): pass [file m.py.2] def foo() -> str: pass [file m.py.3] def foo(x) -> int: pass [rechecked m, n] [stale m] [rechecked2 m, n] [stale2 m, n] [out2] tmp/n.py:2: error: Too many arguments for "foo" [out3] -- -- Quick mode -- [case testQuickAndDirtyInterfaceChangeDoesNotPropagate] # flags: --quick-and-dirty import b, c [file a.py] def a(): pass [file b.py] import a import c [file c.py] import a import b [file a.py.2] def a(x): pass [rechecked a] [stale a] [case testQuickAndDirtyDoesNotInvalidateImportCycle] # flags: --quick-and-dirty import b, c [file a.py] def a(): pass [file b.py] import a import c [file c.py] import a import b [file b.py.2] import a import c x = 0 [rechecked b] [stale b] [case testQuickAndDirtySwitchToIncrementalMode] # flags: --quick-and-dirty # flags2: --incremental import a, b [file a.py] import b [file b.py] import a [rechecked a, b, builtins] [stale a, b, builtins] [case testQuickAndDirtyFixErrorInExistingFunction] # flags: --quick-and-dirty import a, b [file a.py] import b def foo() -> int: return '' [file b.py] import a [file a.py.2] def foo() -> int: return 0 [out1] tmp/a.py:2: error: Incompatible return value type (got "str", expected "int") [out2] [rechecked a] [stale a] [case testQuickAndDirtyIntroduceErrorInNewFunction] # flags: --quick-and-dirty import a, b [file a.py] import b [file b.py] import a [file a.py.2] import b def foo() -> int: return '' [out1] [out2] tmp/a.py:2: error: Incompatible return value type (got "str", expected "int") [rechecked a] [stale] [case testQuickAndDirtyPersistingError] # flags: --quick-and-dirty import a, b [file a.py] import b def foo() -> int: return '' [file b.py] import a [file a.py.2] import b def foo() -> int: return 0.5 [out1] tmp/a.py:2: error: Incompatible return value 
type (got "str", expected "int") [out2] tmp/a.py:2: error: Incompatible return value type (got "float", expected "int") [rechecked a] [stale] [case testQuickAndDirtyIntroduceReferencesWithinCycle] # flags: --quick-and-dirty import a, b [file a.py] import b [file b.py] import a class C: pass def f() -> int: pass [file a.py.2] import b reveal_type(b.C) reveal_type(b.f) [out1] [out2] tmp/a.py:2: error: Revealed type is 'def () -> b.C' tmp/a.py:3: error: Revealed type is 'def () -> builtins.int' [rechecked a] [stale] [case testQuickAndDirtyIntroduceReferencesWithinCycle2] # flags: --quick-and-dirty import a, b [file a.py] import b class C: pass def f() -> int: pass [file b.py] import a [file b.py.2] import a reveal_type(a.C) reveal_type(a.f) [out1] [out2] tmp/b.py:2: error: Revealed type is 'def () -> a.C' tmp/b.py:3: error: Revealed type is 'def () -> builtins.int' [rechecked b] [stale] [case testQuickAndDirtyIntroduceReferencesWithinCycleNoError] # flags: --quick-and-dirty import a, b, c [file a.py] import b [file b.py] import a class C: pass def f() -> int: pass [file c.py] [file a.py.2] import b def g() -> b.C: pass h = b.f [file c.py.3] import a reveal_type(a.g) reveal_type(a.h) [out1] [out2] [out3] tmp/c.py:2: error: Revealed type is 'def () -> b.C' tmp/c.py:3: error: Revealed type is 'def () -> builtins.int' [rechecked a] [stale a] [rechecked2 c] [stale2] [case testQuickAndDirtyIntroduceReferencesWithinCycleNoError2] # flags: --quick-and-dirty import a, b, c [file a.py] import b class C: pass def f() -> int: pass [file b.py] import a [file c.py] [file b.py.2] import a def g() -> a.C: pass h = a.f [file c.py.3] import b reveal_type(b.g) reveal_type(b.h) [out1] [out2] [out3] tmp/c.py:2: error: Revealed type is 'def () -> a.C' tmp/c.py:3: error: Revealed type is 'def () -> builtins.int' [rechecked b] [stale b] [rechecked2 c] [stale2] -- (The behavior for blockers is actually no different than in regular incremental mode) [case testQuickAndDirtyBlockerOnFirstRound] # flags: --quick-and-dirty import a, b [file a.py] import b class B(C): pass class C(B): pass # blocker [file b.py] import a [file a.py.2] import b class B: pass class C(B): pass [out1] tmp/a.py:3: error: Cycle in inheritance hierarchy [out2] [rechecked a, b] [stale a, b] [case testQuickAndDirtyBlockerOnSecondRound] # flags: --quick-and-dirty import a, b [file a.py] import b class B: pass class C(B): pass [file b.py] import a [file a.py.2] import b class B(C): pass class C(B): pass # blocker [out1] [out2] tmp/a.py:3: error: Cycle in inheritance hierarchy [rechecked a, b] [stale a, b] [case testQuickAndDirtyRenameFunctionInTwoModules] # flags: --quick-and-dirty import a, b, c, d [file a.py] import d def f(): pass [file b.py] from a import f [file c.py] from b import f [file d.py] from c import f [file a.py.2] import d def g(): pass # renamed f to g [file c.py.2] from a import g [case testQuickAndDirtyUnmodifiedModuleDoesNotGenerateError] # flags: --quick-and-dirty import a, b, c, d [file a.py] import d class C: def f(self): pass [file b.py] from a import C [file c.py] from b import C [file d.py] from c import C C().f() # no error because unmodified [file a.py.2] import d class C: def g(self): pass # renamed f to g [file c.py.2] from a import C [out1] [out2] [case testQuickAndDirtyUnmodifiedModuleDoesNotGenerateError2] # flags: --quick-and-dirty import a, b, c [file a.py] import c class C: x = 0 [file b.py] import a x = a.C.x # type: int [file c.py] import b x = b.x [file a.py.2] import c class C: pass # Removed x [out1] [out2] 
[rechecked a] [stale a] [case testQuickAndDirtyTypeAliasReference] # flags: --quick-and-dirty import a, b [file a.py] import b def f(x: b.S) -> b.S: return x [file b.py] import a S = str [file a.py.2] import b def f(x: b.S) -> int: return 0 [case testQuickAndDirtyNamedTupleReference] # flags: --quick-and-dirty import a, b [file a.py] import b def f(x: b.P) -> b.P: return x [file b.py] from typing import NamedTuple import a P = NamedTuple('P', (('x', int),)) [file a.py.2] import b def f(x: b.P) -> int: return 0 [case testQuickAndDirtyTypeVarReference] # flags: --quick-and-dirty import a, b [file a.py] import b def f(x: b.T) -> b.T: return x [file b.py] from typing import TypeVar import a T = TypeVar('T') [file a.py.2] import b def f(x: b.T) -> int: return 0 [case testQuickAndDirtyDeleteFunctionUsedByOtherModule] # flags: --quick-and-dirty import a [file a.py] from b import f [file b.py] import a def f() -> int: pass a.f() [file b.py.2] import a reveal_type(a.f) [out2] tmp/b.py:2: error: Revealed type is 'Any' [case testQuickAndDirtyDeleteClassUsedInAnnotation] # flags: --quick-and-dirty import a [file a.py] import b def f() -> b.C: pass [file b.py] import a class C: pass [file b.py.2] import a reveal_type(a.f) a.f().x [out2] tmp/b.py:2: error: Revealed type is 'def () -> ' tmp/b.py:3: error: "" has no attribute "x" [case testQuickAndDirtyDeleteClassUsedAsBase] # flags: --quick-and-dirty import a [file a.py] import b class D(b.C): pass [file b.py] import a class C: pass [file b.py.2] import a reveal_type(a.D) a.D().x [out2] tmp/b.py:2: error: Revealed type is 'Any' [case testQuickAndDirtyDeleteNestedClassUsedInAnnotation] # flags: --quick-and-dirty import a [file a.py] import b def f() -> b.C.D: pass [file b.py] import a class C: class D: pass [file b.py.2] import a class C: pass reveal_type(a.f) a.f().x [out2] tmp/b.py:4: error: Revealed type is 'def () -> ' tmp/b.py:5: error: "" has no attribute "x" [case testQuickAndDirtyTurnGenericClassIntoNonGeneric-skip] # flags: --quick-and-dirty import a [file a.py] import b def f() -> b.C[int]: pass [file b.py] from typing import TypeVar, Generic import a T = TypeVar('T') class C(Generic[T]): pass [file b.py.2] import a class C: pass reveal_type(a.f) c: C d = a.f() c = d d = c [out2] # TODO: Crashes (https://github.com/python/mypy/issues/3279) [case testQuickAndDirtyTurnClassIntoGenericOne-skip] # flags: --quick-and-dirty import a [file a.py] import b def f() -> b.C: pass [file b.py] import a class C: pass [file b.py.2] from typing import TypeVar, Generic import a T = TypeVar('T') class C(Generic[T]): pass reveal_type(a.f) c: C[int] d = a.f() d = c c = d [out2] # TODO: Crashes (https://github.com/python/mypy/issues/3279) [case testQuickAndDirtyDeleteTypeVarUsedInAnnotation] # flags: --quick-and-dirty import a [file a.py] import b def f(x: b.T) -> b.T: return x [file b.py] from typing import TypeVar import a T = TypeVar('T') [file b.py.2] import a reveal_type(a.f) reveal_type(a.f(1)) [out2] tmp/b.py:2: error: Revealed type is 'def [b.T] (x: b.T`-1) -> b.T`-1' tmp/b.py:3: error: Revealed type is 'builtins.int*' [case testQuickAndDirtyDeleteNewTypeUsedInAnnotation] # flags: --quick-and-dirty import a [file a.py] import b def f() -> b.C: pass [file b.py] from typing import NewType import a C = NewType('C', int) [file b.py.2] import a reveal_type(a.f) a.f().x [out2] tmp/b.py:2: error: Revealed type is 'def () -> ' tmp/b.py:3: error: "" has no attribute "x" [case testQuickAndDirtyChangeClassIntoFunction] # flags: --quick-and-dirty import a [file a.py] 
import b def f() -> b.C: pass [file b.py] import a class C: pass [file b.py.2] import a def C() -> None: pass reveal_type(a.f) a.f().x [out2] tmp/b.py:3: error: Revealed type is 'def () -> ' tmp/b.py:4: error: "" has no attribute "x" [case testQuickAndDirtyChangeClassIntoVariable] # flags: --quick-and-dirty import a [file a.py] import b def f() -> b.C: pass [file b.py] import a class C: pass [file b.py.2] import a C = 0 reveal_type(a.f) a.f().x [out2] tmp/b.py:3: error: Revealed type is 'def () -> ' tmp/b.py:4: error: "" has no attribute "x" [case testQuickAndDirtyAddFile] # flags: --quick-and-dirty import a [file a.py] import b x = '' [file b.py] import a [file b.py.2] import c reveal_type(c.x) [file c.py.2] import a x = 1 reveal_type(a.x) [rechecked b, c] [stale] [out2] tmp/c.py:3: error: Revealed type is 'builtins.str' tmp/b.py:2: error: Revealed type is 'builtins.int' [case testQuickAndDirtyDeleteFile] # flags: --quick-and-dirty import b [file a.py] def f() -> None: pass [file b.py] import a a.f() [delete a.py.2] [file b.py.3] import a a.f() # Comment change [file b.py.4] # Remove import [rechecked b] [stale] [rechecked2 b] [stale2] [rechecked3 b] [stale3 b] [out2] tmp/b.py:1: error: Cannot find module named 'a' tmp/b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [out3] tmp/b.py:1: error: Cannot find module named 'a' tmp/b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [out4] [case testQuickAndDirtyRenameModule] # flags: --quick-and-dirty import a [file a.py] import b b.f() [file b.py] def f() -> None: pass [delete b.py.2] [file c.py.2] def f() -> None: pass [file a.py.2] import c c.f(1) [file c.py.3] def f() -> None: pass # comment change [file c.py.4] def f(x) -> None: pass [out] [out2] tmp/a.py:2: error: Too many arguments for "f" [out3] tmp/a.py:2: error: Too many arguments for "f" [out4] [rechecked a, c] [stale c] [rechecked2 a, c] [stale2] [rechecked3 a, c] [stale3 a, c] [case testQuickAndDirtyMultiplePasses] # flags: --quick-and-dirty import a [file a.py] import b b.f() [file b.py] def f() -> None: pass [file b.py.2] # Write cache file but the error in a is not caught yet. def f(x) -> None: pass [file a.py.3] # Editing a triggers the error. 
import b b.f() [rechecked b] [rechecked2 a] [out2] [out3] tmp/a.py:3: error: Too few arguments for "f" [case testQuickAndDirtySerializeStaleType] # flags: --quick-and-dirty import a, c [file a.py] import b def f() -> b.C: pass [file b.py] import a class C: pass [file c.py] [file b.py.2] import a x = a.f() [file c.py.3] import b reveal_type(b.x) def g(x: object) -> None: pass g(b.x) b.x.y [rechecked b] [stale b] [rechecked2 c] [stale2] [out3] tmp/c.py:2: error: Revealed type is '' tmp/c.py:5: error: "" has no attribute "y" [case testCacheDeletedAfterErrorsFound] import a [file a.py] from b import x [file b.py] from c import x [file c.py] x = 1 [file c.py.2] 1 + 1 [file a.py.3] from b import x 1 + 1 [out] [out2] tmp/b.py:1: error: Module 'c' has no attribute 'x' [out3] tmp/b.py:1: error: Module 'c' has no attribute 'x' [case testCacheDeletedAfterErrorsFound2] import a [file a.py] from b import x [file b.py] from c import C x: C [file c.py] class C: pass [file c.py.2] def C(): pass [file a.py.3] from b import x 1 + 1 [out] [out2] tmp/b.py:2: error: Invalid type "c.C" [out3] tmp/b.py:2: error: Invalid type "c.C" [case testCacheDeletedAfterErrorsFound3] import a [file a.py] import b b.f() [file b.py] def f() -> None: pass [file b.py.2] def f(x) -> None: pass [out] [out2] tmp/a.py:2: error: Too few arguments for "f" [out3] tmp/a.py:2: error: Too few arguments for "f" [case testCacheDeletedAfterErrorsFound4] import a [file a.py] from b import x [file b.py] from c import x [file c.py] from d import x [file d.py] x = 1 [file d.py.2] 1 + 1 [file a.py.3] from b import x 1 + 1 [out] [out2] tmp/c.py:1: error: Module 'd' has no attribute 'x' [out3] tmp/c.py:1: error: Module 'd' has no attribute 'x' [case testNoCrashOnDoubleImportAliasQuick] # cmd: mypy -m e # cmd2: mypy -m c # cmd3: mypy -m e # flags: --quick [file c.py] from typing import List Alias = List[int] [file c.py.2] from typing import List Alias = int [file d.py] from c import Alias [file e.py] from d import Alias [file e.py.3] from d import Alias x: Alias [out3] [builtins fixtures/list.pyi] [case testSerializeAbstractPropertyIncremental] from abc import abstractmethod import typing class A: @property def f(self) -> int: return 1 @f.setter # type: ignore @abstractmethod def f(self, x: int) -> None: pass a = A() [builtins fixtures/property.pyi] [case testSerializeAbstractPropertyDisallowUntypedIncremental] # flags: --disallow-untyped-defs from abc import abstractmethod import typing class A: @property def f(self) -> int: return 1 @f.setter # type: ignore @abstractmethod def f(self, x: int) -> None: pass a = A() [builtins fixtures/property.pyi] [case testClassNamesResolutionCrashAccess] import mod [file mod.py] class C: def __init__(self) -> None: self.int = '' def f(self, f: int) -> None: pass [file mod.py.2] class C: def __init__(self) -> None: self.int = '' def f(self, f: int) -> None: f.x [out] [out2] tmp/mod.py:6: error: "int" has no attribute "x" [case testClassNamesResolutionCrashReadCache] import mod [file mod.py] import submod [file mod.py.2] from submod import C c = C() reveal_type(c.int) reveal_type(c.y) [file submod.py] from typing import List class C: def __init__(self) -> None: self.int = [] # type: List[int] def f(self, f: int) -> None: self.y = f [builtins fixtures/list.pyi] [out] [out2] tmp/mod.py:4: error: Revealed type is 'builtins.list[builtins.int]' tmp/mod.py:5: error: Revealed type is 'builtins.int' [case testClassNamesResolutionCrashReveal] import mod [file mod.py] class Foo(object): def __init__(self) -> None: self.bytes 
= b"foo" def bar(self, f: bytes): pass foo = Foo() foo.bar(b"test") [file mod.py.2] class Foo(object): def __init__(self) -> None: self.bytes = b"foo" def bar(self, f: bytes): reveal_type(f) foo = Foo() foo.bar(b"test") [out] [out2] tmp/mod.py:7: error: Revealed type is 'builtins.bytes' [case testIncrementalWithSilentImports] # cmd: mypy -m a # cmd2: mypy -m b # flags: --follow-imports=silent [file a.py] import b b.foo(1, 2) [file b.py] def foo(a: int, b: int) -> str: return a + b [out1] [out2] tmp/b.py:2: error: Incompatible return value type (got "int", expected "str") [case testForwardNamedTupleToUnionWithOtherNamedTUple] from typing import NamedTuple, Union class Person(NamedTuple): name: Union[str, "Pair"] class Pair(NamedTuple): first: str last: str Person(name=Pair(first="John", last="Doe")) [out] [case testNoCrashForwardRefToBrokenDoubleNewTypeIncremental] from typing import Any, List, NewType Foo = NewType('NotFoo', int) # type: ignore Foos = NewType('Foos', List[Foo]) # type: ignore def frob(foos: List[Foos]) -> None: pass [builtins fixtures/list.pyi] [out] [case testNoCrashForwardRefOverloadIncremental] from typing import overload, List @overload def f(x: int) -> int: ... @overload def f(x: F) -> F: ... def f(x): pass F = List[int] [builtins fixtures/list.pyi] [out] [case testNoCrashForwardRefOverloadIncrementalClass] from typing import overload, Tuple, NamedTuple x: C class C: @overload def f(self, x: str) -> N: pass @overload def f(self, x: int) -> int: pass def f(self, x): pass class N(NamedTuple): x: A A = Tuple[int] [builtins fixtures/tuple.pyi] [out] [case testNewTypeFromForwardNamedTupleIncremental] from typing import NewType, NamedTuple, Tuple NT = NewType('NT', N) class N(NamedTuple): x: int x: NT = N(1) # type: ignore x = NT(N(1)) [out] [case testNewTypeFromForwardTypedDictIncremental] from typing import NewType, Tuple, Dict from mypy_extensions import TypedDict NT = NewType('NT', N) # type: ignore class N(TypedDict): x: A A = Dict[str, int] [builtins fixtures/dict.pyi] [out] -- Some crazy selef-referential named tuples, types dicts, and aliases -- to be sure that everything can be _serialized_ (i.e. ForwardRef's are removed). 
-- For this reason errors are silenced (tests with # type: ignore have equivalents in other files) [case testForwardTypeAliasInBase1] from typing import List class C(List['A']): pass A = List[int] x: int = C()[0][0] [builtins fixtures/list.pyi] [out] [case testForwardTypeAliasInBase2] from typing import List, Generic, TypeVar, NamedTuple T = TypeVar('T') class C(A, B): #type: ignore pass class G(Generic[T]): pass A = G[C] class B(NamedTuple): x: int C().x C()[0] [builtins fixtures/list.pyi] [out] [case testSerializeRecursiveAliases1] from typing import Type, Callable, Union A = Union[A, int] # type: ignore B = Callable[[B], int] # type: ignore C = Type[C] # type: ignore [out] [case testSerializeRecursiveAliases2] from typing import Type, Callable, Union A = Union[B, int] # type: ignore B = Callable[[C], int] # type: ignore C = Type[A] # type: ignore [out] [case testSerializeRecursiveAliases3] from typing import Type, Callable, Union, NamedTuple A = Union[B, int] # type: ignore B = Callable[[C], int] # type: ignore class C(NamedTuple): # type: ignore x: A [out] [case testGenericTypeAliasesForwardAnyIncremental1] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') IntNode = Node[int, S] AnyNode = Node[S, T] class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: self.x = x self.y = y def output() -> IntNode[str]: return Node(1, 'x') x = output() # type: IntNode y = None # type: IntNode y.x = 1 y.y = 1 y.y = 'x' z = Node(1, 'x') # type: AnyNode [out] [case testGenericTypeAliasesForwardAnyIncremental2] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: self.x = x self.y = y def output() -> IntNode[str]: return Node(1, 'x') x = output() # type: IntNode y = None # type: IntNode y.x = 1 y.y = 1 y.y = 'x' z = Node(1, 'x') # type: AnyNode IntNode = Node[int, S] AnyNode = Node[S, T] [out] [case testNamedTupleForwardAsUpperBoundSerialization] from typing import NamedTuple, TypeVar, Generic T = TypeVar('T', bound='M') class G(Generic[T]): x: T yg: G[M] z: int = G[M]().x.x z = G[M]().x[0] M = NamedTuple('M', [('x', int)]) [out] [case testSelfRefNTIncremental1] from typing import Tuple, NamedTuple Node = NamedTuple('Node', [ # type: ignore ('name', str), ('children', Tuple['Node', ...]), ]) n: Node [builtins fixtures/tuple.pyi] [case testSelfRefNTIncremental2] from typing import Tuple, NamedTuple A = NamedTuple('A', [ # type: ignore ('x', str), ('y', Tuple['B', ...]), ]) class B(NamedTuple): # type: ignore x: A y: int n: A [builtins fixtures/tuple.pyi] [case testSelfRefNTIncremental3] from typing import NamedTuple, Tuple class B(NamedTuple): # type: ignore x: Tuple[A, int] y: int A = NamedTuple('A', [ # type: ignore ('x', str), ('y', 'B'), ]) n: B m: A lst = [m, n] [builtins fixtures/tuple.pyi] [case testSelfRefNTIncremental4] from typing import NamedTuple class B(NamedTuple): # type: ignore x: A y: int class A(NamedTuple): # type: ignore x: str y: B n: A [builtins fixtures/tuple.pyi] [case testSelfRefNTIncremental5] from typing import NamedTuple B = NamedTuple('B', [ # type: ignore ('x', A), ('y', int), ]) A = NamedTuple('A', [ # type: ignore ('x', str), ('y', 'B'), ]) n: A def f(m: B) -> None: pass [builtins fixtures/tuple.pyi] [case testCrashWithPartialGlobalAndCycle] import bar [file foo.py] import bar my_global_dict = {} # type: ignore def external_func_0() -> None: global my_global_dict bar.external_list my_global_dict[12] = 0 [file bar.py] import foo external_list = [0] [builtins 
fixtures/dict.pyi] [case testIncrementalCrashOnTypeWithFunction] import a [file a.py] import b [file a.py.2] from b import x [file b.py] from typing import TypeVar, Type T = TypeVar('T') def tp(arg: T) -> Type[T]: pass def func(x: int) -> int: pass x = tp(func) [out] [out2] [case testReprocessModuleEvenIfInterfaceHashDoesNotChange] import a import d [file a.py] import b x: b.c.A x = b.c.A() [file b.py] import c [file c.py] class A: x = 1 [file d.py] import a def f() -> None: pass [file a.py.2] import b x: b.c.A [file c.py.3] class A: x = 2 [file d.py.4] import a def f() -> None: from c import A a.x = [A(), a.x][0] [builtins fixtures/list.pyi] [stale] [rechecked a] [stale2] [rechecked2 c] [stale3] [rechecked3 d] [out1] [out2] [out3] [out4] [case testTreeShadowingViaParentPackage] import m.semanal [file m/__init__.py] pass [file m/nodes.py] if False: import m.types import m.semanal class Node: line: int class FuncBase(Node): type: m.types.Type class OverloadedFuncDef(FuncBase): pass [file m/types.py] from m.nodes import Node class Type(Node): pass class Overloaded(Type): pass [file m/semanal.py] from m.nodes import OverloadedFuncDef from m.types import Overloaded class C: def func(self, defn: OverloadedFuncDef): defn.type = Overloaded() defn.type.line = 0 [file m/nodes.py.2] if False: import m.types import m.semanal class Node: line: int class FuncBase(Node): type: m.types.Type class OverloadedFuncDef(FuncBase): pass extra = 1 [file m/types.py.2] from m.nodes import Node class Type(Node): pass class Overloaded(Type): pass extra = 1 [builtins fixtures/list.pyi] [file m/semanal.py.2] from m.nodes import OverloadedFuncDef from m.types import Overloaded class C: def func(self, defn: OverloadedFuncDef): defn.type = Overloaded() defn.type.line = 0 extra = 1 [out1] [out2] [case testErrorsAffectDependentsOnly] # cmd: mypy -m m.a m.b m.c [file m/__init__.py] [file m/a.py] 1 + '' # Deliberate error [file m/b.py] import m.a # Depends on module with error [file m/c.py] import m # No error here [rechecked m.a, m.b] [out1] tmp/m/a.py:1: error: Unsupported operand types for + ("int" and "str") [out2] tmp/m/a.py:1: error: Unsupported operand types for + ("int" and "str") mypy-0.560/test-data/unit/check-inference-context.test0000644€tŠÔÚ€2›s®0000006070713215007205027142 0ustar jukkaDROPBOX\Domain Users00000000000000 -- Basic test cases -- ---------------- [case testBasicContextInference] from typing import TypeVar, Generic T = TypeVar('T') ab = None # type: A[B] ao = None # type: A[object] b = None # type: B ao = f() ab = f() b = f() # E: Incompatible types in assignment (expression has type "A[]", variable has type "B") def f() -> 'A[T]': pass class A(Generic[T]): pass class B: pass [case testBasicContextInferenceForConstructor] from typing import TypeVar, Generic T = TypeVar('T') ab = None # type: A[B] ao = None # type: A[object] b = None # type: B ao = A() ab = A() b = A() # E: Incompatible types in assignment (expression has type "A[]", variable has type "B") class A(Generic[T]): pass class B: pass [case testIncompatibleContextInference] from typing import TypeVar, Generic T = TypeVar('T') b = None # type: B c = None # type: C ab = None # type: A[B] ao = None # type: A[object] ac = None # type: A[C] ac = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "C" ab = f(c) # E: Argument 1 to "f" has incompatible type "C"; expected "B" ao = f(b) ab = f(b) ao = f(c) ac = f(c) def f(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass class C: pass -- Local variables -- --------------- 
[case testInferGenericLocalVariableTypeWithEmptyContext] from typing import TypeVar, Generic T = TypeVar('T') def g() -> None: ao = None # type: A[object] ab = None # type: A[B] o = None # type: object b = None # type: B x = f(o) ab = x # E: Incompatible types in assignment (expression has type "A[object]", variable has type "A[B]") ao = x y = f(b) ao = y # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") ab = y def f(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass [out] [case testInferLocalVariableTypeWithUnderspecifiedGenericType] from typing import TypeVar, Generic T = TypeVar('T') def g() -> None: x = f() # E: Need type annotation for variable def f() -> 'A[T]': pass class A(Generic[T]): pass [out] [case testInferMultipleLocalVariableTypesWithTupleRvalue] from typing import TypeVar, Generic T = TypeVar('T') def g() -> None: ao = None # type: A[object] ab = None # type: A[B] b = None # type: B x, y = f(b), f(b) ao = x # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") ao = y # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") ab = x ab = y def f(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass [out] [case testInferMultipleLocalVariableTypesWithArrayRvalueAndNesting] from typing import TypeVar, List, Generic T = TypeVar('T') def h() -> None: ao = None # type: A[object] ab = None # type: A[B] b = None # type: B x, y = g(f(b)) ao = x # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") ao = y # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") ab = x ab = y def f(a: T) -> 'A[T]': pass def g(a: T) -> List[T]: pass class A(Generic[T]): pass class B: pass [builtins fixtures/for.pyi] [out] -- Return types with multiple tvar instances -- ----------------------------------------- [case testInferenceWithTypeVariableTwiceInReturnType] from typing import TypeVar, Tuple, Generic T = TypeVar('T') b = None # type: B o = None # type: object ab = None # type: A[B] ao = None # type: A[object] ab, ao = f(b) # Fail ao, ab = f(b) # Fail ao, ao = f(b) ab, ab = f(b) ao, ao = f(o) def f(a: T) -> 'Tuple[A[T], A[T]]': pass class A(Generic[T]): pass class B: pass [builtins fixtures/tuple.pyi] [out] main:8: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") main:9: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") [case testInferenceWithTypeVariableTwiceInReturnTypeAndMultipleVariables] from typing import TypeVar, Tuple, Generic S = TypeVar('S') T = TypeVar('T') b = None # type: B o = None # type: object ab = None # type: A[B] ao = None # type: A[object] ao, ao, ab = f(b, b) # Fail ao, ab, ao = g(b, b) # Fail ao, ab, ab, ab = h(b, b) # Fail ab, ab, ao, ab = h(b, b) # Fail ao, ab, ab = f(b, b) ab, ab, ao = g(b, b) ab, ab, ab, ab = h(b, b) def f(a: S, b: T) -> 'Tuple[A[S], A[T], A[T]]': pass def g(a: S, b: T) -> 'Tuple[A[S], A[S], A[T]]': pass def h(a: S, b: T) -> 'Tuple[A[S], A[S], A[T], A[T]]': pass class A(Generic[T]): pass class B: pass [builtins fixtures/tuple.pyi] [out] main:9: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") main:10: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") main:11: error: Incompatible types in assignment (expression has type "A[B]", 
variable has type "A[object]") main:12: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]") -- Multiple tvar instances in arguments -- ------------------------------------ [case testMultipleTvatInstancesInArgs] from typing import TypeVar, Generic T = TypeVar('T') ac = None # type: A[C] ab = None # type: A[B] ao = None # type: A[object] b = None # type: B c = None # type: C o = None # type: object ab = f(b, o) # E: Argument 2 to "f" has incompatible type "object"; expected "B" ab = f(o, b) # E: Argument 1 to "f" has incompatible type "object"; expected "B" ac = f(b, c) # E: Argument 1 to "f" has incompatible type "B"; expected "C" ac = f(c, b) # E: Argument 2 to "f" has incompatible type "B"; expected "C" ao = f(b, c) ao = f(c, b) ab = f(c, b) def f(a: T, b: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass class C(B): pass -- Nested generic function calls -- ----------------------------- [case testNestedGenericFunctionCall1] from typing import TypeVar, Generic T = TypeVar('T') aab = None # type: A[A[B]] aao = None # type: A[A[object]] ao = None # type: A[object] b = None # type: B o = None # type: object aab = f(f(o)) # E: Argument 1 to "f" has incompatible type "object"; expected "B" aab = f(f(b)) aao = f(f(b)) ao = f(f(b)) def f(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass [case testNestedGenericFunctionCall2] from typing import TypeVar, Generic T = TypeVar('T') ab = None # type: A[B] ao = None # type: A[object] b = None # type: B o = None # type: object ab = f(g(o)) # E: Argument 1 to "g" has incompatible type "object"; expected "B" ab = f(g(b)) ao = f(g(b)) def f(a: T) -> T: pass def g(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass [case testNestedGenericFunctionCall3] from typing import TypeVar, Generic T = TypeVar('T') ab = None # type: A[B] ao = None # type: A[object] b = None # type: B o = None # type: object ab = f(g(o), g(b)) # E: Argument 1 to "g" has incompatible type "object"; expected "B" ab = f(g(b), g(o)) # E: Argument 1 to "g" has incompatible type "object"; expected "B" ab = f(g(b), g(b)) ao = f(g(b), g(o)) ao = f(g(o), g(b)) def f(a: T, b: T) -> T: pass def g(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass -- Method calls -- ------------ [case testMethodCallWithContextInference] from typing import TypeVar, Generic T = TypeVar('T') o = None # type: object b = None # type: B c = None # type: C ao = None # type: A[object] ab = None # type: A[B] ac = None # type: A[C] ab.g(f(o)) # E: Argument 1 to "f" has incompatible type "object"; expected "B" ac = f(b).g(f(c)) # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[C]") ac = f(c).g(f(b)) # E: Argument 1 to "f" has incompatible type "B"; expected "C" ab = f(b).g(f(c)) ab.g(f(c)) def f(a: T) -> 'A[T]': pass class A(Generic[T]): def g(self, a: 'A[T]') -> 'A[T]': pass class B: pass class C(B): pass -- List expressions -- ---------------- [case testEmptyListExpression] from typing import List aa = None # type: List[A] ao = None # type: List[object] a = None # type: A a = [] # E: Incompatible types in assignment (expression has type "List[]", variable has type "A") aa = [] ao = [] class A: pass [builtins fixtures/list.pyi] [case testSingleItemListExpressions] from typing import List aa = None # type: List[A] ab = None # type: List[B] ao = None # type: List[object] a = None # type: A b = None # type: B aa = [b] # E: List item 0 has incompatible type "B"; expected "A" ab = [a] # E: List item 0 has 
incompatible type "A"; expected "B" aa = [a] ab = [b] ao = [a] aa = [None] ao = [None] class A: pass class B: pass [builtins fixtures/list.pyi] [case testMultiItemListExpressions] from typing import List aa = None # type: List[A] ab = None # type: List[B] ao = None # type: List[object] a = None # type: A b = None # type: B ab = [b, a] # E: List item 1 has incompatible type "A"; expected "B" ab = [a, b] # E: List item 0 has incompatible type "A"; expected "B" aa = [a, b, a] ao = [a, b] class A: pass class B(A): pass [builtins fixtures/list.pyi] [case testLocalVariableInferenceFromEmptyList] import typing def f() -> None: a = [] # E: Need type annotation for variable b = [None] c = [B()] c = [object()] # E: List item 0 has incompatible type "object"; expected "B" c = [B()] class B: pass [builtins fixtures/list.pyi] [out] [case testNestedListExpressions] from typing import List aao = None # type: List[List[object]] aab = None # type: List[List[B]] ab = None # type: List[B] b = None # type: B o = None # type: object aao = [[o], ab] # E: List item 1 has incompatible type "List[B]"; expected "List[object]" aab = [[], [o]] # E: List item 0 has incompatible type "object"; expected "B" aao = [[None], [b], [], [o]] aab = [[None], [b], []] aab = [ab, []] class B: pass [builtins fixtures/list.pyi] -- Complex context -- --------------- [case testParenthesesAndContext] from typing import List l = ([A()]) # type: List[object] class A: pass [builtins fixtures/list.pyi] [case testComplexTypeInferenceWithTuple] from typing import TypeVar, Tuple, Generic k = TypeVar('k') t = TypeVar('t') v = TypeVar('v') def f(x: Tuple[k]) -> 'A[k]': pass d = f((A(),)) # type: A[A[B]] class A(Generic[t]): pass class B: pass class C: pass class D(Generic[k, v]): pass [builtins fixtures/list.pyi] -- Dictionary literals -- ------------------- [case testDictionaryLiteralInContext] from typing import Dict, TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass class B: pass class C: pass a_b = A() # type: A[B] a_c = A() # type: A[C] d = {A() : a_c, a_b : A()} # type: Dict[A[B], A[C]] [builtins fixtures/dict.pyi] -- Special cases (regression tests etc.) 
-- ------------------------------------- [case testInitializationWithInferredGenericType] from typing import TypeVar, Generic T = TypeVar('T') c = f(A()) # type: C[A] # E: Argument 1 to "f" has incompatible type "A"; expected "C[A]" def f(x: T) -> T: pass class C(Generic[T]): pass class A: pass [case testInferredGenericTypeAsReturnValue] from typing import TypeVar, Generic T = TypeVar('T') def t() -> 'A[B]': return f(D()) # E: Argument 1 to "f" has incompatible type "D"; expected "B" return A() return f(C()) def f(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass class C(B): pass class D: pass [out] [case testIntersectionWithInferredGenericArgument] from foo import * [file foo.pyi] from typing import overload, TypeVar, Generic T = TypeVar('T') f(A()) @overload def f(x: 'A[B]') -> None: pass @overload def f(x: 'B') -> None: pass class A(Generic[T]): pass class B: pass [case testInferenceWithAbstractClassContext] from typing import TypeVar, Generic from abc import abstractmethod, ABCMeta t = TypeVar('t') x = A() # type: I[int] a_object = A() # type: A[object] y = a_object # type: I[int] # E: Incompatible types in assignment (expression has type "A[object]", variable has type "I[int]") class I(Generic[t]): @abstractmethod def f(self): pass class A(I[t], Generic[t]): def f(self): pass [case testInferenceWithAbstractClassContext2] from typing import TypeVar, Generic from abc import abstractmethod, ABCMeta t = TypeVar('t') a = f(A()) # type: A[int] a_int = A() # type: A[int] aa = f(a_int) class I(Generic[t]): pass class A(I[t], Generic[t]): pass def f(i: I[t]) -> A[t]: pass [case testInferenceWithAbstractClassContext3] from typing import TypeVar, Generic, Iterable t = TypeVar('t') class set(Generic[t]): def __init__(self, iterable: Iterable[t]) -> None: pass b = bool() l = set([b]) l = set([object()]) # E: List item 0 has incompatible type "object"; expected "bool" [builtins fixtures/for.pyi] -- Infer generic type in 'Any' context -- ----------------------------------- [case testInferGenericTypeInAnyContext] from typing import Any, TypeVar, Generic s = TypeVar('s') t = TypeVar('t') x = [] # type: Any y = C() # type: Any class C(Generic[s, t]): pass [builtins fixtures/list.pyi] -- Lambdas -- ------- [case testInferLambdaArgumentTypeUsingContext] from typing import Callable f = None # type: Callable[[B], A] f = lambda x: x.o f = lambda x: x.x # E: "B" has no attribute "x" class A: pass class B: o = None # type: A [case testInferLambdaReturnTypeUsingContext] from typing import List, Callable f = None # type: Callable[[], List[A]] f = lambda: [] f = lambda: [B()] # E: List item 0 has incompatible type "B"; expected "A" class A: pass class B: pass [builtins fixtures/list.pyi] [case testInferLambdaTypeUsingContext] x : str = (lambda x: x + 1)(1) # E: Incompatible types in assignment (expression has type "int", variable has type "str") reveal_type((lambda x, y: x + y)(1, 2)) # E: Revealed type is 'builtins.int' (lambda x, y: x + y)(1, "") # E: Unsupported operand types for + ("int" and "str") (lambda *, x, y: x + y)(x=1, y="") # E: Unsupported operand types for + ("int" and "str") reveal_type((lambda s, i: s)(i=0, s='x')) # E: Revealed type is 'builtins.str' reveal_type((lambda s, i: i)(i=0, s='x')) # E: Revealed type is 'builtins.int' reveal_type((lambda x, s, i: x)(1.0, i=0, s='x')) # E: Revealed type is 'builtins.float' (lambda x, s, i: x)() # E: Too few arguments (lambda: 0)(1) # E: Too many arguments -- varargs are not handled, but it should not crash reveal_type((lambda *k, s, i: 
i)(type, i=0, s='x')) # E: Revealed type is 'Any' reveal_type((lambda s, *k, i: i)(i=0, s='x')) # E: Revealed type is 'Any' reveal_type((lambda s, i, **k: i)(i=0, s='x')) # E: Revealed type is 'Any' [builtins fixtures/dict.pyi] [case testInferLambdaAsGenericFunctionArgument] from typing import TypeVar, List, Any, Callable t = TypeVar('t') class A: x = None # type: A def f(a: List[t], fn: Callable[[t], Any]) -> None: pass list_a = [] # type: List[A] f(list_a, lambda a: a.x) [builtins fixtures/list.pyi] [case testLambdaWithoutContext] reveal_type(lambda x: x) # E: Revealed type is 'def (x: Any) -> Any' reveal_type(lambda x: 1) # E: Revealed type is 'def (x: Any) -> builtins.int' [case testLambdaContextVararg] from typing import Callable def f(t: Callable[[str], str]) -> str: '' f(lambda *_: '') [case testInvalidContextForLambda] from typing import Callable f = lambda x: A() # type: Callable[[], A] f2 = lambda: A() # type: Callable[[A], A] class A: pass [out] main:2: error: Incompatible types in assignment (expression has type "Callable[[Any], A]", variable has type "Callable[[], A]") main:2: error: Cannot infer type of lambda main:3: error: Incompatible types in assignment (expression has type "Callable[[], A]", variable has type "Callable[[A], A]") main:3: error: Cannot infer type of lambda [case testEllipsisContextForLambda] from typing import Callable f1 = lambda x: 1 # type: Callable[..., int] f2 = lambda: 1 # type: Callable[..., int] f3 = lambda *args, **kwargs: 1 # type: Callable[..., int] f4 = lambda x: x # type: Callable[..., int] g = lambda x: 1 # type: Callable[..., str] [builtins fixtures/dict.pyi] [out] main:6: error: Incompatible types in assignment (expression has type "Callable[[Any], int]", variable has type "Callable[..., str]") main:6: error: Incompatible return value type (got "int", expected "str") [case testEllipsisContextForLambda2] from typing import TypeVar, Callable T = TypeVar('T') def foo(arg: Callable[..., T]) -> None: pass foo(lambda: 1) [case testLambdaNoneInContext] from typing import Callable def f(x: Callable[[], None]) -> None: pass def g(x: Callable[[], int]) -> None: pass f(lambda: None) g(lambda: None) [case testIsinstanceInInferredLambda] from typing import TypeVar, Callable T = TypeVar('T') S = TypeVar('S') class A: pass class B(A): pass class C(A): pass def f(func: Callable[[T], S], *z: T, r: S = None) -> S: pass f(lambda x: 0 if isinstance(x, B) else 1) # E: Cannot infer type argument 1 of "f" f(lambda x: 0 if isinstance(x, B) else 1, A())() # E: "int" not callable f(lambda x: x if isinstance(x, B) else B(), A(), r=B())() # E: "B" not callable f( # E: Argument 1 to "f" has incompatible type "Callable[[A], A]"; expected "Callable[[A], B]" lambda x: B() if isinstance(x, B) else x, # E: Incompatible return value type (got "A", expected "B") A(), r=B()) [builtins fixtures/isinstance.pyi] -- Overloads + generic functions -- ----------------------------- [case testMapWithOverloadedFunc] from foo import * [file foo.pyi] from typing import TypeVar, Callable, List, overload, Any t = TypeVar('t') s = TypeVar('s') def map(f: Callable[[t], s], seq: List[t]) -> List[s]: pass @overload def g(o: object) -> 'B': pass @overload def g(o: 'A', x: Any = None) -> 'B': pass class A: pass class B: pass m = map(g, [A()]) b = m # type: List[B] a = m # type: List[A] # E: Incompatible types in assignment (expression has type "List[B]", variable has type "List[A]") [builtins fixtures/list.pyi] -- Boolean operators -- ----------------- [case testOrOperationInferredFromContext] 
from typing import List a, b, c = None, None, None # type: (List[A], List[B], List[C]) a = a or [] a = [] or a b = b or [C()] a = a or b # E: Incompatible types in assignment (expression has type "Union[List[A], List[B]]", variable has type "List[A]") b = b or c # E: Incompatible types in assignment (expression has type "Union[List[B], List[C]]", variable has type "List[B]") class A: pass class B: pass class C(B): pass [builtins fixtures/list.pyi] -- Special cases -- ------------- [case testSomeTypeVarsInferredFromContext] from typing import List, TypeVar t = TypeVar('t') s = TypeVar('s') # Some type variables can be inferred using context, but not all of them. a = None # type: List[A] a = f(A(), B()) a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" def f(a: s, b: t) -> List[s]: pass class A: pass class B: pass [builtins fixtures/list.pyi] [case testSomeTypeVarsInferredFromContext2] from typing import List, TypeVar s = TypeVar('s') t = TypeVar('t') # Like testSomeTypeVarsInferredFromContext, but tvars in different order. a = None # type: List[A] a = f(A(), B()) a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" def f(a: s, b: t) -> List[s]: pass class A: pass class B: pass [builtins fixtures/list.pyi] [case testLambdaInListAndHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') map( [lambda x: x], []) def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass class A: pass [builtins fixtures/list.pyi] [out] [case testChainedAssignmentInferenceContexts] from typing import List i = None # type: List[int] s = None # type: List[str] i = i = [] i = s = [] # E: Incompatible types in assignment (expression has type "List[str]", variable has type "List[int]") [builtins fixtures/list.pyi] [case testContextForAttributeDeclaredInInit] from typing import List class A: def __init__(self): self.x = [] # type: List[int] a = A() a.x = [] a.x = [1] a.x = [''] # E: List item 0 has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [case testListMultiplyInContext] from typing import List a = None # type: List[int] a = [None] * 3 a = [''] * 3 # E: List item 0 has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [case testUnionTypeContext] from typing import Union, List, TypeVar T = TypeVar('T') def f(x: Union[List[T], str]) -> None: pass f([1]) f('') f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Union[List[], str]" [builtins fixtures/isinstancelist.pyi] [case testIgnoringInferenceContext] from typing import TypeVar, List T = TypeVar('T') def f(x: List[T]) -> T: pass def g(y: object) -> None: pass a = [1] g(f(a)) [builtins fixtures/list.pyi] [case testStar2Context] from typing import Any, Dict, Tuple, Iterable def f1(iterable: Iterable[Tuple[str, Any]] = None) -> None: f2(**dict(iterable)) def f2(iterable: Iterable[Tuple[str, Any]], **kw: Any) -> None: pass [builtins fixtures/dict.pyi] [out] [case testInferenceInGenericFunction] from typing import TypeVar, List T = TypeVar('T') def f(a: T) -> None: l = [] # type: List[T] l.append(a) l.append(1) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "T" [builtins fixtures/list.pyi] [out] [case testInferenceInGenericClass] from typing import TypeVar, Generic, List S = TypeVar('S') T = TypeVar('T') class A(Generic[S]): def f(self, a: T, b: S) -> None: l = [] # type: List[T] l.append(a) l.append(b) # E: Argument 1 to "append" of "list" has incompatible type "S"; expected "T" 
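-- Note: the two cases above (testInferenceInGenericFunction and
-- testInferenceInGenericClass) rely on type variables being opaque inside a
-- generic body: within the body, T stands for some unknown but fixed type, so a
-- List[T] only accepts values of type T, and concrete values such as the
-- literal 1 (or a value of a different type variable S) are rejected. An
-- illustrative sketch of the same behaviour, with hypothetical names that are
-- not part of this test file:
--     from typing import List, TypeVar
--     T = TypeVar('T')
--     def pair(a: T, b: T) -> List[T]:
--         items = []  # type: List[T]
--         items.append(a)  # accepted: "a" has type T
--         items.append(1)  # rejected: "int" is not compatible with "T"
--         return items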
[builtins fixtures/list.pyi] [out] [case testLambdaInGenericFunction] from typing import TypeVar, Callable T = TypeVar('T') S = TypeVar('S') def f(a: T, b: S) -> None: c = lambda x: x # type: Callable[[T], S] [out] main:5: error: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[T], S]") main:5: error: Incompatible return value type (got "T", expected "S") [case testLambdaInGenericClass] from typing import TypeVar, Callable, Generic T = TypeVar('T') S = TypeVar('S') class A(Generic[T]): def f(self, b: S) -> None: c = lambda x: x # type: Callable[[T], S] [out] main:6: error: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[T], S]") main:6: error: Incompatible return value type (got "T", expected "S") [case testRevealTypeContext] from typing import TypeVar, Callable, Generic T = TypeVar('T') class A(Generic[T]): pass reveal_type(A()) # E: Revealed type is '__main__.A[]' b = reveal_type(A()) # type: A[int] # E: Revealed type is '__main__.A[builtins.int]' [case testUnionWithGenericTypeItemContext] from typing import TypeVar, Union, List T = TypeVar('T') def f(x: Union[T, List[int]]) -> Union[T, List[int]]: pass reveal_type(f(1)) # E: Revealed type is 'Union[builtins.int*, builtins.list[builtins.int]]' reveal_type(f([])) # E: Revealed type is 'builtins.list[builtins.int]' reveal_type(f(None)) # E: Revealed type is 'builtins.list[builtins.int]' [builtins fixtures/list.pyi] [case testUnionWithGenericTypeItemContextAndStrictOptional] # flags: --strict-optional from typing import TypeVar, Union, List T = TypeVar('T') def f(x: Union[T, List[int]]) -> Union[T, List[int]]: pass reveal_type(f(1)) # E: Revealed type is 'Union[builtins.int*, builtins.list[builtins.int]]' reveal_type(f([])) # E: Revealed type is 'builtins.list[builtins.int]' reveal_type(f(None)) # E: Revealed type is 'Union[builtins.None, builtins.list[builtins.int]]' [builtins fixtures/list.pyi] [case testUnionWithGenericTypeItemContextInMethod] from typing import TypeVar, Union, List, Generic T = TypeVar('T') S = TypeVar('S') class C(Generic[T]): def f(self, x: Union[T, S]) -> Union[T, S]: pass c = C[List[int]]() reveal_type(c.f('')) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.str*]' reveal_type(c.f([1])) # E: Revealed type is 'builtins.list[builtins.int]' reveal_type(c.f([])) # E: Revealed type is 'builtins.list[builtins.int]' reveal_type(c.f(None)) # E: Revealed type is 'builtins.list[builtins.int]' [builtins fixtures/list.pyi] [case testGenericMethodCalledInGenericContext] from typing import TypeVar, Generic _KT = TypeVar('_KT') _VT = TypeVar('_VT') _T = TypeVar('_T') class M(Generic[_KT, _VT]): def get(self, k: _KT, default: _T) -> _T: ... def f(d: M[_KT, _VT], k: _KT) -> _VT: return d.get(k, None) # E: "get" of "M" does not return a value [case testGenericMethodCalledInGenericContext2] from typing import TypeVar, Generic, Union _KT = TypeVar('_KT') _VT = TypeVar('_VT') _T = TypeVar('_T') class M(Generic[_KT, _VT]): def get(self, k: _KT, default: _T) -> Union[_VT, _T]: ... 
def f(d: M[_KT, _VT], k: _KT) -> Union[_VT, None]: return d.get(k, None) [case testLambdaDeferredCrash] from typing import Callable class C: def f(self) -> None: g: Callable[[], int] = lambda: 1 or self.x self.x = int() mypy-0.560/test-data/unit/check-inference.test0000644€tŠÔÚ€2›s®0000014037213215007205025455 0ustar jukkaDROPBOX\Domain Users00000000000000-- Inferring locals/globals with simple types -- ------------------------------------------ [case testInferSimpleGvarType] import typing x = A() y = B() x = B() # Fail x = A() x = y # Fail x = x class A: pass class B: pass [out] main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:6: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testInferSimpleLvarType] import typing def f() -> None: x = A() y = B() x = B() # Fail x = A() x = y # Fail x = x class A: pass class B: pass [out] main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:7: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testLvarInitializedToVoid] import typing def f() -> None: a = g() # E: "g" does not return a value #b, c = g() # "g" does not return a value TODO def g() -> None: pass [out] [case testInferringLvarTypeFromArgument] import typing def f(a: 'A') -> None: b = a b = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = a a = b class A: pass class B: pass [out] [case testInferringLvarTypeFromGvar] g = None # type: B def f() -> None: a = g a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = B() class A: pass class B: pass [out] [case testInferringImplicitDynamicTypeForLvar] import typing def f() -> None: a = g() None(a) # E: "None" not callable a.x() def g(): pass [out] [case testInferringExplicitDynamicTypeForLvar] from typing import Any g = None # type: Any def f(a: Any) -> None: b = g None(b) # E: "None" not callable a.x() [out] -- Inferring types of local variables with complex types -- ----------------------------------------------------- [case testInferringTupleTypeForLvar] def f() -> None: a = A(), B() aa = None # type: A bb = None # type: B bb = a[0] # E: Incompatible types in assignment (expression has type "A", variable has type "B") aa = a[1] # E: Incompatible types in assignment (expression has type "B", variable has type "A") aa = a[0] bb = a[1] class A: pass class B: pass [builtins fixtures/tuple.pyi] [out] [case testInferringTupleTypeForLvarWithNones] import typing def f() -> None: a = A(), None b = None, A() class A: pass [builtins fixtures/tuple.pyi] [out] [case testInferringGenericTypeForLvar] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass a_i = None # type: A[int] a_s = None # type: A[str] def f() -> None: a_int = A() # type: A[int] a = a_int a = a_s # E: Incompatible types in assignment (expression has type "A[str]", variable has type "A[int]") a = a_i [builtins fixtures/tuple.pyi] [out] [case testInferringFunctionTypeForLvar] import typing def f() -> None: a = g a(B()) # E: Argument 1 has incompatible type "B"; expected "A" a(A()) def g(a: 'A') -> None: pass class A: pass class B: pass [out] [case testInferringFunctionTypeForLvarFromTypeObject] import typing def f() -> None: a = A a(A()) # E: Too many arguments a() t = a # type: type class A: pass [out] -- Inferring variable types in multiple definition -- 
----------------------------------------------- [case testInferringLvarTypesInMultiDef] import typing def f() -> None: a, b = A(), B() a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = A() b = B() class A: pass class B: pass [out] [case testInferringLvarTypesInTupleAssignment] from typing import Tuple def f() -> None: t = None # type: Tuple[A, B] a, b = t a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = A() b = B() class A: pass class B: pass [out] [case testInferringLvarTypesInNestedTupleAssignment1] from typing import Tuple def f() -> None: t = None # type: Tuple[A, B] a1, (a, b) = A(), t a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = A() b = B() class A: pass class B: pass [out] [case testInferringLvarTypesInNestedTupleAssignment2] import typing def f() -> None: a, (b, c) = A(), (B(), C()) a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") c = A() # E: Incompatible types in assignment (expression has type "A", variable has type "C") a = A() b = B() c = C() class A: pass class B: pass class C: pass [out] [case testInferringLvarTypesInNestedListAssignment] import typing def f() -> None: a, (b, c) = A(), [B(), C()] a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") c = A() # E: Incompatible types in assignment (expression has type "A", variable has type "C") a = A() b = B() c = C() class A: pass class B: pass class C: pass [out] [case testInferringLvarTypesInMultiDefWithNoneTypes] import typing def f() -> None: a, b = A(), None c, d = None, A() class A: pass [out] [case testInferringLvarTypesInNestedTupleAssignmentWithNoneTypes] import typing def f() -> None: a1, (a2, b) = A(), (A(), None) class A: pass [out] [case testInferringLvarTypesInMultiDefWithInvalidTuple] from typing import Tuple t = None # type: Tuple[object, object, object] def f() -> None: a, b = t # Fail c, d, e, f = t # Fail g, h, i = t [builtins fixtures/tuple.pyi] [out] main:5: error: Too many values to unpack (2 expected, 3 provided) main:6: error: Need more than 3 values to unpack (4 expected) [case testInvalidRvalueTypeInInferredMultipleLvarDefinition] import typing def f() -> None: a, b = f # E: 'def ()' object is not iterable c, d = A() # E: '__main__.A' object is not iterable class A: pass [builtins fixtures/for.pyi] [out] [case testInvalidRvalueTypeInInferredNestedTupleAssignment] import typing def f() -> None: a1, (a2, b) = A(), f # E: 'def ()' object is not 
iterable a3, (c, d) = A(), A() # E: '__main__.A' object is not iterable class A: pass [builtins fixtures/for.pyi] [out] [case testInferringMultipleLvarDefinitionWithListRvalue] from typing import List class C: pass class D: pass def f() -> None: list_c = [C()] list_d = [D()] a, b = list_c c, d, e = list_d a = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C") b = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C") c = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D") b = c # E: Incompatible types in assignment (expression has type "D", variable has type "C") a = C() b = C() c = D() d = D() e = D() a = b c = d d = e [builtins fixtures/for.pyi] [out] [case testInferringNestedTupleAssignmentWithListRvalue] from typing import List class C: pass class D: pass def f() -> None: list_c = [C()] list_d = [D()] c1, (a, b) = C(), list_c c2, (c, d, e) = C(), list_d a = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C") b = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C") c = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D") b = c # E: Incompatible types in assignment (expression has type "D", variable has type "C") a = C() b = C() c = D() d = D() e = D() a = b c = d d = e [builtins fixtures/for.pyi] [out] [case testInferringMultipleLvarDefinitionWithImplicitDynamicRvalue] import typing def f() -> None: a, b = g() a.x b.x def g(): pass [case testInferringMultipleLvarDefinitionWithExplicitDynamicRvalue] from typing import Any def f(d: Any) -> None: a, b = d a.x b.x [case testInferringTypesFromIterable] from typing import Iterable class Nums(Iterable[int]): def __iter__(self): pass def __next__(self): pass a, b = Nums() a = b = 1 a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") b = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [builtins fixtures/for.pyi] -- Type variable inference for generic functions -- --------------------------------------------- [case testInferSimpleGenericFunction] from typing import Tuple, TypeVar T = TypeVar('T') a = None # type: A b = None # type: B c = None # type: Tuple[A, object] b = id(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = id(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = id(c) # E: Incompatible types in assignment (expression has type "Tuple[A, object]", variable has type "A") a = id(a) b = id(b) c = id(c) def id(a: T) -> T: pass class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testInferringGenericFunctionTypeForLvar] from typing import TypeVar T = TypeVar('T') def f() -> None: a = id b = None # type: int c = None # type: str b = a(c) # E: Incompatible types in assignment (expression has type "str", variable has type "int") b = a(b) c = a(c) def id(x: T) -> T: return x [out] [case testUnderspecifiedInferenceResult] from typing import TypeVar T = TypeVar('T') class A: pass a = None # type: A def ff() -> None: x = f() # E: Need type annotation for variable reveal_type(x) # E: Revealed type is 'Any' g(None) # Ok f() # Ok because not used to infer local variable type g(a) def f() -> T: pass def g(a: T) -> None: pass [out] [case testInferenceWithMultipleConstraints] from typing import TypeVar T = TypeVar('T') a = None # type: A b 
= None # type: B b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B") b = f(b, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = f(a, b) a = f(b, a) def f(a: T, b: T) -> T: pass class A: pass class B(A): pass [case testInferenceWithMultipleVariables] from typing import Tuple, TypeVar T = TypeVar('T') S = TypeVar('S') a, b = None, None # type: (A, B) taa = None # type: Tuple[A, A] tab = None # type: Tuple[A, B] tba = None # type: Tuple[B, A] taa = f(a, b) # Fail taa = f(b, a) # Fail tba = f(a, b) # Fail tab = f(a, b) tba = f(b, a) def f(a: T, b: S) -> Tuple[T, S]: pass class A: pass class B: pass [builtins fixtures/tuple.pyi] [out] main:9: error: Argument 2 to "f" has incompatible type "B"; expected "A" main:10: error: Argument 1 to "f" has incompatible type "B"; expected "A" main:11: error: Argument 1 to "f" has incompatible type "A"; expected "B" main:11: error: Argument 2 to "f" has incompatible type "B"; expected "A" [case testConstraintSolvingWithSimpleGenerics] from typing import TypeVar, Generic T = TypeVar('T') ao = None # type: A[object] ab = None # type: A[B] ac = None # type: A[C] ab = f(ao) # E: Argument 1 to "f" has incompatible type "A[object]"; expected "A[B]" ao = f(ab) # E: Argument 1 to "f" has incompatible type "A[B]"; expected "A[object]" ab = f(ac) # E: Argument 1 to "f" has incompatible type "A[C]"; expected "A[B]" ab = g(ao) # E: Argument 1 to "g" has incompatible type "A[object]"; expected "A[B]" ao = g(ab) # E: Argument 1 to "g" has incompatible type "A[B]"; expected "A[object]" ab = f(ab) ac = f(ac) ao = f(ao) ab = g(ab) ao = g(ao) def f(a: 'A[T]') -> 'A[T]': pass def g(a: T) -> T: pass class A(Generic[T]): pass class B: pass class C: pass [case testConstraintSolvingFailureWithSimpleGenerics] from typing import TypeVar, Generic T = TypeVar('T') ao = None # type: A[object] ab = None # type: A[B] f(ao, ab) # E: Cannot infer type argument 1 of "f" f(ab, ao) # E: Cannot infer type argument 1 of "f" f(ao, ao) f(ab, ab) def f(a: 'A[T]', b: 'A[T]') -> None: pass class A(Generic[T]): pass class B: pass [case testTypeInferenceWithCalleeDefaultArgs] from typing import TypeVar T = TypeVar('T') a = None # type: A o = None # type: object a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") a = g(a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") o = f() o = f(o) a = f(a) a = g(a) def f(a: T = None) -> T: pass def g(a: T, b: T = None) -> T: pass class A: pass -- Generic function inference with multiple inheritance -- ---------------------------------------------------- [case testGenericFunctionInferenceWithMultipleInheritance] from typing import TypeVar class I: pass class J: pass class A(I, J): pass class B(I, J): pass class C(I): pass class D(J): pass T = TypeVar('T') def f(a: T, b: T) -> T: pass def g(x: I) -> None: pass a = f(A(), C()) g(a) b = f(A(), B()) g(b) c = f(A(), D()) g(c) # E: Argument 1 to "g" has incompatible type "J"; expected "I" d = f(D(), A()) g(d) # E: Argument 1 to "g" has incompatible type "J"; expected "I" e = f(D(), C()) g(e) # E: Argument 1 to "g" has incompatible type "object"; expected "I" [case testGenericFunctionInferenceWithMultipleInheritance2] from typing import TypeVar class I: pass class J: pass class A(I): pass class B(A, J): pass class C(I, J): pass T = TypeVar('T') def f(a: T, b: T) -> T: pass def g(x: I) -> None: pass def h(x: J) -> None: pass a = f(B(), C()) 
g(a) h(a) # E: Argument 1 to "h" has incompatible type "I"; expected "J" b = f(C(), B()) g(b) h(b) # E: Argument 1 to "h" has incompatible type "I"; expected "J" c = f(A(), B()) g(a) h(b) # E: Argument 1 to "h" has incompatible type "I"; expected "J" [case testGenericFunctionInferenceWithMultipleInheritance3] from typing import TypeVar class I: pass class J: pass class K(J): pass class A(K): pass class B(A, I): pass class C(I, J): pass T = TypeVar('T') def f(a: T, b: T) -> T: pass def g(x: K) -> None: pass a = f(B(), C()) g(a) # E: Argument 1 to "g" has incompatible type "J"; expected "K" b = f(A(), C()) g(b) # E: Argument 1 to "g" has incompatible type "J"; expected "K" c = f(A(), B()) g(c) [case testPrecedenceOfFirstBaseAsInferenceResult] from typing import TypeVar from abc import abstractmethod, ABCMeta T = TypeVar('T') a, i, j = None, None, None # type: (A, I, J) a = f(B(), C()) class I(metaclass=ABCMeta): pass class J(metaclass=ABCMeta): pass def f(a: T, b: T) -> T: pass class A: pass class B(A, I, J): pass class C(A, I, J): pass -- Generic function inference with function arguments -- -------------------------------------------------- [case testNonOverloadedMapInference] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') class A: pass b = bool() def f(x: bool) -> A: pass def mymap(f: Callable[[t], s], a: List[t]) -> List[s]: pass l = mymap(f, [b]) l = [A()] lb = [b] l = lb # E: Incompatible types in assignment (expression has type "List[bool]", variable has type "List[A]") [builtins fixtures/for.pyi] [case testGenericFunctionWithTypeTypeAsCallable] from typing import Callable, Type, TypeVar T = TypeVar('T') def f(x: Callable[..., T]) -> T: return x() class A: pass x = None # type: Type[A] y = f(x) reveal_type(y) # E: Revealed type is '__main__.A*' -- Generic function inference with unions -- -------------------------------------- [case testUnionInference] from typing import TypeVar, Union, List T = TypeVar('T') U = TypeVar('U') def f(x: Union[T, int], y: T) -> T: pass f(1, 'a')() # E: "str" not callable f('a', 1)() # E: "object" not callable f('a', 'a')() # E: "str" not callable f(1, 1)() # E: "int" not callable def g(x: Union[T, List[T]]) -> List[T]: pass def h(x: List[str]) -> None: pass g('a')() # E: "List[str]" not callable # The next line is a case where there are multiple ways to satisfy a constraint # involving a Union. Either T = List[str] or T = str would turn out to be valid, # but mypy doesn't know how to branch on these two options (and potentially have # to backtrack later) and defaults to T = . The result is an # awkward error message. Either a better error message, or simply accepting the # call, would be preferable here. g(['a']) # E: Argument 1 to "g" has incompatible type "List[str]"; expected "List[]" h(g(['a'])) def i(x: Union[List[T], List[U]], y: List[T], z: List[U]) -> None: pass a = [1] b = ['b'] i(a, a, b) i(b, a, b) i(a, b, b) # E: Argument 1 to "i" has incompatible type "List[int]"; expected "List[str]" [builtins fixtures/list.pyi] [case testCallableListJoinInference] from typing import Any, Callable def fun() -> None: callbacks = [ callback1, callback2, ] for c in callbacks: call(c, 1234) # this must not fail def callback1(i: int) -> int: return i def callback2(i: int) -> str: return 'hello' def call(c: Callable[[int], Any], i: int) -> None: c(i) [builtins fixtures/list.pyi] [out] [case testCallableMeetAndJoin] # flags: --python-version 3.6 from typing import Callable, Any, TypeVar class A: ... class B(A): ... 
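-- Note: this case exercises joins and meets of callable types. Callable types
-- are contravariant in their argument types and covariant in their return
-- types, so with B a subclass of A as defined above:
--     Callable[[A], int] is a subtype of Callable[[B], int], hence
--         join(Callable[[A], int], Callable[[B], int]) == Callable[[B], int]
--         meet(Callable[[A], A],   Callable[[B], B])   == Callable[[A], B]
-- which is what the reveal_type expectations below check.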
def f(c: Callable[[B], int]) -> None: ... c: Callable[[A], int] d: Callable[[B], int] lst = [c, d] reveal_type(lst) # E: Revealed type is 'builtins.list[def (__main__.B) -> builtins.int]' T = TypeVar('T') def meet_test(x: Callable[[T], int], y: Callable[[T], int]) -> T: ... CA = Callable[[A], A] CB = Callable[[B], B] ca: Callable[[CA], int] cb: Callable[[CB], int] reveal_type(meet_test(ca, cb)) # E: Revealed type is 'def (__main__.A) -> __main__.B' [builtins fixtures/list.pyi] [out] [case testUnionInferenceWithTypeVarValues] from typing import TypeVar, Union AnyStr = TypeVar('AnyStr', bytes, str) def f(x: Union[AnyStr, int], *a: AnyStr) -> None: pass f('foo') f('foo', 'bar') f('foo', b'bar') # E: Value of type variable "AnyStr" of "f" cannot be "object" f(1) f(1, 'foo') f(1, 'foo', b'bar') # E: Value of type variable "AnyStr" of "f" cannot be "object" [builtins fixtures/primitives.pyi] [case testUnionTwoPassInference-skip] from typing import TypeVar, Union, List T = TypeVar('T') U = TypeVar('U') def j(x: Union[List[T], List[U]], y: List[T]) -> List[U]: pass a = [1] b = ['b'] # We could infer: Since List[str] <: List[T], we must have T = str. # Then since List[int] <: Union[List[str], List[U]], and List[int] is # not a subtype of List[str], we must have U = int. # This is not currently implemented. j(a, b) [builtins fixtures/list.pyi] [case testUnionContext] from typing import TypeVar, Union, List T = TypeVar('T') def f() -> List[T]: pass d1 = f() # type: Union[List[int], str] d2 = f() # type: Union[int, str] # E: Incompatible types in assignment (expression has type "List[]", variable has type "Union[int, str]") def g(x: T) -> List[T]: pass d3 = g(1) # type: Union[List[int], List[str]] [builtins fixtures/list.pyi] [case testGenericFunctionSubtypingWithUnions] from typing import TypeVar, Union, List T = TypeVar('T') S = TypeVar('S') def k1(x: int, y: List[T]) -> List[Union[T, int]]: pass def k2(x: S, y: List[T]) -> List[Union[T, int]]: pass a = k2 a = k2 a = k1 # E: Incompatible types in assignment (expression has type "Callable[[int, List[T]], List[Union[T, int]]]", variable has type "Callable[[S, List[T]], List[Union[T, int]]]") b = k1 b = k1 b = k2 [builtins fixtures/list.pyi] [case testAmbiguousUnionContextAndMultipleInheritance] from typing import TypeVar, Union, Generic _T = TypeVar('_T') class T(Generic[_T]): pass class U(Generic[_T]): pass class V(T[_T], U[_T]): pass def wait_for(fut: Union[T[_T], U[_T]]) -> _T: ... reveal_type(wait_for(V[str]())) # E: Revealed type is 'builtins.str*' [case testAmbiguousUnionContextAndMultipleInheritance2] from typing import TypeVar, Union, Generic _T = TypeVar('_T') _S = TypeVar('_S') class T(Generic[_T, _S]): pass class U(Generic[_T, _S]): pass class V(T[_T, _S], U[_T, _S]): pass def wait_for(fut: Union[T[_T, _S], U[_T, _S]]) -> T[_T, _S]: ... 
reveal_type(wait_for(V[int, str]())) \ # E: Revealed type is '__main__.T[builtins.int*, builtins.str*]' -- Literal expressions -- ------------------- [case testDictLiteral] from typing import Dict class A: pass class B: pass def d_ab() -> Dict[A, B]: return {} def d_aa() -> Dict[A, A]: return {} a, b = None, None # type: (A, B) d = {a:b} d = d_ab() d = d_aa() # E: Incompatible types in assignment (expression has type "Dict[A, A]", variable has type "Dict[A, B]") [builtins fixtures/dict.pyi] [case testSetLiteral] from typing import Any, Set a, x = None, None # type: (int, Any) def s_i() -> Set[int]: return set() def s_s() -> Set[str]: return set() s = {a} s = {x} s = s_i() s = s_s() # E: Incompatible types in assignment (expression has type "Set[str]", variable has type "Set[int]") [builtins fixtures/set.pyi] [case testSetWithStarExpr] s = {1, 2, *(3, 4)} t = {1, 2, *s} reveal_type(s) # E: Revealed type is 'builtins.set[builtins.int*]' reveal_type(t) # E: Revealed type is 'builtins.set[builtins.int*]' [builtins fixtures/set.pyi] -- For statements -- -------------- [case testInferenceOfFor1] a, b = None, None # type: (A, B) for x in [A()]: b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = x for y in []: # E: Need type annotation for variable a = y # E: Cannot determine type of 'y' reveal_type(y) # E: Revealed type is 'Any' \ # E: Cannot determine type of 'y' class A: pass class B: pass [builtins fixtures/for.pyi] [case testInferenceOfFor2] a, b, c = None, None, None # type: (A, B, C) for x, (y, z) in [(A(), (B(), C()))]: b = x # Fail c = y # Fail a = z # Fail a = x b = y c = z for xx, yy, zz in [(A(), B())]: # Fail pass for xx, (yy, zz) in [(A(), B())]: # Fail pass for xxx, yyy in [(None, None)]: pass class A: pass class B: pass class C: pass [builtins fixtures/for.pyi] [out] main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") main:5: error: Incompatible types in assignment (expression has type "B", variable has type "C") main:6: error: Incompatible types in assignment (expression has type "C", variable has type "A") main:10: error: Need more than 2 values to unpack (3 expected) main:12: error: '__main__.B' object is not iterable [case testInferenceOfFor3] a, b = None, None # type: (A, B) for x, y in [[A()]]: b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B") b = y # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = x a = y for e, f in [[]]: # E: Need type annotation for variable reveal_type(e) # E: Revealed type is 'Any' \ # E: Cannot determine type of 'e' reveal_type(f) # E: Revealed type is 'Any' \ # E: Cannot determine type of 'f' class A: pass class B: pass [builtins fixtures/for.pyi] [case testForStatementInferenceWithVoid] import typing for x in f(): # E: "f" does not return a value pass def f() -> None: pass [builtins fixtures/for.pyi] [case testReusingInferredForIndex] import typing for a in [A()]: pass a = A() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") for a in []: pass a = A() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: pass class B: pass [builtins fixtures/for.pyi] [case testReusingInferredForIndex2] import typing def f() -> None: for a in [A()]: pass a = A() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") for a in []: pass a = A() a = B() # E: 
Incompatible types in assignment (expression has type "B", variable has type "A") class A: pass class B: pass [builtins fixtures/for.pyi] [out] -- Regression tests -- ---------------- [case testMultipleAssignmentWithPartialDefinition] a = None # type: A x, a = a, a x = a a = x x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") class A: pass [case testMultipleAssignmentWithPartialDefinition2] a = None # type: A a, x = [a, a] x = a a = x x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") class A: pass [builtins fixtures/for.pyi] [case testMultipleAssignmentWithPartialDefinition3] from typing import Any, cast a = None # type: A x, a = cast(Any, a) x = a a = x x = object() a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") class A: pass [case testInferGlobalDefinedInBlock] import typing if A: a = A() a = A() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: pass class B: pass [case testAssigningAnyStrToNone] from typing import Tuple, TypeVar AnyStr = TypeVar('AnyStr', str, bytes) def f(x: AnyStr) -> Tuple[AnyStr]: pass x = None (x,) = f('') reveal_type(x) # E: Revealed type is 'builtins.str' -- Inferring attribute types -- ------------------------- [case testInferAttributeType] import typing class A: a = B() class B: pass A().a = B() A().a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") [case testInferAttributeTypeAndAssignInInit] import typing class A: a = B() def __init__(self) -> None: self.a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") self.a = B() class B: pass [out] [case testInferAttributeInInit] import typing class B: pass class A: def __init__(self) -> None: self.a = A() self.b = B() a = A() a.a = A() a.b = B() a.a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") a.b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") [case testInferAttributeInInitUsingChainedAssignment] import typing class B: pass class A: def __init__(self) -> None: self.a = self.b = A() a = A() a.a = A() a.b = A() a.a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") a.b = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -- Lambdas -- ------- [case testInferLambdaType] from typing import List, Callable li = [1] l = lambda: li f1 = l # type: Callable[[], List[int]] f2 = l # type: Callable[[], List[str]] # E: Incompatible types in assignment (expression has type "Callable[[], List[int]]", variable has type "Callable[[], List[str]]") [builtins fixtures/list.pyi] [case testInferLambdaType2] from typing import List, Callable l = lambda: [B()] f1 = l # type: Callable[[], List[B]] f2 = l # type: Callable[[], List[A]] # E: Incompatible types in assignment (expression has type "Callable[[], List[B]]", variable has type "Callable[[], List[A]]") class A: pass class B: pass [builtins fixtures/list.pyi] [case testUninferableLambda] from typing import TypeVar, Callable X = TypeVar('X') def f(x: Callable[[X], X]) -> X: pass y = f(lambda x: x) # E: Cannot 
infer type argument 1 of "f" [case testUninferableLambdaWithTypeError] from typing import TypeVar, Callable X = TypeVar('X') def f(x: Callable[[X], X], y: str) -> X: pass y = f(lambda x: x, 1) # Fail [out] main:4: error: Cannot infer type argument 1 of "f" main:4: error: Argument 2 to "f" has incompatible type "int"; expected "str" [case testInferLambdaNone] from typing import Callable def f(x: Callable[[], None]) -> None: pass def g(x: Callable[[], int]) -> None: pass a = lambda: None f(a) g(a) b = lambda: None # type: Callable[[], None] f(b) g(b) [case testLambdaDefaultContext] # flags: --strict-optional from typing import Callable def f(a: Callable[..., None] = lambda *a, **k: None): pass def g(a: Callable[..., None] = lambda *a, **k: 1): # E: Incompatible default for argument "a" (default has type "Callable[[VarArg(Any), KwArg(Any)], int]", argument has type "Callable[..., None]") pass [builtins fixtures/dict.pyi] [case testLambdaVarargContext] # Should not crash from typing import Callable def f(a: Callable[[int, int, int], int] = lambda *a, **k: 1): pass [builtins fixtures/dict.pyi] [case testLambdaDeferredSpecialCase] from typing import Callable class A: def f(self) -> None: h(lambda: self.x) def g(self) -> None: self.x = 1 def h(x: Callable[[], int]) -> None: pass -- Boolean operators -- ----------------- [case testOrOperationWithGenericOperands] from typing import List a = None # type: List[A] o = None # type: List[object] a2 = a or [] a = a2 a2 = o # E: Incompatible types in assignment (expression has type "List[object]", variable has type "List[A]") class A: pass [builtins fixtures/list.pyi] -- Accessing variable before its type has been inferred -- ---------------------------------------------------- [case testAccessGlobalVarBeforeItsTypeIsAvailable] import typing x.y # E: Cannot determine type of 'x' x = object() x.y # E: "object" has no attribute "y" [case testAccessDataAttributeBeforeItsTypeIsAvailable] a = None # type: A a.x.y # E: Cannot determine type of 'x' class A: def __init__(self) -> None: self.x = object() a.x.y # E: "object" has no attribute "y" -- Ducktype declarations -- --------------------- [case testListWithDucktypeCompatibility] from typing import List, _promote class A: pass @_promote(A) class B: pass a = None # type: List[A] x1 = [A(), B()] x2 = [B(), A()] x3 = [B(), B()] a = x1 a = x2 a = x3 # E: Incompatible types in assignment (expression has type "List[B]", variable has type "List[A]") [builtins fixtures/list.pyi] [case testListWithDucktypeCompatibilityAndTransitivity] from typing import List, _promote class A: pass @_promote(A) class B: pass @_promote(B) class C: pass a = None # type: List[A] x1 = [A(), C()] x2 = [C(), A()] x3 = [B(), C()] a = x1 a = x2 a = x3 # E: Incompatible types in assignment (expression has type "List[B]", variable has type "List[A]") [builtins fixtures/list.pyi] -- Inferring type of variable when initialized to an empty collection -- ------------------------------------------------------------------ [case testInferListInitializedToEmpty] a = [] a.append(1) a.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [out] [case testInferListInitializedToEmptyUsingUpdate] a = [] a.extend(['']) a.append(0) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "str" [builtins fixtures/list.pyi] [out] [case testInferListInitializedToEmptyAndNotAnnotated] a = [] # E: Need type annotation for variable [builtins fixtures/list.pyi] [out] [case 
testInferListInitializedToEmptyAndReadBeforeAppend] a = [] # E: Need type annotation for variable if a: pass a.xyz a.append('') [builtins fixtures/list.pyi] [out] [case testInferListInitializedToEmptyAndIncompleteTypeInAppend] a = [] # E: Need type annotation for variable a.append([]) a() [builtins fixtures/list.pyi] [out] [case testInferListInitializedToEmptyAndMultipleAssignment] a, b = [], [] a.append(1) b.append('') a() # E: "List[int]" not callable b() # E: "List[str]" not callable [builtins fixtures/list.pyi] [out] [case testInferListInitializedToEmptyInFunction] def f() -> None: a = [] a.append(1) a.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [out] [case testInferListInitializedToEmptyAndNotAnnotatedInFunction] def f() -> None: a = [] # E: Need type annotation for variable def g() -> None: pass a = [] a.append(1) [builtins fixtures/list.pyi] [out] [case testInferListInitializedToEmptyAndReadBeforeAppendInFunction] def f() -> None: a = [] # E: Need type annotation for variable if a: pass a.xyz a.append('') [builtins fixtures/list.pyi] [out] [case testInferListInitializedToEmptyInClassBody] class A: a = [] a.append(1) a.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [out] [case testInferListInitializedToEmptyAndNotAnnotatedInClassBody] class A: a = [] # E: Need type annotation for variable class B: a = [] a.append(1) [builtins fixtures/list.pyi] [out] [case testInferListInitializedToEmptyInMethod] class A: def f(self) -> None: a = [] a.append(1) a.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [out] [case testInferListInitializedToEmptyAndNotAnnotatedInMethod] class A: def f(self) -> None: a = [] # E: Need type annotation for variable [builtins fixtures/list.pyi] [out] [case testInferListInitializedToEmptyInMethodViaAttribute] class A: def f(self) -> None: # Attributes aren't supported right now. 
self.a = [] # E: Need type annotation for variable self.a.append(1) self.a.append('') [builtins fixtures/list.pyi] [out] [case testInferListInitializedToEmptyInClassBodyAndOverriden] from typing import List class A: def __init__(self) -> None: self.x = [] # E: Need type annotation for variable class B(A): @property def x(self) -> List[int]: return [123] [builtins fixtures/list.pyi] [out] [case testInferSetInitializedToEmpty] a = set() a.add(1) a.add('') # E: Argument 1 to "add" of "set" has incompatible type "str"; expected "int" [builtins fixtures/set.pyi] [out] [case testInferSetInitializedToEmptyUsingDiscard] a = set() a.discard('') a.add(0) # E: Argument 1 to "add" of "set" has incompatible type "int"; expected "str" [builtins fixtures/set.pyi] [out] [case testInferSetInitializedToEmptyUsingUpdate] a = set() a.update({0}) a.add('') # E: Argument 1 to "add" of "set" has incompatible type "str"; expected "int" [builtins fixtures/set.pyi] [out] [case testInferDictInitializedToEmpty] a = {} a[1] = '' a() # E: "Dict[int, str]" not callable [builtins fixtures/dict.pyi] [out] [case testInferDictInitializedToEmptyUsingUpdate] a = {} a.update({'': 42}) a() # E: "Dict[str, int]" not callable [builtins fixtures/dict.pyi] [out] [case testInferDictInitializedToEmptyUsingUpdateError] a = {} # E: Need type annotation for variable a.update([1, 2]) a() [builtins fixtures/dict.pyi] [out] [case testInferDictInitializedToEmptyAndIncompleteTypeInUpdate] a = {} # E: Need type annotation for variable a[1] = {} b = {} # E: Need type annotation for variable b[{}] = 1 [builtins fixtures/dict.pyi] [out] [case testInferDictInitializedToEmptyAndUpdatedFromMethod] map = {} def add(): map[1] = 2 [builtins fixtures/dict.pyi] [out] [case testSpecialCaseEmptyListInitialization] def f(blocks: Any): # E: Name 'Any' is not defined to_process = [] # E: Need type annotation for variable to_process = list(blocks) [builtins fixtures/list.pyi] [out] [case testSpecialCaseEmptyListInitialization2] def f(blocks: object): to_process = [] # E: Need type annotation for variable to_process = list(blocks) # E: No overload variant of "list" matches argument types [builtins.object] [builtins fixtures/list.pyi] [out] -- Inferring types of variables first initialized to None (partial types) -- ---------------------------------------------------------------------- [case testLocalVariablePartiallyInitializedToNone] def f() -> None: if object(): x = None else: x = 1 x() # E: "int" not callable [out] [case testLocalVariablePartiallyTwiceInitializedToNone] def f() -> None: if object(): x = None elif object(): x = None else: x = 1 x() # E: "int" not callable [out] [case testLvarInitializedToNoneWithoutType] import typing def f() -> None: a = None a.x() # E: "None" has no attribute "x" [out] [case testGvarPartiallyInitializedToNone] x = None if object(): x = 1 x() # E: "int" not callable [case testPartiallyInitializedToNoneAndThenToPartialList] x = None if object(): # Promote from partial None to partial list. x = [] x.append(1) x.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [case testPartiallyInitializedToNoneAndThenReadPartialList] x = None if object(): # Promote from partial None to partial list. x = [] # E: Need type annotation for variable x [builtins fixtures/list.pyi] [case testPartiallyInitializedToNoneAndPartialListAndLeftPartial] def f() -> None: x = None if object(): # Promote from partial None to partial list. 
x = [] # E: Need type annotation for variable [builtins fixtures/list.pyi] [out] [case testPartiallyInitializedToNoneAndThenToIncompleteType-skip] # TODO(ddfisher): fix partial type bug and re-enable from typing import TypeVar, Dict T = TypeVar('T') def f(*x: T) -> Dict[int, T]: pass x = None # E: Need type annotation for variable if object(): x = f() [builtins fixtures/dict.pyi] [case testPartiallyInitializedVariableDoesNotEscapeScope1] def f() -> None: x = None reveal_type(x) # E: Revealed type is 'builtins.None' x = 1 [out] [case testPartiallyInitializedVariableDoesNotEscapeScope2] x = None def f() -> None: x = None x = 1 x() # E: "None" not callable [case testAttributePartiallyInitializedToNone] class A: def f(self) -> None: self.x = None self.x = 1 self.x() # E: "int" not callable [out] [case testAttributePartiallyInitializedToNoneWithMissingAnnotation] class A: def f(self) -> None: self.x = None def g(self) -> None: self.x = 1 self.x() [out] main:6: error: Incompatible types in assignment (expression has type "int", variable has type "None") main:7: error: "None" not callable [case testGlobalInitializedToNoneSetFromFunction] a = None def f(): global a a = 42 [out] [case testGlobalInitializedToNoneSetFromMethod] a = None class C: def m(self): global a a = 42 [out] -- More partial type errors -- ------------------------ [case testPartialTypeErrorSpecialCase1] # This used to crash. class A: x = None def f(self) -> None: for a in self.x: pass [builtins fixtures/for.pyi] [out] main:5: error: "None" has no attribute "__iter__" [case testPartialTypeErrorSpecialCase2] # This used to crash. class A: x = [] def f(self) -> None: for a in self.x: pass [builtins fixtures/for.pyi] [out] main:3: error: Need type annotation for variable [case testPartialTypeErrorSpecialCase3] class A: x = None def f(self) -> None: for a in A.x: pass [builtins fixtures/for.pyi] [out] main:4: error: "None" has no attribute "__iter__" -- Multipass -- --------- [case testMultipassAndAccessVariableBeforeDefinition] def f() -> None: y = x y() # E: "int" not callable x = 1 [out] [case testMultipassAndAccessInstanceVariableBeforeDefinition] class A: def f(self) -> None: y = self.x y() # E: "int" not callable def g(self) -> None: self.x = 1 [out] [case testMultipassAndTopLevelVariable] y = x # E: Cannot determine type of 'x' y() x = 1+0 [out] [case testMultipassAndDecoratedMethod] from typing import Callable, TypeVar T = TypeVar('T') class A: def f(self) -> None: self.g() # E: Too few arguments for "g" of "A" self.g(1) @dec def g(self, x: str) -> None: pass def dec(f: Callable[[A, str], T]) -> Callable[[A, int], T]: pass [out] [case testMultipassAndDefineAttributeBasedOnNotReadyAttribute] class A: def f(self) -> None: self.y = self.x def g(self) -> None: self.x = 1 def h(self) -> None: self.y() # E: "int" not callable [out] [case testMultipassAndDefineAttributeBasedOnNotReadyAttribute2] class A: def f(self) -> None: self.y = self.x self.z = self.y self.z() # E self.y() # E def g(self) -> None: self.x = 1 def h(self) -> None: self.y() # E [out] main:5: error: "int" not callable main:6: error: "int" not callable main:12: error: "int" not callable [case testMultipassAndPartialTypes] def f() -> None: x = [] y x.append(1) x.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" x.append(y) # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" y = '' [builtins fixtures/list.pyi] [out] [case testMultipassAndPartialTypes2] s = '' n = 0 def f() -> None: global s, n x = 
[] x.append(y) s = x[0] n = x[0] # E: Incompatible types in assignment (expression has type "str", variable has type "int") x.append(1) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "str" y = '' [builtins fixtures/list.pyi] [out] [case testMultipassAndPartialTypes3] from typing import Dict def g(d: Dict[str, int]) -> None: pass def f() -> None: x = {} x[1] = y g(x) # E: Argument 1 to "g" has incompatible type "Dict[int, str]"; expected "Dict[str, int]" x[1] = 1 # E: Incompatible types in assignment (expression has type "int", target has type "str") x[1] = '' y = '' [builtins fixtures/dict.pyi] [out] [case testMultipassAndPartialTypes4] from typing import Dict def g(d: Dict[str, int]) -> None: pass def f() -> None: x = {} y x[1] = 1 g(x) # E: Argument 1 to "g" has incompatible type "Dict[int, int]"; expected "Dict[str, int]" y = '' [builtins fixtures/dict.pyi] [out] [case testMultipassAndCircularDependency] class A: def f(self) -> None: self.x = self.y # E: Cannot determine type of 'y' def g(self) -> None: self.y = self.x [out] [case testMultipassAndPartialTypesSpecialCase1] def f() -> None: y = o x = [] x.append(y) x() # E: "List[int]" not callable o = 1 [builtins fixtures/list.pyi] [out] [case testMultipassAndPartialTypesSpecialCase2] def f() -> None: y = o x = {} x[''] = y x() # E: "Dict[str, int]" not callable o = 1 [builtins fixtures/dict.pyi] [out] [case testMultipassAndPartialTypesSpecialCase3] def f() -> None: x = {} # E: Need type annotation for variable y = o z = {} # E: Need type annotation for variable o = 1 [builtins fixtures/dict.pyi] [out] [case testMultipassAndPartialTypesSpecialCase4] def f() -> None: y = o x = None x = y x() # E: "int" not callable o = 1 [out] [case testMultipassAndPartialTypesSpecialCase5] def f() -> None: x = None y = o x = y x() # E: "int" not callable o = 1 [out] [case testMultipassAndClassAttribute] class S: def foo(self) -> int: return R.X class R: X = 2 [case testMultipassAndMultipleFiles] import m def f() -> None: x() x = 0 [file m.py] def g() -> None: y() y = 0 [out] tmp/m.py:2: error: "int" not callable main:3: error: "int" not callable -- Tests for special cases of unification -- -------------------------------------- [case testUnificationRedundantUnion] from typing import Union a = None # type: Union[int, str] b = None # type: Union[str, tuple] def f(): pass def g(x: Union[int, str]): pass c = a if f() else b g(c) # E: Argument 1 to "g" has incompatible type "Union[int, str, tuple]"; expected "Union[int, str]" [case testUnificationMultipleInheritance] class A: pass class B: def foo(self): pass class C(A, B): pass def f(): pass a1 = B() if f() else C() a1.foo() a2 = C() if f() else B() a2.foo() [case testUnificationMultipleInheritanceAmbiguous] # Show that join_instances_via_supertype() breaks ties using the first base class. 
class A1: pass class B1: def foo1(self): pass class C1(A1, B1): pass class A2: pass class B2: def foo2(self): pass class C2(A2, B2): pass class D1(C1, C2): pass class D2(C2, C1): pass def f(): pass a1 = D1() if f() else D2() a1.foo1() a2 = D2() if f() else D1() a2.foo2() [case testUnificationEmptyListLeft] def f(): pass a = [] if f() else [0] a() # E: "List[int]" not callable [builtins fixtures/list.pyi] [case testUnificationEmptyListRight] def f(): pass a = [0] if f() else [] a() # E: "List[int]" not callable [builtins fixtures/list.pyi] [case testUnificationEmptyListLeftInContext] from typing import List def f(): pass a = [] if f() else [0] # type: List[int] a() # E: "List[int]" not callable [builtins fixtures/list.pyi] [case testUnificationEmptyListRightInContext] # TODO Find an example that really needs the context from typing import List def f(): pass a = [0] if f() else [] # type: List[int] a() # E: "List[int]" not callable [builtins fixtures/list.pyi] [case testUnificationEmptySetLeft] def f(): pass a = set() if f() else {0} a() # E: "Set[int]" not callable [builtins fixtures/set.pyi] [case testUnificationEmptyDictLeft] def f(): pass a = {} if f() else {0: 0} a() # E: "Dict[int, int]" not callable [builtins fixtures/dict.pyi] [case testUnificationEmptyDictRight] def f(): pass a = {0: 0} if f() else {} a() # E: "Dict[int, int]" not callable [builtins fixtures/dict.pyi] [case testUnificationDictWithEmptyListLeft] def f(): pass a = {0: []} if f() else {0: [0]} a() # E: "Dict[int, List[int]]" not callable [builtins fixtures/dict.pyi] [case testUnificationDictWithEmptyListRight] def f(): pass a = {0: [0]} if f() else {0: []} a() # E: "Dict[int, List[int]]" not callable [builtins fixtures/dict.pyi] [case testMisguidedSetItem] from typing import Generic, Sequence, TypeVar T = TypeVar('T') class C(Sequence[T], Generic[T]): pass C[0] = 0 [out] main:4: error: Type expected within [...] main:4: error: Unsupported target for indexed assignment [case testNoCrashOnPartialMember] class C: x = None def __init__(self) -> None: self.x = [] # E: Need type annotation for variable [builtins fixtures/list.pyi] [out] [case testNoCrashOnPartialVariable] from typing import Tuple, TypeVar T = TypeVar('T', bound=str) def f(x: T) -> Tuple[T]: ... x = None (x,) = f('') reveal_type(x) # E: Revealed type is 'builtins.str' [out] [case testNoCrashOnPartialVariable2] from typing import Tuple, TypeVar T = TypeVar('T', bound=str) def f() -> Tuple[T]: ... x = None (x,) = f() [out] [case testNoCrashOnPartialVariable3] from typing import Tuple, TypeVar T = TypeVar('T') def f(x: T) -> Tuple[T, T]: ... 
x = None (x, x) = f('') reveal_type(x) # E: Revealed type is 'builtins.str' [out] [case testInferenceNestedTuplesFromGenericIterable] from typing import Tuple, TypeVar T = TypeVar('T') def make_tuple(elem: T) -> Tuple[T]: return (elem,) def main() -> None: ((a, b),) = make_tuple((1, 2)) reveal_type(a) # E: Revealed type is 'builtins.int' reveal_type(b) # E: Revealed type is 'builtins.int' [builtins fixtures/tuple.pyi] [out] [case testDontMarkUnreachableAfterInferenceUninhabited] from typing import TypeVar T = TypeVar('T') def f() -> T: pass class C: x = f() # E: Need type annotation for variable def m(self) -> str: return 42 # E: Incompatible return value type (got "int", expected "str") if bool(): f() 1 + '' # E: Unsupported left operand type for + ("int") [builtins fixtures/list.pyi] [out] [case testDontMarkUnreachableAfterInferenceUninhabited2] # flags: --strict-optional from typing import TypeVar, Optional T = TypeVar('T') def f(x: Optional[T] = None) -> T: pass class C: x = f() # E: Need type annotation for variable def m(self) -> str: return 42 # E: Incompatible return value type (got "int", expected "str") if bool(): f() 1 + '' # E: Unsupported left operand type for + ("int") [builtins fixtures/list.pyi] [out] [case testDontMarkUnreachableAfterInferenceUninhabited3] from typing import TypeVar, List T = TypeVar('T') def f(x: List[T]) -> T: pass class C: x = f([]) # E: Need type annotation for variable def m(self) -> str: return 42 # E: Incompatible return value type (got "int", expected "str") if bool(): f([]) 1 + '' # E: Unsupported left operand type for + ("int") [builtins fixtures/list.pyi] [out] mypy-0.560/test-data/unit/check-isinstance.test0000644€tŠÔÚ€2›s®0000014153713215007205025663 0ustar jukkaDROPBOX\Domain Users00000000000000[case testForcedAssignment] x = 1 # type: object y = 1 y = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") x = 2 y = x [case testJoinAny] from typing import List, Any x = None # type: List[Any] def foo() -> List[int]: pass def bar() -> List[str]: pass if bool(): x = foo() else: x = bar() x * 2 [builtins fixtures/list.pyi] [case testGeneratorExpressionTypes] class A: y = 1 x = [A()] y = [x] z = [1,2] z = [a.y for b in y for a in b] [builtins fixtures/list.pyi] [case testIsinstanceNestedTuple] from typing import Union, List, Tuple, Dict def f(x: Union[int, str, List]) -> None: if isinstance(x, (str, (int,))): reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' x[1] # E: Value of type "Union[int, str]" is not indexable else: reveal_type(x) # E: Revealed type is 'builtins.list[Any]' x[1] reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' if isinstance(x, (str, (list,))): reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.list[Any]]' x[1] reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' [builtins fixtures/isinstancelist.pyi] [case testClassAttributeInitialization-skip] class A: x = None # type: int def __init__(self) -> None: self.y = None # type: int z = self.x w = self.y [case testAssignmentSubtypes-skip] from typing import Union def foo(x: Union[str, int]): if isinstance(x, int): x = 'a' x + 'a' # Works in the current code z = x # We probably want this to be of type str. y = [x] # But what type should this be? y[0] + 'a' # (1) Should this work? y + [1] # (2) Or this? z = 1 # Also, is this valid? 
x = None # type: int y = [x] [builtins fixtures/isinstancelist.pyi] [case testFunctionDefaultArgs] class A: pass class B(A): y = 1 x = A() def foo(x: A = B()): x.y # E: "A" has no attribute "y" [builtins fixtures/isinstance.pyi] [case testIsinstanceFancyConditionals] class A: pass class B(A): y = 1 x = A() if isinstance(x, B): x.y while isinstance(x, B): x.y while isinstance(x, B): x.y x = B() [builtins fixtures/isinstance.pyi] [case testSubtypingWithAny] class A: y = 1 class B(A): z = 1 def foo(): pass x = A() x = B() x.z x = foo() x.z # E: "A" has no attribute "z" x.y [case testSingleMultiAssignment-skip] x = 'a' (x, ) = ('a',) [case testUnionMultiAssignment] from typing import Union x = None # type: Union[int, str] x = 1 x = 'a' x + 1 # E: Unsupported operand types for + ("str" and "int") x = 1 (x, y) = ('a', 1) x + 1 # E: Unsupported operand types for + ("str" and "int") [builtins fixtures/isinstancelist.pyi] [case testUnionIfZigzag] from typing import Union def f(x: Union[int, str]) -> None: x = 1 if x: x = 'a' x = 1 x + 1 [builtins fixtures/isinstancelist.pyi] [case testTwoLoopsUnion] from typing import Union def foo() -> Union[int, str]: pass def bar() -> None: x = foo() if isinstance(x, int): return while bool(): x + 'a' while bool(): x = foo() if bool(): return x = 'a' x + 'a' [builtins fixtures/isinstancelist.pyi] [case testComplicatedBlocks] from typing import Union def foo() -> Union[int, str]: pass def bar() -> None: x = foo() if isinstance(x, int): return while bool(): x + 'a' while bool(): x = foo() if bool(): return x = 'a' x + 'a' x = foo() if isinstance(x, int): return while bool(): x + 'a' while bool(): x + 'a' # E: Unsupported operand types for + (likely involving Union) x = foo() if bool(): continue x = 'a' x = 'a' x + 'a' [builtins fixtures/isinstancelist.pyi] [case testUnionTryExcept] class A: y = A() class B(A): z = 1 x = A() x = B() x.z try: x.z x = A() x = B() x.z except: pass x.z # E: "A" has no attribute "z" [case testUnionTryExcept2] class A: y = A() class B(A): z = 1 x = A() try: x.z # E: "A" has no attribute "z" x = A() x = B() x.z except: x.z # E: "A" has no attribute "z" x = B() x.z else: x = B() x.z [case testUnionTryExcept3] class A: y = A() class B(A): z = 1 x = A() x = B() try: raise BaseException() x = A() except: pass x.z x = B() try: x = A() raise BaseException() except: pass x.z # E: "A" has no attribute "z" x = B() try: pass except: x = A() raise BaseException() x.z try: x = A() except: pass x.z # E: "A" has no attribute "z" x = B() try: pass except: x = A() x.z # E: "A" has no attribute "z" [builtins fixtures/exception.pyi] [case testUnionTryExcept4] class A: pass class B(A): z = 1 x = A() while bool(): try: x.z # E: "A" has no attribute "z" x = A() except: x = B() else: x = B() x.z [builtins fixtures/exception.pyi] [case testUnionTryFinally] class A: pass class B(A): b = 1 x = A() x = B() try: x = A() x.b # E: "A" has no attribute "b" x = B() finally: x.b # E: "A" has no attribute "b" x.b [case testUnionTryFinally2] class A: pass class B(A): b = 1 x = A() x = B() try: x = A() x = B() except: pass finally: pass x.b # E: "A" has no attribute "b" [case testUnionTryFinally3] class A: pass class B(A): b = 1 x = A() x = B() try: x = A() x = B() except: pass finally: x = B() x.b [case testUnionTryFinally4] class A: pass class B(A): b = 1 while 2: x = A() x = B() try: x = A() x = B() except: pass finally: x.b # E: "A" has no attribute "b" if not isinstance(x, B): break x.b [builtins fixtures/isinstancelist.pyi] [case testUnionTryFinally5] class A: pass 
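-- Note: the testUnionTryFinally cases rely on exceptions being able to
-- interrupt a try body at any point. A finally (or except) block therefore sees
-- the union of the types the variable may have at any point during the try, so
-- narrowing established inside the try body does not survive into finally; only
-- code after the whole statement, reached when the try body ran to completion,
-- gets the narrowed type back. For example:
--     x = B()
--     try:
--         x = A()
--         x = B()
--     finally:
--         x.b  # error: "A" has no attribute "b"
--     x.b      # accepted: reaching this line means the try body completed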
class B(A): b = 1 while 2: x = A() try: x = A() x = B() finally: x.b # E: "A" has no attribute "b" break x.b x.b [case testUnionTryFinally6] class A: pass class B(A): b = 1 def f() -> int: x = B() # type: A try: x = B() except: x = A() # An exception could occur here x = B() finally: return x.b # E: "A" has no attribute "b" [case testUnionListIsinstance] from typing import Union, List def f(x: Union[List[int], List[str], int]) -> None: if isinstance(x, list): a = x[0] if isinstance(a, int): a + 1 a + 'x' # E: Unsupported operand types for + ("int" and "str") # type of a? reveal_type(x) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str]]' x + 1 # E: Unsupported operand types for + (likely involving Union) else: x[0] # E: Value of type "int" is not indexable x + 1 x[0] # E: Value of type "Union[List[int], List[str], int]" is not indexable x + 1 # E: Unsupported operand types for + (likely involving Union) [builtins fixtures/isinstancelist.pyi] [case testUnionListIsinstance2] from typing import Union, List class A: a = 1 class B: pass class C: pass def g(x: Union[A, B]) -> A: pass def h(x: C) -> A: pass def f(x: Union[A, B, C]) -> None: if isinstance(x, C): x = h(x) else: x = g(x) x.a [builtins fixtures/isinstancelist.pyi] [case testUnionStrictDefnBasic] from typing import Union def foo() -> Union[int, str]: pass x = foo() x = 1 x = x + 1 x = foo() x = x + 1 # E: Unsupported operand types for + (likely involving Union) if isinstance(x, str): x = x + 1 # E: Unsupported operand types for + ("str" and "int") x = 1 x = x + 1 [builtins fixtures/isinstancelist.pyi] [case testSubtypeRedefinitionBasic] from typing import Union class A: pass class B(A): y = 1 x = A() x.y # E: "A" has no attribute "y" x = B() x.y # OK: x is known to be a B [builtins fixtures/isinstancelist.pyi] [case testIsInstanceBasic] from typing import Union x = None # type: Union[int, str] if isinstance(x, str): x = x + 1 # E: Unsupported operand types for + ("str" and "int") x = x + 'a' else: x = x + 'a' # E: Unsupported operand types for + ("int" and "str") x = x + 1 [builtins fixtures/isinstancelist.pyi] [case testIsInstanceIndexing] from typing import Union x = None # type: Union[int, str] j = [x] if isinstance(j[0], str): j[0] = j[0] + 'a' j[0] = j[0] + 1 # E: Unsupported operand types for + ("str" and "int") else: j[0] = j[0] + 'a' # E: Unsupported operand types for + ("int" and "str") j[0] = j[0] + 1 [builtins fixtures/isinstancelist.pyi] [case testIsInstanceSubClassMember] from typing import Union class Animal: pass class Dog(Animal): paws = 4 # type: Union[int, str] def bark(self): pass class House: pet = None # type: Animal h = House() h.pet = Dog() while bool(): if isinstance(h.pet, Dog): if isinstance(h.pet.paws, str): x = h.pet.paws + 'a' y = h.pet.paws + 1 # E: Unsupported operand types for + (likely involving Union) z = h.pet.paws + 'a' # E: Unsupported operand types for + (likely involving Union) if isinstance(h.pet.paws, str): x = h.pet.paws + 'a' break y = h.pet.paws + 1 z = h.pet.paws + 'a' # E: Unsupported operand types for + ("int" and "str") [builtins fixtures/isinstancelist.pyi] [case testIsInstanceSubClassReset] class A: pass class B(A): b = 1 class C: a = A() x = C() x.a.b # E: "A" has no attribute "b" if isinstance(x.a, B): x.a.b x = C() x.a.b # E: "A" has no attribute "b" [builtins fixtures/isinstance.pyi] [case testIsinstanceTuple] from typing import Union class A: pass class B: def method2(self, arg: int): return 123 class C: def method2(self, arg: int): return 456 
def method3(self, arg: str): return 'abc' v = A() # type: Union[A, B, C] if isinstance(v, (B, C)): v.method2(123) v.method3('xyz') # E: Item "B" of "Union[B, C]" has no attribute "method3" [builtins fixtures/isinstance.pyi] [case testIsinstanceNeverWidens] from typing import Union class A: pass class B: pass class C: pass a = A() # type: A assert isinstance(a, (A, B)) reveal_type(a) # E: Revealed type is '__main__.A' b = A() # type: Union[A, B] assert isinstance(b, (A, B, C)) reveal_type(b) # E: Revealed type is 'Union[__main__.A, __main__.B]' [builtins fixtures/isinstance.pyi] [case testMemberAssignmentChanges-skip] from typing import Union class Dog: paws = 1 # type: Union[int, str] pet = Dog() pet.paws + 'a' # E: moo pet.paws = 'a' pet.paws + 'a' pet.paws = 1 pet.paws + 1 [builtins fixtures/isinstancelist.pyi] [case testIsInstanceSubClassMemberHard-skip] from typing import Union class Animal: pass class Dog(Animal): paws = 4 # type: Union[int, str] def bark(self): pass class House: pet = None # type: Animal h = House() h.pet = Dog() if isinstance(h.pet, Dog): if isinstance(h.pet.paws, str): for i in [1]: h.pet.paws + 'a' if bool(): break h.pet.paws = 1 h.pet.paws + 1 if isinstance(h.pet.paws, str): h.pet.paws + 'a' else: h.pet.paws + 1 [builtins fixtures/isinstancelist.pyi] [case testIsInstanceReturn] from typing import Union def foo() -> None: x = 1 # type: Union[int, str] if isinstance(x, int): return y = x + 'asdad' def bar() -> None: x = 1 # type: Union[int, str] if isinstance(x, int): return else: pass y = x + 'asdad' foo() [builtins fixtures/isinstancelist.pyi] [case testIsInstanceBadBreak] from typing import Union def foo() -> None: x = None # type: Union[int, str] if isinstance(x, int): for z in [1,2]: break else: pass y = x + 'asdad' # E: Unsupported operand types for + (likely involving Union) foo() [builtins fixtures/isinstancelist.pyi] [case testIsInstanceThreeUnion] from typing import Union, List x = None # type: Union[int, str, List[int]] while bool(): if isinstance(x, int): x + 1 elif isinstance(x, str): x + 'a' else: x + [1] x + 'a' # E: Unsupported operand types for + (likely involving Union) x + [1] # E: Unsupported operand types for + (likely involving Union) [builtins fixtures/isinstancelist.pyi] [case testIsInstanceThreeUnion2] from typing import Union, List x = None # type: Union[int, str, List[int]] while bool(): if isinstance(x, int): x + 1 break elif isinstance(x, str): x + 'a' break x + [1] x + 'a' # E: Unsupported operand types for + ("List[int]" and "str") x + [1] # E: Unsupported operand types for + (likely involving Union) [builtins fixtures/isinstancelist.pyi] [case testIsInstanceThreeUnion3] from typing import Union, List while bool(): x = None # type: Union[int, str, List[int]] x = 1 if isinstance(x, int): x + 1 break elif isinstance(x, str): x + 'a' break x + [1] # These lines aren't reached because x was an int x + 'a' x + [1] # E: Unsupported operand types for + (likely involving Union) [builtins fixtures/isinstancelist.pyi] [case testRemovingTypeRepeatedly] from typing import Union def foo() -> Union[int, str]: pass for i in [1, 2]: x = foo() x + 'a' # E: Unsupported operand types for + (likely involving Union) if isinstance(x, int): break x + 'a' x = foo() x + 'a' # E: Unsupported operand types for + (likely involving Union) if isinstance(x, int): break x + 'a' x = foo() x + 'a' # E: Unsupported operand types for + (likely involving Union) if isinstance(x, int): break x + 'a' x + 'a' # E: Unsupported operand types for + (likely involving Union) 
[builtins fixtures/isinstancelist.pyi] [case testModifyRepeatedly] from typing import Union def foo() -> Union[int, str]: pass x = foo() x + 1 # E: Unsupported operand types for + (likely involving Union) x + 'a' # E: Unsupported operand types for + (likely involving Union) x = 1 x + 1 x + 'a' # E: Unsupported operand types for + ("int" and "str") x = 'a' x + 1 # E: Unsupported operand types for + ("str" and "int") x + 'a' x = foo() x + 1 # E: Unsupported operand types for + (likely involving Union) x + 'a' # E: Unsupported operand types for + (likely involving Union) [builtins fixtures/isinstancelist.pyi] [case testModifyLoop] from typing import Union def foo() -> Union[int, str]: pass x = foo() x + 1 # E: Unsupported operand types for + (likely involving Union) x = 'a' x + 1 # E: Unsupported operand types for + ("str" and "int") x = 1 x + 1 while bool(): x + 1 # E: Unsupported operand types for + (likely involving Union) x = 'a' [builtins fixtures/isinstancelist.pyi] [case testModifyLoop2] from typing import Union def foo() -> Union[int, str]: pass x = foo() x + 1 # E: Unsupported operand types for + (likely involving Union) x = 'a' x + 1 # E: Unsupported operand types for + ("str" and "int") x = 1 x + 1 for i in [1]: x = 'a' x + 1 # E: Unsupported operand types for + (likely involving Union) [builtins fixtures/isinstancelist.pyi] [case testModifyLoop3] from typing import Union def foo() -> Union[int, str]: pass x = foo() x = 1 while bool(): x + 1 x = 'a' break else: x + 1 x + 1 # E: Unsupported operand types for + (likely involving Union) x = 1 for y in [1]: x + 1 x = 'a' break else: x + 1 x + 1 # E: Unsupported operand types for + (likely involving Union) [builtins fixtures/isinstancelist.pyi] [case testModifyLoopWhile4] from typing import Union def foo() -> Union[int, str]: pass x = foo() x = 1 while bool(): x + 1 if bool(): x = 'a' break else: x + 1 x = 'a' x + 'a' x = 1 while bool(): x + 1 # E: Unsupported operand types for + (likely involving Union) if bool(): x = 'a' continue else: x + 1 # E: Unsupported operand types for + (likely involving Union) x = 'a' x + 'a' [builtins fixtures/isinstancelist.pyi] [case testModifyLoopFor4] from typing import Union def foo() -> Union[int, str]: pass x = foo() x = 1 for y in [1]: x + 1 if bool(): x = 'a' break else: x + 1 x = 'a' x + 'a' x = 1 for y in [1]: x + 1 # E: Unsupported operand types for + (likely involving Union) if bool(): x = 'a' continue else: x + 1 # E: Unsupported operand types for + (likely involving Union) x = 'a' x + 'a' [builtins fixtures/isinstancelist.pyi] [case testModifyNestedLoop] from typing import Union def foo() -> Union[int, str]: pass x = foo() x = 1 for y in [1]: for z in [1]: break else: x = 'a' break else: x + 1 x + 1 # E: Unsupported operand types for + (likely involving Union) x = 1 while bool(): while bool(): break else: x = 'a' break else: x + 1 x + 1 # E: Unsupported operand types for + (likely involving Union) [builtins fixtures/isinstancelist.pyi] [case testModifyLoopLong] from typing import Union class A: a = 1 def foo() -> Union[int, str, A]: pass def bar() -> None: x = foo() x + 1 # E: Unsupported left operand type for + (some union) \ # E: Unsupported operand types for + (likely involving Union) if isinstance(x, A): x.a else: if isinstance(x, int): x + 1 x + 'a' # E: Unsupported operand types for + ("int" and "str") else: x + 'a' x.a # E: "str" has no attribute "a" x = A() if isinstance(x, str): x + 'a' else: while bool(): if isinstance(x, int): x + 1 else: x.a break while bool(): if isinstance(x, 
int): x + 1 else: x.a continue while bool(): if isinstance(x, int): x + 1 else: x.a # E: Item "str" of "Union[str, A]" has no attribute "a" x = 'a' [builtins fixtures/isinstancelist.pyi] [case testWhileExitCondition1] from typing import Union x = 1 # type: Union[int, str] while isinstance(x, int): if bool(): continue x = 'a' else: reveal_type(x) # E: Revealed type is 'builtins.str' reveal_type(x) # E: Revealed type is 'builtins.str' [builtins fixtures/isinstance.pyi] [case testWhileExitCondition2] from typing import Union x = 1 # type: Union[int, str] while isinstance(x, int): if bool(): break x = 'a' else: reveal_type(x) # E: Revealed type is 'builtins.str' reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/isinstance.pyi] [case testWhileLinkedList] from typing import Union LinkedList = Union['Cons', 'Nil'] class Nil: pass class Cons: tail = None # type: LinkedList def last(x: LinkedList) -> Nil: while isinstance(x, Cons): x = x.tail return x [builtins fixtures/isinstance.pyi] [case testReturnAndFlow] def foo() -> int: return 1 and 2 return 'a' [case testCastIsinstance] from typing import Union def foo() -> Union[int, str]: pass x = foo() y = 1 # type: int if isinstance(x, str): x = y x + 1 x + 'a' # E: Unsupported operand types for + ("int" and "str") [builtins fixtures/isinstancelist.pyi] [case testUnreachableCode] x = 1 # type: int while bool(): x = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int") break x = 'a' # Note: no error because unreachable code [builtins fixtures/isinstancelist.pyi] [case testUnreachableCode2] x = 1 while bool(): try: pass except: continue else: continue x + 'a' [builtins fixtures/isinstance.pyi] [case testUnreachableWhileTrue] def f(x: int) -> None: while True: if x: return 1() [builtins fixtures/bool.pyi] [case testUnreachableAssertFalse] def f() -> None: assert False 1() [builtins fixtures/bool.pyi] [case testUnreachableAssertFalse2] def f() -> None: # The old parser doesn't understand the syntax below assert False, "hi" 1() [builtins fixtures/bool.pyi] [case testUnreachableReturnOrAssertFalse] def f(x: int) -> int: if x: return x else: assert False 1() [builtins fixtures/bool.pyi] [case testUnreachableTryExcept] def f() -> None: try: f() return except BaseException: return 1() [builtins fixtures/exception.pyi] [case testUnreachableTryExceptElse] def f() -> None: try: f() except BaseException: return else: return 1() [builtins fixtures/exception.pyi] [case testUnreachableTryReturnFinally1] def f() -> None: try: return finally: pass 1() [case testUnreachableTryReturnFinally2] def f() -> None: try: pass finally: return 1() [case testUnreachableTryReturnExceptRaise] def f() -> None: try: return except: raise 1() [case testUnreachableReturnLambda] from typing import Callable def g(t: Callable[[int], int]) -> int: pass def f() -> int: return g(lambda x: x) 1() [case testIsinstanceAnd] class A: pass class B(A): flag = 1 x = B() # type: A if isinstance(x, B) and 1: x.flag [builtins fixtures/isinstancelist.pyi] [case testIsinstanceShortcircuit] class A: pass class B(A): flag = 1 x = B() # type: A if isinstance(x, B) and x.flag: pass if isinstance(x, B) or x.flag: # E: "A" has no attribute "flag" pass if not isinstance(x, B) or x.flag: pass if not isinstance(x, B) and x.flag: # E: "A" has no attribute "flag" pass [builtins fixtures/isinstancelist.pyi] [case testIsinstanceExpression] class A: pass class B(A): flag = 1 x = B() # type: A x.flag if isinstance(x, B) else 0 0 if not 
isinstance(x, B) else x.flag 0 if isinstance(x, B) else x.flag # E: "A" has no attribute "flag" [builtins fixtures/isinstancelist.pyi] [case testIsinstanceMultiAnd] class A: pass class B(A): flag = 1 class C(A): glaf = 1 x = B() # type: A y = C() # type: A if isinstance(x, B) and isinstance(y, C): x.flag += 1 y.glaf += 1 x() # E: "B" not callable y() # E: "C" not callable else: x() # E: "A" not callable y() # E: "A" not callable [builtins fixtures/isinstancelist.pyi] [case testIsinstanceMultiAndSpecialCase] class A: pass class B(A): flag = 1 class C(A): glaf = 1 x = B() # type: A y = C() # type: A if isinstance(x, B) and isinstance(y, int): 1() # type checking skipped if isinstance(y, int) and isinstance(x, B): 1() # type checking skipped if isinstance(y, int) and y > 42: 1() # type checking skipped [builtins fixtures/isinstancelist.pyi] [case testReturnWithCallExprAndIsinstance] from typing import Union def f(x: Union[int, str]) -> None: if not isinstance(x, int): return foo() x() # E: "int" not callable def foo(): pass [builtins fixtures/isinstancelist.pyi] [case testIsinstanceOr1] from typing import Optional def f(a: bool, x: object) -> Optional[int]: if a or not isinstance(x, int): return None reveal_type(x) # E: Revealed type is 'builtins.int' return x [builtins fixtures/isinstance.pyi] [case testIsinstanceOr2] from typing import Optional def g(a: bool, x: object) -> Optional[int]: if not isinstance(x, int) or a: return None reveal_type(x) # E: Revealed type is 'builtins.int' return x [builtins fixtures/isinstance.pyi] [case testIsinstanceOr3] from typing import Optional def h(a: bool, x: object) -> Optional[int]: if a or isinstance(x, int): return None return x # E: Incompatible return value type (got "object", expected "Optional[int]") [builtins fixtures/isinstance.pyi] [case testIsinstanceWithOverlappingUnionType] from typing import Union def f(x: Union[float, int]) -> None: if isinstance(x, float): pass if not isinstance(x, int): f(x) [builtins fixtures/isinstance.pyi] [case testIsinstanceWithOverlappingUnionType2] from typing import Union class A: pass class B(A): pass def f(x: Union[A, B]) -> None: if isinstance(x, A): pass if not isinstance(x, B): f(x) [builtins fixtures/isinstance.pyi] [case testIsinstanceOfSuperclass] class A: pass class B(A): pass x = B() if isinstance(x, A): reveal_type(x) # E: Revealed type is '__main__.B' if not isinstance(x, A): reveal_type(x) # unreachable x = A() reveal_type(x) # E: Revealed type is '__main__.B' [builtins fixtures/isinstance.pyi] [case testIsinstanceOfNonoverlapping] class A: pass class B: pass x = B() if isinstance(x, A): reveal_type(x) # unreachable else: reveal_type(x) # E: Revealed type is '__main__.B' reveal_type(x) # E: Revealed type is '__main__.B' [builtins fixtures/isinstance.pyi] [case testAssertIsinstance] def f(x: object): assert isinstance(x, int) y = 0 # type: int y = x [builtins fixtures/isinstance.pyi] [case testUnionAssertIsinstance] from typing import Union def f(x: Union[str, int]): assert isinstance(x, int) y = 0 # type: int y = x [builtins fixtures/isinstance.pyi] [case testAnyAssertIsinstance] from typing import Any def f(x: Any): assert isinstance(x, int) # this should narrow x to type int x + "foo" # E: Unsupported operand types for + ("int" and "str") [builtins fixtures/isinstance.pyi] [case testIsinstanceOfGenericClassRetainsParameters] from typing import List, Union def f(x: Union[List[int], str]) -> None: if isinstance(x, list): x[0]() # E: "int" not callable else: reveal_type(x) # E: Revealed type is 
'builtins.str' reveal_type(x) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.str]' [builtins fixtures/isinstancelist.pyi] [case testIsinstanceOrIsinstance] class A: pass class B(A): flag = 1 class C(A): flag = 2 x1 = A() if isinstance(x1, B) or isinstance(x1, C): reveal_type(x1) # E: Revealed type is 'Union[__main__.B, __main__.C]' f = x1.flag # type: int else: reveal_type(x1) # E: Revealed type is '__main__.A' f = 0 reveal_type(x1) # E: Revealed type is '__main__.A' x2 = A() if isinstance(x2, A) or isinstance(x2, C): reveal_type(x2) # E: Revealed type is '__main__.A' f = x2.flag # E: "A" has no attribute "flag" else: # unreachable 1() reveal_type(x2) # E: Revealed type is '__main__.A' [builtins fixtures/isinstance.pyi] [case testComprehensionIsInstance] from typing import List, Union a = [] # type: List[Union[int, str]] l = [x for x in a if isinstance(x, int)] g = (x for x in a if isinstance(x, int)) d = {0: x for x in a if isinstance(x, int)} reveal_type(l) # E: Revealed type is 'builtins.list[builtins.int*]' reveal_type(g) # E: Revealed type is 'typing.Iterator[builtins.int*]' reveal_type(d) # E: Revealed type is 'builtins.dict[builtins.int*, builtins.int*]' [builtins fixtures/isinstancelist.pyi] [case testIsinstanceInWrongOrderInBooleanOp] class A: m = 1 def f(x: object) -> None: if x.m and isinstance(x, A) or False: # E: "object" has no attribute "m" pass [builtins fixtures/isinstance.pyi] [case testIsinstanceAndOr] class A: a = None # type: A def f(x: object) -> None: b = isinstance(x, A) and x.a or A() reveal_type(b) # E: Revealed type is '__main__.A' [builtins fixtures/isinstance.pyi] [case testIsInstanceWithUnknownType] from typing import Union def f(x: Union[int, str], typ: type) -> None: if isinstance(x, (typ, int)): x + 1 # E: Unsupported operand types for + (likely involving Union) reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' else: reveal_type(x) # E: Revealed type is 'builtins.str' reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceWithBoundedType] from typing import Union, Type class A: pass def f(x: Union[int, A], a: Type[A]) -> None: if isinstance(x, (a, int)): reveal_type(x) # E: Revealed type is 'Union[builtins.int, __main__.A]' else: reveal_type(x) # E: Revealed type is '__main__.A' reveal_type(x) # E: Revealed type is 'Union[builtins.int, __main__.A]' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceWithEmtpy2ndArg] from typing import Union def f(x: Union[int, str]) -> None: if isinstance(x, ()): reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' else: reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceWithTypeObject] from typing import Union, Type class A: pass def f(x: Union[int, A], a: Type[A]) -> None: if isinstance(x, a): reveal_type(x) # E: Revealed type is '__main__.A' elif isinstance(x, int): reveal_type(x) # E: Revealed type is 'builtins.int' else: reveal_type(x) # E: Revealed type is '__main__.A' reveal_type(x) # E: Revealed type is 'Union[builtins.int, __main__.A]' [builtins fixtures/isinstancelist.pyi] [case testIssubclassUnreachable] from typing import Type, Sequence, Union x: Type[str] if issubclass(x, int): reveal_type(x) # unreachable block class X: pass class Y(X): pass class Z(X): pass a: Union[Type[Y], Type[Z]] if issubclass(a, X): reveal_type(a) # E: Revealed type is 'Union[Type[__main__.Y], Type[__main__.Z]]' 
else: reveal_type(a) # unreachable block [builtins fixtures/isinstancelist.pyi] [case testIssubclasDestructuringUnions1] from typing import Union, List, Tuple, Dict, Type def f(x: Union[Type[int], Type[str], Type[List]]) -> None: if issubclass(x, (str, (int,))): reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str]]' reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str]' x()[1] # E: Value of type "Union[int, str]" is not indexable else: reveal_type(x) # E: Revealed type is 'Type[builtins.list[Any]]' reveal_type(x()) # E: Revealed type is 'builtins.list[Any]' x()[1] reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' if issubclass(x, (str, (list,))): reveal_type(x) # E: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # E: Revealed type is 'Union[builtins.str, builtins.list[Any]]' x()[1] reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' [builtins fixtures/isinstancelist.pyi] [case testIssubclasDestructuringUnions2] from typing import Union, List, Tuple, Dict, Type def f(x: Type[Union[int, str, List]]) -> None: if issubclass(x, (str, (int,))): reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str]]' reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str]' x()[1] # E: Value of type "Union[int, str]" is not indexable else: reveal_type(x) # E: Revealed type is 'Type[builtins.list[Any]]' reveal_type(x()) # E: Revealed type is 'builtins.list[Any]' x()[1] reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' if issubclass(x, (str, (list,))): reveal_type(x) # E: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # E: Revealed type is 'Union[builtins.str, builtins.list[Any]]' x()[1] reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' [builtins fixtures/isinstancelist.pyi] [case testIssubclasDestructuringUnions3] from typing import Union, List, Tuple, Dict, Type def f(x: Type[Union[int, str, List]]) -> None: reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' if issubclass(x, (str, (int,))): reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str]]' reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str]' x()[1] # E: Value of type "Union[int, str]" is not indexable else: reveal_type(x) # E: Revealed type is 'Type[builtins.list[Any]]' reveal_type(x()) # E: Revealed type is 'builtins.list[Any]' x()[1] reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' if issubclass(x, (str, (list,))): reveal_type(x) # E: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]' 
reveal_type(x()) # E: Revealed type is 'Union[builtins.str, builtins.list[Any]]' x()[1] reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]' reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]' [builtins fixtures/isinstancelist.pyi] [case testIssubclass] from typing import Type, ClassVar class Goblin: level: int class GoblinAmbusher(Goblin): job: ClassVar[str] = 'Ranger' def test_issubclass(cls: Type[Goblin]) -> None: if issubclass(cls, GoblinAmbusher): reveal_type(cls) # E: Revealed type is 'Type[__main__.GoblinAmbusher]' cls.level cls.job ga = cls() ga.level = 15 ga.job ga.job = "Warrior" # E: Cannot assign to class variable "job" via instance else: reveal_type(cls) # E: Revealed type is 'Type[__main__.Goblin]' cls.level cls.job # E: "Type[Goblin]" has no attribute "job" g = cls() g.level = 15 g.job # E: "Goblin" has no attribute "job" [builtins fixtures/isinstancelist.pyi] [case testIssubclassDeepHierarchy] from typing import Type, ClassVar class Mob: pass class Goblin(Mob): level: int class GoblinAmbusher(Goblin): job: ClassVar[str] = 'Ranger' def test_issubclass(cls: Type[Mob]) -> None: if issubclass(cls, Goblin): reveal_type(cls) # E: Revealed type is 'Type[__main__.Goblin]' cls.level cls.job # E: "Type[Goblin]" has no attribute "job" g = cls() g.level = 15 g.job # E: "Goblin" has no attribute "job" if issubclass(cls, GoblinAmbusher): reveal_type(cls) # E: Revealed type is 'Type[__main__.GoblinAmbusher]' cls.level cls.job g = cls() g.level = 15 g.job g.job = 'Warrior' # E: Cannot assign to class variable "job" via instance else: reveal_type(cls) # E: Revealed type is 'Type[__main__.Mob]' cls.job # E: "Type[Mob]" has no attribute "job" cls.level # E: "Type[Mob]" has no attribute "level" m = cls() m.level = 15 # E: "Mob" has no attribute "level" m.job # E: "Mob" has no attribute "job" if issubclass(cls, GoblinAmbusher): reveal_type(cls) # E: Revealed type is 'Type[__main__.GoblinAmbusher]' cls.job cls.level ga = cls() ga.level = 15 ga.job ga.job = 'Warrior' # E: Cannot assign to class variable "job" via instance if issubclass(cls, GoblinAmbusher): reveal_type(cls) # E: Revealed type is 'Type[__main__.GoblinAmbusher]' cls.level cls.job ga = cls() ga.level = 15 ga.job ga.job = "Warrior" # E: Cannot assign to class variable "job" via instance [builtins fixtures/isinstancelist.pyi] [case testIssubclassTuple] from typing import Type, ClassVar class Mob: pass class Goblin(Mob): level: int class GoblinAmbusher(Goblin): job: ClassVar[str] = 'Ranger' class GoblinDigger(Goblin): job: ClassVar[str] = 'Thief' def test_issubclass(cls: Type[Mob]) -> None: if issubclass(cls, (Goblin, GoblinAmbusher)): reveal_type(cls) # E: Revealed type is 'Type[__main__.Goblin]' cls.level cls.job # E: "Type[Goblin]" has no attribute "job" g = cls() g.level = 15 g.job # E: "Goblin" has no attribute "job" if issubclass(cls, GoblinAmbusher): cls.level reveal_type(cls) # E: Revealed type is 'Type[__main__.GoblinAmbusher]' cls.job ga = cls() ga.level = 15 ga.job ga.job = "Warrior" # E: Cannot assign to class variable "job" via instance else: reveal_type(cls) # E: Revealed type is 'Type[__main__.Mob]' cls.job # E: "Type[Mob]" has no attribute "job" cls.level # E: "Type[Mob]" has no attribute "level" m = cls() m.level = 15 # E: "Mob" has no attribute "level" m.job # E: "Mob" has no attribute "job" if issubclass(cls, GoblinAmbusher): reveal_type(cls) # E: Revealed type is 'Type[__main__.GoblinAmbusher]' cls.job cls.level ga = 
cls() ga.level = 15 ga.job ga.job = "Warrior" # E: Cannot assign to class variable "job" via instance if issubclass(cls, (GoblinDigger, GoblinAmbusher)): reveal_type(cls) # E: Revealed type is 'Union[Type[__main__.GoblinDigger], Type[__main__.GoblinAmbusher]]' cls.level cls.job g = cls() g.level = 15 g.job g.job = "Warrior" # E: Cannot assign to class variable "job" via instance [builtins fixtures/isinstancelist.pyi] [case testIssubclassBuiltins] from typing import List, Type class MyList(List): pass class MyIntList(List[int]): pass def f(cls: Type[object]) -> None: if issubclass(cls, MyList): reveal_type(cls) # E: Revealed type is 'Type[__main__.MyList]' cls()[0] else: reveal_type(cls) # E: Revealed type is 'Type[builtins.object]' cls()[0] # E: Value of type "object" is not indexable if issubclass(cls, MyIntList): reveal_type(cls) # E: Revealed type is 'Type[__main__.MyIntList]' cls()[0] + 1 [builtins fixtures/isinstancelist.pyi] [case testIsinstanceTypeArgs] from typing import Iterable, TypeVar x = 1 T = TypeVar('T') isinstance(x, Iterable) isinstance(x, Iterable[int]) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, Iterable[T]) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, (int, Iterable[int])) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, (int, (str, Iterable[int]))) # E: Parameterized generics cannot be used with class or instance checks [builtins fixtures/isinstancelist.pyi] [case testIsinstanceTypeArgsAliases] from typing import Iterable, TypeVar x = 1 T = TypeVar('T') It = Iterable It2 = Iterable[T] isinstance(x, It[int]) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, It) isinstance(x, It2[int]) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, It2) # E: Parameterized generics cannot be used with class or instance checks [builtins fixtures/isinstance.pyi] [case testIssubclassTypeArgs] from typing import Iterable, TypeVar x = int T = TypeVar('T') issubclass(x, Iterable) issubclass(x, Iterable[int]) # E: Parameterized generics cannot be used with class or instance checks issubclass(x, Iterable[T]) # E: Parameterized generics cannot be used with class or instance checks issubclass(x, (int, Iterable[int])) # E: Parameterized generics cannot be used with class or instance checks [builtins fixtures/isinstance.pyi] [case testIsinstanceAndNarrowTypeVariable] from typing import TypeVar class A: pass class B(A): pass T = TypeVar('T', bound=A) def f(x: T) -> None: if isinstance(x, B): reveal_type(x) # E: Revealed type is '__main__.B' else: reveal_type(x) # E: Revealed type is 'T`-1' reveal_type(x) # E: Revealed type is 'T`-1' [builtins fixtures/isinstance.pyi] [case testIsinstanceAndTypeType] from typing import Type def f(x: Type[int]) -> None: if isinstance(x, type): reveal_type(x) # E: Revealed type is 'Type[builtins.int]' else: reveal_type(x) # Unreachable reveal_type(x) # E: Revealed type is 'Type[builtins.int]' [builtins fixtures/isinstance.pyi] [case testIsinstanceVariableSubstitution] T = (int, str) U = (list, T) x: object = None if isinstance(x, T): reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' if isinstance(x, U): reveal_type(x) # E: Revealed type is 'Union[builtins.list[Any], builtins.int, builtins.str]' if isinstance(x, (set, (list, T))): reveal_type(x) # E: Revealed type is 'Union[builtins.set[Any], builtins.list[Any], builtins.int, builtins.str]' [builtins 
fixtures/isinstancelist.pyi] [case testIsInstanceTooFewArgs] isinstance() # E: Too few arguments for "isinstance" x: object if isinstance(): # E: Too few arguments for "isinstance" x = 1 reveal_type(x) # E: Revealed type is 'builtins.int' if isinstance(x): # E: Too few arguments for "isinstance" x = 1 reveal_type(x) # E: Revealed type is 'builtins.int' [builtins fixtures/isinstancelist.pyi] [case testIsInstanceTooManyArgs] isinstance(1, 1, 1) # E: Too many arguments for "isinstance" \ # E: Argument 2 to "isinstance" has incompatible type "int"; expected "Union[type, Tuple[Any, ...]]" x: object if isinstance(x, str, 1): # E: Too many arguments for "isinstance" reveal_type(x) # E: Revealed type is 'builtins.object' x = 1 reveal_type(x) # E: Revealed type is 'builtins.int' [builtins fixtures/isinstancelist.pyi] [case testIsinstanceNarrowAny] from typing import Any def narrow_any_to_str_then_reassign_to_int() -> None: v = 1 # type: Any if isinstance(v, str): reveal_type(v) # E: Revealed type is 'builtins.str' v = 2 reveal_type(v) # E: Revealed type is 'Any' [builtins fixtures/isinstance.pyi] [case testNarrowTypeAfterInList] # flags: --strict-optional from typing import List, Optional x: List[int] y: Optional[int] if y in x: reveal_type(y) # E: Revealed type is 'builtins.int' else: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' if y not in x: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' else: reveal_type(y) # E: Revealed type is 'builtins.int' [builtins fixtures/list.pyi] [out] [case testNarrowTypeAfterInListOfOptional] # flags: --strict-optional from typing import List, Optional x: List[Optional[int]] y: Optional[int] if y not in x: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' else: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' [builtins fixtures/list.pyi] [out] [case testNarrowTypeAfterInListNonOverlapping] # flags: --strict-optional from typing import List, Optional x: List[str] y: Optional[int] if y in x: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' else: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' [builtins fixtures/list.pyi] [out] [case testNarrowTypeAfterInListNested] # flags: --strict-optional from typing import List, Optional, Any x: Optional[int] lst: Optional[List[int]] nested_any: List[List[Any]] if lst in nested_any: reveal_type(lst) # E: Revealed type is 'builtins.list[builtins.int]' if x in nested_any: reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]' [builtins fixtures/list.pyi] [out] [case testNarrowTypeAfterInTuple] # flags: --strict-optional from typing import Optional class A: pass class B(A): pass class C(A): pass y: Optional[B] if y in (B(), C()): reveal_type(y) # E: Revealed type is '__main__.B' else: reveal_type(y) # E: Revealed type is 'Union[__main__.B, builtins.None]' [builtins fixtures/tuple.pyi] [out] [case testNarrowTypeAfterInNamedTuple] # flags: --strict-optional from typing import NamedTuple, Optional class NT(NamedTuple): x: int y: int nt: NT y: Optional[int] if y not in nt: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' else: reveal_type(y) # E: Revealed type is 'builtins.int' [builtins fixtures/tuple.pyi] [out] [case testNarrowTypeAfterInDict] # flags: --strict-optional from typing import Dict, Optional x: Dict[str, int] y: Optional[str] if y in x: reveal_type(y) # E: Revealed type is 'builtins.str' else: reveal_type(y) # E: Revealed type 
is 'Union[builtins.str, builtins.None]' if y not in x: reveal_type(y) # E: Revealed type is 'Union[builtins.str, builtins.None]' else: reveal_type(y) # E: Revealed type is 'builtins.str' [builtins fixtures/dict.pyi] [out] [case testNarrowTypeAfterInList_python2] # flags: --strict-optional from typing import List, Optional x = [] # type: List[int] y = None # type: Optional[int] # TODO: Fix running tests on Python 2: "Iterator[int]" has no attribute "next" if y in x: # type: ignore reveal_type(y) # E: Revealed type is 'builtins.int' else: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' if y not in x: # type: ignore reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' else: reveal_type(y) # E: Revealed type is 'builtins.int' [builtins_py2 fixtures/python2.pyi] [out] [case testNarrowTypeAfterInNoAnyOrObject] # flags: --strict-optional from typing import Any, List, Optional x: List[Any] z: List[object] y: Optional[int] if y in x: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' else: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' if y not in z: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' else: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' [typing fixtures/typing-full.pyi] [builtins fixtures/list.pyi] [out] [case testNarrowTypeAfterInUserDefined] # flags: --strict-optional from typing import Container, Optional class C(Container[int]): def __contains__(self, item: object) -> bool: return item is 'surprise' y: Optional[int] # We never trust user defined types if y in C(): reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' else: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' if y not in C(): reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' else: reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' [typing fixtures/typing-full.pyi] [builtins fixtures/list.pyi] [out] [case testNarrowTypeAfterInSet] # flags: --strict-optional from typing import Optional, Set s: Set[str] y: Optional[str] if y in {'a', 'b', 'c'}: reveal_type(y) # E: Revealed type is 'builtins.str' else: reveal_type(y) # E: Revealed type is 'Union[builtins.str, builtins.None]' if y not in s: reveal_type(y) # E: Revealed type is 'Union[builtins.str, builtins.None]' else: reveal_type(y) # E: Revealed type is 'builtins.str' [builtins fixtures/set.pyi] [out] [case testNarrowTypeAfterInTypedDict] # flags: --strict-optional from typing import Optional from mypy_extensions import TypedDict class TD(TypedDict): a: int b: str td: TD def f() -> None: x: Optional[str] if x not in td: return reveal_type(x) # E: Revealed type is 'builtins.str' [typing fixtures/typing-full.pyi] [builtins fixtures/dict.pyi] [out] [case testIsinstanceWidensWithAnyArg] from typing import Any class A: ... B: Any x: A x.foo() # E: "A" has no attribute "foo" assert isinstance(x, B) x.foo() reveal_type(x) # E: Revealed type is 'Any' [builtins fixtures/isinstance.pyi] [case testIsinstanceWidensUnionWithAnyArg] from typing import Any, Union class A: ... 
B: Any x: Union[A, B] reveal_type(x) # E: Revealed type is 'Union[__main__.A, Any]' assert isinstance(x, B) reveal_type(x) # E: Revealed type is 'Any' [builtins fixtures/isinstance.pyi] [case testIsinstanceIgnoredImport] from typing import Union from foo import A # type: ignore def f(x: Union[A, str]) -> None: x.method_only_in_a() # E: Item "str" of "Union[Any, str]" has no attribute "method_only_in_a" if isinstance(x, A): x.method_only_in_a() [builtins fixtures/isinstance.pyi] mypy-0.560/test-data/unit/check-kwargs.test0000644€tŠÔÚ€2›s®0000002717013215007205025015 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for keyword arguments. [case testTypeErrorInKeywordArgument] import typing def f(o: object) -> None: pass f(o=None()) # E: "None" not callable [case testSimpleKeywordArgument] import typing def f(a: 'A') -> None: pass f(a=A()) f(a=object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A" class A: pass [case testTwoKeywordArgumentsNotInOrder] import typing def f(a: 'A', b: 'B') -> None: pass f(b=A(), a=A()) # E: Argument 1 to "f" has incompatible type "A"; expected "B" f(b=B(), a=B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A" f(a=A(), b=B()) f(b=B(), a=A()) class A: pass class B: pass [case testOneOfSeveralOptionalKeywordArguments] import typing def f(a: 'A' = None, b: 'B' = None, c: 'C' = None) -> None: pass f(a=A()) f(b=B()) f(c=C()) f(b=B(), c=C()) f(a=B()) # E: Argument 1 to "f" has incompatible type "B"; expected "Optional[A]" f(b=A()) # E: Argument 1 to "f" has incompatible type "A"; expected "Optional[B]" f(c=B()) # E: Argument 1 to "f" has incompatible type "B"; expected "Optional[C]" f(b=B(), c=A()) # E: Argument 2 to "f" has incompatible type "A"; expected "Optional[C]" class A: pass class B: pass class C: pass [case testBothPositionalAndKeywordArguments] import typing def f(a: 'A', b: 'B') -> None: pass f(A(), b=A()) # E: Argument 2 to "f" has incompatible type "A"; expected "B" f(A(), b=B()) class A: pass class B: pass [case testContextSensitiveTypeInferenceForKeywordArg] from typing import List def f(a: 'A', b: 'List[A]') -> None: pass f(b=[], a=A()) class A: pass [builtins fixtures/list.pyi] [case testGivingSameKeywordArgumentTwice] import typing def f(a: 'A', b: 'B') -> None: pass f(a=A(), b=B(), a=A()) # E: keyword argument repeated class A: pass class B: pass [case testGivingArgumentAsPositionalAndKeywordArg] import typing def f(a: 'A', b: 'B' = None) -> None: pass f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" class A: pass class B: pass [case testGivingArgumentAsPositionalAndKeywordArg2] import typing def f(a: 'A' = None, b: 'B' = None) -> None: pass f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" class A: pass class B: pass [case testPositionalAndKeywordForSameArg] # This used to crash in check_argument_count(). See #1095. 
def f(a: int): pass def g(): f(0, a=1) [out] [case testInvalidKeywordArgument] import typing def f(a: 'A') -> None: pass # N: "f" defined here f(b=object()) # E: Unexpected keyword argument "b" for "f" class A: pass [case testKeywordArgumentsWithDynamicallyTypedCallable] from typing import Any f = None # type: Any f(x=f(), z=None()) # E: "None" not callable f(f, zz=None()) # E: "None" not callable f(x=None) [case testKeywordArgumentWithFunctionObject] from typing import Callable f = None # type: Callable[[A, B], None] f(a=A(), b=B()) f(A(), b=B()) class A: pass class B: pass [out] main:3: error: Unexpected keyword argument "a" main:3: error: Unexpected keyword argument "b" main:4: error: Unexpected keyword argument "b" [case testKeywordOnlyArguments] import typing def f(a: 'A', *, b: 'B' = None) -> None: pass def g(a: 'A', *, b: 'B') -> None: pass def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass def i(a: 'A', *, b: 'B', aa: 'A' = None) -> None: pass f(A(), b=B()) f(b=B(), a=A()) f(A()) f(A(), B()) # E: Too many positional arguments for "f" g(A(), b=B()) g(b=B(), a=A()) g(A()) # E: Missing named argument "b" for "g" g(A(), B()) # E: Too many positional arguments for "g" h(A()) # E: Missing named argument "b" for "h" # E: Missing named argument "aa" for "h" h(A(), b=B()) # E: Missing named argument "aa" for "h" h(A(), aa=A()) # E: Missing named argument "b" for "h" h(A(), b=B(), aa=A()) h(A(), aa=A(), b=B()) i(A()) # E: Missing named argument "b" for "i" i(A(), b=B()) i(A(), aa=A()) # E: Missing named argument "b" for "i" i(A(), b=B(), aa=A()) i(A(), aa=A(), b=B()) class A: pass class B: pass [case testKeywordOnlyArgumentsFastparse] import typing def f(a: 'A', *, b: 'B' = None) -> None: pass def g(a: 'A', *, b: 'B') -> None: pass def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass def i(a: 'A', *, b: 'B', aa: 'A' = None) -> None: pass f(A(), b=B()) f(b=B(), a=A()) f(A()) f(A(), B()) # E: Too many positional arguments for "f" g(A(), b=B()) g(b=B(), a=A()) g(A()) # E: Missing named argument "b" for "g" g(A(), B()) # E: Too many positional arguments for "g" h(A()) # E: Missing named argument "b" for "h" # E: Missing named argument "aa" for "h" h(A(), b=B()) # E: Missing named argument "aa" for "h" h(A(), aa=A()) # E: Missing named argument "b" for "h" h(A(), b=B(), aa=A()) h(A(), aa=A(), b=B()) i(A()) # E: Missing named argument "b" for "i" i(A(), b=B()) i(A(), aa=A()) # E: Missing named argument "b" for "i" i(A(), b=B(), aa=A()) i(A(), aa=A(), b=B()) class A: pass class B: pass [case testKwargsAfterBareArgs] from typing import Tuple, Any def f(a, *, b=None) -> None: pass a = None # type: Any b = None # type: Any f(a, **b) [builtins fixtures/dict.pyi] [case testKeywordArgAfterVarArgs] import typing def f(*a: 'A', b: 'B' = None) -> None: pass f() f(A()) f(A(), A()) f(b=B()) f(A(), b=B()) f(A(), A(), b=B()) f(B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A" f(b=A()) # E: Argument 1 to "f" has incompatible type "A"; expected "Optional[B]" class A: pass class B: pass [builtins fixtures/list.pyi] [case testKeywordArgAfterVarArgsWithBothCallerAndCalleeVarArgs] from typing import List def f(*a: 'A', b: 'B' = None) -> None: pass a = None # type: List[A] f(*a) f(A(), *a) f(b=B()) f(*a, b=B()) f(A(), *a, b=B()) f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A" f(A(), b=A()) # E: Argument 2 to "f" has incompatible type "A"; expected "Optional[B]" f(*a, b=A()) # E: Argument 2 to "f" has 
incompatible type "A"; expected "Optional[B]" class A: pass class B: pass [builtins fixtures/list.pyi] [case testCallingDynamicallyTypedFunctionWithKeywordArgs] import typing def f(x, y=A()): pass f(x=A(), y=A()) f(y=A(), x=A()) f(y=A()) # E: Missing positional argument "x" in call to "f" f(A(), z=A()) # E: Unexpected keyword argument "z" for "f" class A: pass [case testKwargsArgumentInFunctionBody] from typing import Dict, Any def f( **kwargs: 'A') -> None: d1 = kwargs # type: Dict[str, A] d2 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type "Dict[str, A]", variable has type "Dict[A, Any]") d3 = kwargs # type: Dict[Any, str] # E: Incompatible types in assignment (expression has type "Dict[str, A]", variable has type "Dict[Any, str]") class A: pass [builtins fixtures/dict.pyi] [out] [case testKwargsArgumentInFunctionBodyWithImplicitAny] from typing import Dict, Any def f(**kwargs) -> None: d1 = kwargs # type: Dict[str, A] d2 = kwargs # type: Dict[str, str] d3 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type "Dict[str, Any]", variable has type "Dict[A, Any]") class A: pass [builtins fixtures/dict.pyi] [out] [case testCallingFunctionThatAcceptsVarKwargs] import typing def f( **kwargs: 'A') -> None: pass f() f(x=A()) f(y=A(), z=A()) f(x=B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" f(A()) # E: Too many arguments for "f" # Perhaps a better message would be "Too many *positional* arguments..." class A: pass class B: pass [builtins fixtures/dict.pyi] [case testCallingFunctionWithKeywordVarArgs] from typing import Dict def f( **kwargs: 'A') -> None: pass d = None # type: Dict[str, A] f(**d) f(x=A(), **d) d2 = None # type: Dict[str, B] f(**d2) # E: Argument 1 to "f" has incompatible type "**Dict[str, B]"; expected "A" f(x=A(), **d2) # E: Argument 2 to "f" has incompatible type "**Dict[str, B]"; expected "A" class A: pass class B: pass [builtins fixtures/dict.pyi] [case testKwargsAllowedInDunderCall] class Formatter: def __call__(self, message: str, bold: bool = False) -> str: pass formatter = Formatter() formatter("test", bold=True) reveal_type(formatter.__call__) # E: Revealed type is 'def (message: builtins.str, bold: builtins.bool =) -> builtins.str' [builtins fixtures/bool.pyi] [out] [case testKwargsAllowedInDunderCallKwOnly] class Formatter: def __call__(self, message: str, *, bold: bool = False) -> str: pass formatter = Formatter() formatter("test", bold=True) reveal_type(formatter.__call__) # E: Revealed type is 'def (message: builtins.str, *, bold: builtins.bool =) -> builtins.str' [builtins fixtures/bool.pyi] [out] [case testPassingMappingForKeywordVarArg] from typing import Mapping def f(**kwargs: 'A') -> None: pass b = None # type: Mapping d = None # type: Mapping[A, A] m = None # type: Mapping[str, A] f(**d) # E: Keywords must be strings f(**m) f(**b) class A: pass [builtins fixtures/dict.pyi] [case testPassingMappingSubclassForKeywordVarArg] from typing import Mapping class MappingSubclass(Mapping[str, str]): pass def f(**kwargs: 'A') -> None: pass d = None # type: MappingSubclass f(**d) class A: pass [builtins fixtures/dict.pyi] [case testInvalidTypeForKeywordVarArg] from typing import Dict def f(**kwargs: 'A') -> None: pass d = None # type: Dict[A, A] f(**d) # E: Keywords must be strings f(**A()) # E: Argument after ** must be a mapping, not "A" class A: pass [builtins fixtures/dict.pyi] [case testPassingKeywordVarArgsToNonVarArgsFunction] from typing import Any, Dict def f(a: 'A', 
b: 'B') -> None: pass d = None # type: Dict[str, Any] f(**d) d2 = None # type: Dict[str, A] f(**d2) # E: Argument 1 to "f" has incompatible type "**Dict[str, A]"; expected "B" class A: pass class B: pass [builtins fixtures/dict.pyi] [case testBothKindsOfVarArgs] from typing import Any, List, Dict def f(a: 'A', b: 'A') -> None: pass l = None # type: List[Any] d = None # type: Dict[Any, Any] f(*l, **d) class A: pass [builtins fixtures/dict.pyi] [case testKeywordArgumentAndCommentSignature] import typing def f(x): # type: (int) -> str # N: "f" defined here pass f(x='') # E: Argument 1 to "f" has incompatible type "str"; expected "int" f(x=0) f(y=0) # E: Unexpected keyword argument "y" for "f" [case testKeywordArgumentAndCommentSignature2] import typing class A: def f(self, x): # type: (int) -> str # N: "f" of "A" defined here pass A().f(x='') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" A().f(x=0) A().f(y=0) # E: Unexpected keyword argument "y" for "f" of "A" [case testKeywordVarArgsAndCommentSignature] import typing def f(**kwargs): # type: (**int) -> None pass f(z=1) f(x=1, y=1) f(x='', y=1) # E: Argument 1 to "f" has incompatible type "str"; expected "int" f(x=1, y='') # E: Argument 2 to "f" has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCallsWithStars] def f(a: int) -> None: pass s = ('',) f(*s) # E: Argument 1 to "f" has incompatible type "*Tuple[str]"; expected "int" a = {'': 0} f(a) # E: Argument 1 to "f" has incompatible type "Dict[str, int]"; expected "int" f(**a) # okay b = {'': ''} f(b) # E: Argument 1 to "f" has incompatible type "Dict[str, str]"; expected "int" f(**b) # E: Argument 1 to "f" has incompatible type "**Dict[str, str]"; expected "int" c = {0: 0} f(**c) # E: Keywords must be strings [builtins fixtures/dict.pyi] [case testCallStar2WithStar] def f(**k): pass f(*(1, 2)) # E: Too many arguments for "f" [builtins fixtures/dict.pyi] [case testUnexpectedMethodKwargInNestedClass] class A: class B: def __init__(self) -> None: # N: "B" defined here pass A.B(x=1) # E: Unexpected keyword argument "x" for "B" [case testUnexpectedMethodKwargFromOtherModule] import m m.A(x=1) # E: Unexpected keyword argument "x" for "A" [file m.py] class A: def __init__(self) -> None: # N: "A" defined here pass mypy-0.560/test-data/unit/check-lists.test0000644€tŠÔÚ€2›s®0000000401613215007205024647 0ustar jukkaDROPBOX\Domain Users00000000000000-- Nested list assignment -- ----------------------------- [case testNestedListAssignment] from typing import List a1, b1, c1 = None, None, None # type: (A, B, C) a2, b2, c2 = None, None, None # type: (A, B, C) a1, [b1, c1] = a2, [b2, c2] a1, [a1, [b1, c1]] = a2, [a2, [b2, c2]] a1, [a1, [a1, b1]] = a1, [a1, [a1, c1]] # E: Incompatible types in assignment (expression has type "C", variable has type "B") class A: pass class B: pass class C: pass [builtins fixtures/list.pyi] [out] [case testNestedListAssignmentToTuple] from typing import List a, b, c = None, None, None # type: (A, B, C) a, b = [a, b] a, b = [a] # E: Need more than 1 value to unpack (2 expected) a, b = [a, b, c] # E: Too many values to unpack (2 expected, 3 provided) class A: pass class B: pass class C: pass [builtins fixtures/list.pyi] [out] [case testListAssignmentFromTuple] from typing import List a, b, c = None, None, None # type: (A, B, C) t = a, b [a, b], c = t, c [a, c], c = t, c # E: Incompatible types in assignment (expression has type "B", variable has type "C") [a, a, a], c = t, c # E: Need more than 2 values to unpack (3 
expected) [a], c = t, c # E: Too many values to unpack (1 expected, 2 provided) class A: pass class B: pass class C: pass [builtins fixtures/list.pyi] [out] [case testListAssignmentUnequalAmountToUnpack] from typing import List a, b, c = None, None, None # type: (A, B, C) def f() -> None: # needed because test parser tries to parse [a, b] as section header [a, b] = [a, b] [a, b] = [a] # E: Need more than 1 value to unpack (2 expected) [a, b] = [a, b, c] # E: Too many values to unpack (2 expected, 3 provided) class A: pass class B: pass class C: pass [builtins fixtures/list.pyi] [out] [case testListWithStarExpr] (x, *a) = [1, 2, 3] a = [1, *[2, 3]] reveal_type(a) # E: Revealed type is 'builtins.list[builtins.int*]' b = [0, *a] reveal_type(b) # E: Revealed type is 'builtins.list[builtins.int*]' c = [*a, 0] reveal_type(c) # E: Revealed type is 'builtins.list[builtins.int*]' [builtins fixtures/list.pyi] mypy-0.560/test-data/unit/check-modules.test0000644€tŠÔÚ€2›s®0000012564013215007205025170 0ustar jukkaDROPBOX\Domain Users00000000000000-- Type checker test cases dealing with modules and imports. [case testAccessImportedDefinitions] import m import typing m.f() # E: Too few arguments for "f" m.f(object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A" m.x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") m.f(m.A()) m.x = m.A() [file m.py] class A: pass def f(a: A) -> None: pass x = A() [case testAccessImportedDefinitions] import m import typing m.f(object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A" m.f(m.A()) [file m.py] class A: pass def f(a: A) -> None: pass [case testAccessImportedDefinitions2] from m import f, A import typing f(object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A" f(A()) [file m.py] class A: pass def f(a: A) -> None: pass [case testImportedExceptionType] import m import typing try: pass except m.Err: pass except m.Bad: # E: Exception type must be derived from BaseException pass [file m.py] class Err(BaseException): pass class Bad: pass [builtins fixtures/exception.pyi] [case testImportedExceptionType2] from m import Err, Bad import typing try: pass except Err: pass except Bad: # E: Exception type must be derived from BaseException pass [file m.py] class Err(BaseException): pass class Bad: pass [builtins fixtures/exception.pyi] [case testImportWithinBlock] import typing if 1: import m m.a = m.b # E: Incompatible types in assignment (expression has type "B", variable has type "A") m.a = m.a m.f() m.f(m.a) # E: Too many arguments for "f" m.a = m.A() m.a = m.B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [file m.py] class A: pass class B: pass a = A() b = B() def f() -> None: pass [case testImportWithinFunction] import typing def f() -> None: from m import a, b, f, A, B a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = a f() f(a) # E: Too many arguments for "f" a = A() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [file m.py] class A: pass class B: pass a = A() b = B() def f() -> None: pass [out] [case testImportWithinMethod] import typing class C: def f(self) -> None: from m import * a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = a f() f(a) # E: Too many arguments for "f" a = A() a = B() # E: Incompatible types in assignment (expression has type "B", variable has 
type "A") [file m.py] class A: pass class B: pass a = A() b = B() def f() -> None: pass [out] [case testImportWithinClassBody] import typing class C: import m m.f() m.f(C) # E: Too many arguments for "f" [file m.py] def f() -> None: pass [out] [case testImportWithinClassBody2] import typing class C: from m import f f() f(C) # E: Too many arguments for "f" [file m.py] def f() -> None: pass [out] [case testImportWithStub] import _m _m.f("hola") [file _m.pyi] def f(c:str) -> None: pass [out] [case testImportWithStubIncompatibleType] import _m _m.f("hola") _m.f(12) # E: Argument 1 to "f" has incompatible type "int"; expected "str" [file _m.py] def f(c): print(c) [file _m.pyi] def f(c:str) -> None: pass [case testInvalidOperationsOnModules] import m import typing class A: pass m() # E: Module not callable a = m # type: A # E: Incompatible types in assignment (expression has type Module, variable has type "A") m + None # E: Unsupported left operand type for + (Module) [file m.py] [builtins fixtures/module.pyi] [case testNameDefinedInDifferentModule] import m, n import typing m.x # E: Module has no attribute "x" [file m.py] y = object() [file n.py] x = object() [builtins fixtures/module.pyi] [case testChainedAssignmentAndImports] import m i, s = None, None # type: (int, str) i = m.x i = m.y s = m.x # E: Incompatible types in assignment (expression has type "int", variable has type "str") s = m.y # E: Incompatible types in assignment (expression has type "int", variable has type "str") [file m.py] x = y = 1 [builtins fixtures/primitives.pyi] [case testConditionalFunctionDefinitionAndImports] import m import typing m.f(1) m.f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [file m.py] x = object() if x: def f(x: int) -> None: pass else: def f(x: int) -> None: pass [case testTypeCheckWithUnknownModule] import nonexistent None + '' [out] main:1: error: Cannot find module named 'nonexistent' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: Unsupported left operand type for + ("None") [case testTypeCheckWithUnknownModule2] import m, nonexistent None + '' m.x = 1 m.x = '' [file m.py] x = 1 [out] main:1: error: Cannot find module named 'nonexistent' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: Unsupported left operand type for + ("None") main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testTypeCheckWithUnknownModule3] import nonexistent, m None + '' m.x = 1 m.x = '' [file m.py] x = 1 [out] main:1: error: Cannot find module named 'nonexistent' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: Unsupported left operand type for + ("None") main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testTypeCheckWithUnknownModule4] import nonexistent, another None + '' [out] main:1: error: Cannot find module named 'nonexistent' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:1: error: Cannot find module named 'another' main:2: error: Unsupported left operand type for + ("None") [case testTypeCheckWithUnknownModule5] import nonexistent as x None + '' [out] main:1: error: Cannot find module named 'nonexistent' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: Unsupported left 
operand type for + ("None") [case testTypeCheckWithUnknownModuleUsingFromImport] from nonexistent import x None + '' [out] main:1: error: Cannot find module named 'nonexistent' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: Unsupported left operand type for + ("None") [case testTypeCheckWithUnknownModuleUsingImportStar] from nonexistent import * None + '' [out] main:1: error: Cannot find module named 'nonexistent' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: Unsupported left operand type for + ("None") [case testAccessingUnknownModule] import xyz xyz.foo() xyz() [out] main:1: error: Cannot find module named 'xyz' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testAccessingUnknownModule2] import xyz, bar xyz.foo() bar() [out] main:1: error: Cannot find module named 'xyz' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:1: error: Cannot find module named 'bar' [case testAccessingUnknownModule3] import xyz as z xyz.foo() z() [out] main:1: error: Cannot find module named 'xyz' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: Name 'xyz' is not defined [case testAccessingNameImportedFromUnknownModule] from xyz import y, z y.foo() z() [out] main:1: error: Cannot find module named 'xyz' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testAccessingNameImportedFromUnknownModule2] from xyz import * y [out] main:1: error: Cannot find module named 'xyz' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: Name 'y' is not defined [case testAccessingNameImportedFromUnknownModule3] from xyz import y as z y z [out] main:1: error: Cannot find module named 'xyz' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: Name 'y' is not defined [case testUnknownModuleRedefinition] import xab def xab(): pass [out] main:1: error: Cannot find module named 'xab' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testAccessingUnknownModuleFromOtherModule] import x x.nonexistent.foo x.z [file x.py] import nonexistent [builtins fixtures/module.pyi] [out] tmp/x.py:1: error: Cannot find module named 'nonexistent' tmp/x.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:3: error: Module has no attribute "z" [case testUnknownModuleImportedWithinFunction] def f(): import foobar def foobar(): pass foobar('') [out] main:2: error: Cannot find module named 'foobar' main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:4: error: Too many arguments for "foobar" [case testUnknownModuleImportedWithinFunction2] def f(): from foobar import x def x(): pass x('') [out] main:2: error: Cannot find module named 'foobar' main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:4: error: Too many arguments for "x" [case testRelativeImports] import typing import m.a m.a.x = m.a.y # Error [file m/__init__.py] [file m/a.py] import typing from .b import A, B, x, y z = x z = y # Error [file m/b.py] import typing class A: pass class B: pass x = A() y = B() [out] tmp/m/a.py:4: error: 
Incompatible types in assignment (expression has type "B", variable has type "A") main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testRelativeImports2] import typing import m.a m.a.x = m.a.y # E: Incompatible types in assignment (expression has type "B", variable has type "A") [file m/__init__.py] [file m/a.py] import typing from .b import A, B, x, y [file m/b.py] import typing class A: pass class B: pass x = A() y = B() [case testExportedValuesInImportAll] import typing from m import * _ = a _ = b _ = c _ = d _ = e _ = f # E: Name 'f' is not defined _ = _g # E: Name '_g' is not defined [file m.py] __all__ = ['a'] __all__ += ('b',) __all__.append('c') __all__.extend(('d', 'e')) a = b = c = d = e = f = _g = 1 [builtins fixtures/module_all.pyi] [case testAllMustBeSequenceStr] import typing __all__ = [1, 2, 3] [builtins fixtures/module_all.pyi] [out] main:2: error: Type of __all__ must be "Sequence[str]", not "List[int]" [case testAllMustBeSequenceStr_python2] import typing __all__ = [1, 2, 3] [builtins_py2 fixtures/module_all_python2.pyi] [out] main:2: error: Type of __all__ must be "Sequence[unicode]", not "List[int]" [case testAllUnicodeSequenceOK_python2] import typing __all__ = [u'a', u'b', u'c'] [builtins_py2 fixtures/module_all_python2.pyi] [out] [case testUnderscoreExportedValuesInImportAll] import typing from m import * _ = a _ = _b _ = __c__ _ = ___d _ = e _ = f # E: Name 'f' is not defined _ = _g # E: Name '_g' is not defined [file m.py] __all__ = ['a'] __all__ += ('_b',) __all__.append('__c__') __all__.extend(('___d', 'e')) a = _b = __c__ = ___d = e = f = _g = 1 [builtins fixtures/module_all.pyi] [case testEllipsisInitializerInStubFileWithType] import m m.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [file m.pyi] x = ... # type: int [case testEllipsisInitializerInStubFileWithoutType] import m m.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "ellipsis") [file m.pyi] # Ellipsis is only special with a # type: comment (not sure though if this is great) x = ... [case testEllipsisInitializerInModule] x = ... # type: int # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "int") [case testEllipsisDefaultArgValueInStub] import m m.f(1) m.f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [file m.pyi] def f(x: int = ...) -> None: pass [case testEllipsisDefaultArgValueInStub2] import m def f(x: int = ...) -> None: pass [file m.pyi] def g(x: int = '') -> None: pass [out] tmp/m.pyi:1: error: Incompatible default for argument "x" (default has type "str", argument has type "int") main:2: error: Incompatible default for argument "x" (default has type "ellipsis", argument has type "int") [case testEllipsisDefaultArgValueInNonStub] def f(x: int = ...) 
-> None: pass # E: Incompatible default for argument "x" (default has type "ellipsis", argument has type "int") [out] [case testStarImportOverlapping] from m1 import * from m2 import * j = '' [file m1.py] x = 1 [file m2.py] x = 1 [case testStarImportOverlappingMismatch] from m1 import * from m2 import * # E: Incompatible import of "x" (imported name has type "int", local name has type "str") j = '' [file m1.py] x = '' [file m2.py] x = 1 [case testStarImportOverridingLocalImports-skip] from m1 import * from m2 import * x = '' # E: TODO (cannot assign str to int) [file m1.py] x = 1 [file m2.py] x = 1 [case testAssignToFuncDefViaImport] from m import * # E: Incompatible import of "x" (imported name has type "int", local name has type "str") f = None x = '' [file m.py] def f(): pass x = 1+0 [out] -- Conditional definitions and function redefinitions via module object -- -------------------------------------------------------------------- [case testConditionalImportAndAssign] try: from m import x except: x = None try: from m import x as y except: y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [file m.py] x = '' [case testAssignAndConditionalImport] x = '' try: from m import x except: pass y = 1 try: from m import x as y # E: Incompatible import of "y" (imported name has type "str", local name has type "int") except: pass [file m.py] x = '' [case testAssignAndConditionalStarImport] x = '' y = 1 try: from m import * # E: Incompatible import of "y" (imported name has type "str", local name has type "int") except: pass [file m.py] x = '' y = '' [case testRedefineImportedFunctionViaImport] try: from m import f, g except: def f(x): pass def g(x): pass # E: All conditional function variants must have identical signatures [file m.py] def f(x): pass def g(x, y): pass [case testImportedVariableViaImport] try: from m import x except: from n import x # E: Incompatible import of "x" (imported name has type "str", local name has type "int") [file m.py] x = 1 [file n.py] x = '' [case testRedefineFunctionViaImport] def f(x): pass def g(x): pass try: from m import f, g # E: Incompatible import of "g" (imported name has type "Callable[[Any, Any], Any]", local name has type "Callable[[Any], Any]") except: pass [file m.py] def f(x): pass def g(x, y): pass [case testImportVariableAndAssignNone] try: from m import x except: x = None [file m.py] x = 1 [case testImportFunctionAndAssignNone] try: from m import f except: f = None [file m.py] def f(): pass [case testImportFunctionAndAssignFunction] def g(x): pass try: from m import f except: f = g [file m.py] def f(x): pass [case testImportFunctionAndAssignIncompatible] try: from m import f except: f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]") [file m.py] def f(): pass [case testAssignToFuncDefViaGlobalDecl2] import typing from m import f def g() -> None: global f f = None f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]") [file m.py] def f(): pass [out] [case testAssignToFuncDefViaNestedModules] import m.n m.n.f = None m.n.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]") [file m/__init__.py] [file m/n.py] def f(): pass [out] [case testAssignToFuncDefViaModule] import m m.f = None m.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]") [file m.py] def f(): pass [out] [case 
testConditionalImportAndAssignNoneToModule] if object(): import m else: m = None m.f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str" [file m.py] def f(x: str) -> None: pass [builtins fixtures/module.pyi] [out] [case testConditionalImportAndAssignInvalidToModule] if object(): import m else: m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Module) [file m.py] [builtins fixtures/module.pyi] [out] [case testImportAndAssignToModule] import m m = None m.f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str" [file m.py] def f(x: str) -> None: pass [builtins fixtures/module.pyi] [out] -- Test cases that simulate 'mypy -m modname' -- -- The module name to import is encoded in a comment. [case testTypeCheckNamedModule] # cmd: mypy -m m.a [file m/__init__.py] None + 1 [file m/a.py] [out] tmp/m/__init__.py:1: error: Unsupported left operand type for + ("None") [case testTypeCheckNamedModule2] # cmd: mypy -m m.a [file m/__init__.py] [file m/a.py] None + 1 [out] tmp/m/a.py:1: error: Unsupported left operand type for + ("None") [case testTypeCheckNamedModule3] # cmd: mypy -m m [file m/__init__.py] None + 1 [file m/a.py] [out] tmp/m/__init__.py:1: error: Unsupported left operand type for + ("None") [case testTypeCheckNamedModule4] # cmd: mypy -m m [file m/__init__.py] [file m/a.py] None + 1 # Not analyzed. [out] [case testTypeCheckNamedModule5] # cmd: mypy -m m None + '' # Not analyzed. [file m.py] None + 1 [out] tmp/m.py:1: error: Unsupported left operand type for + ("None") [case testTypeCheckNamedModuleWithImportCycle] # cmd: mypy -m m.a None + 1 # Does not generate error, as this file won't be analyzed. [file m/__init__.py] import m.a [file m/a.py] [out] -- Checks dealing with submodules and different kinds of imports -- ------------------------------------------------------------- [case testSubmoduleRegularImportAddsAllParents] import a.b.c reveal_type(a.value) # E: Revealed type is 'builtins.int' reveal_type(a.b.value) # E: Revealed type is 'builtins.str' reveal_type(a.b.c.value) # E: Revealed type is 'builtins.float' b.value # E: Name 'b' is not defined c.value # E: Name 'c' is not defined [file a/__init__.py] value = 3 [file a/b/__init__.py] value = "a" [file a/b/c.py] value = 3.2 [out] [case testSubmoduleImportAsDoesNotAddParents] import a.b.c as foo reveal_type(foo.value) # E: Revealed type is 'builtins.float' a.value # E: Name 'a' is not defined b.value # E: Name 'b' is not defined c.value # E: Name 'c' is not defined [file a/__init__.py] value = 3 [file a/b/__init__.py] value = "a" [file a/b/c.py] value = 3.2 [out] [case testSubmoduleImportFromDoesNotAddParents] from a import b reveal_type(b.value) # E: Revealed type is 'builtins.str' b.c.value # E: Module has no attribute "c" a.value # E: Name 'a' is not defined [file a/__init__.py] value = 3 [file a/b/__init__.py] value = "a" [file a/b/c.py] value = 3.2 [builtins fixtures/module.pyi] [out] [case testSubmoduleImportFromDoesNotAddParents2] from a.b import c reveal_type(c.value) # E: Revealed type is 'builtins.float' a.value # E: Name 'a' is not defined b.value # E: Name 'b' is not defined [file a/__init__.py] value = 3 [file a/b/__init__.py] value = "a" [file a/b/c.py] value = 3.2 [out] [case testSubmoduleRegularImportNotDirectlyAddedToParent] import a.b.c def accept_float(x: float) -> None: pass accept_float(a.b.c.value) [file a/__init__.py] value = 3 b.value a.b.value [file a/b/__init__.py] value = "a" c.value a.b.c.value [file a/b/c.py] value = 3.2 [out] 
tmp/a/b/__init__.py:2: error: Name 'c' is not defined tmp/a/b/__init__.py:3: error: Name 'a' is not defined tmp/a/__init__.py:2: error: Name 'b' is not defined tmp/a/__init__.py:3: error: Name 'a' is not defined [case testSubmoduleMixingLocalAndQualifiedNames] from a.b import MyClass val1 = None # type: a.b.MyClass # E: Name 'a' is not defined val2 = None # type: MyClass [file a/__init__.py] [file a/b.py] class MyClass: pass [out] [case testSubmoduleMixingImportFrom] import parent.child [file parent/__init__.py] [file parent/common.py] class SomeClass: pass [file parent/child.py] from parent.common import SomeClass from parent import common foo = parent.common.SomeClass() [builtins fixtures/module.pyi] [out] tmp/parent/child.py:3: error: Name 'parent' is not defined [case testSubmoduleMixingImportFromAndImport] import parent.child [file parent/__init__.py] [file parent/common.py] class SomeClass: pass [file parent/unrelated.py] class ShouldNotLoad: pass [file parent/child.py] from parent.common import SomeClass import parent # Note, since this might be unintuitive -- when `parent.common` is loaded in any way, # shape, or form, it's added to `parent`'s namespace, which is why the below line # succeeds. foo = parent.common.SomeClass() reveal_type(foo) bar = parent.unrelated.ShouldNotLoad() [builtins fixtures/module.pyi] [out] tmp/parent/child.py:8: error: Revealed type is 'parent.common.SomeClass' tmp/parent/child.py:9: error: Module has no attribute "unrelated" [case testSubmoduleMixingImportFromAndImport2] import parent.child [file parent/__init__.py] [file parent/common.py] class SomeClass: pass [file parent/child.py] from parent import common import parent foo = parent.common.SomeClass() reveal_type(foo) [builtins fixtures/module.pyi] [out] tmp/parent/child.py:4: error: Revealed type is 'parent.common.SomeClass' -- Tests repeated imports [case testIdenticalImportFromTwice] from a import x, y, z from b import x, y, z [file a.py] from common import x, y, z [file b.py] from common import x, y, z [file common.py] x = 3 def y() -> int: return 3 class z: pass [out] [case testIdenticalImportStarTwice] from a import * from b import * [file a.py] from common import x, y, z [file b.py] from common import x, y, z [file common.py] x = 3 def y() -> int: return 3 class z: pass [out] [case testDifferentImportSameNameTwice] from a import x, y, z from b import x, y, z [file a.py] x = 3 def y() -> int: return 1 class z: pass [file b.py] x = "foo" def y() -> str: return "foo" class z: pass [out] main:2: error: Incompatible import of "x" (imported name has type "str", local name has type "int") main:2: error: Incompatible import of "y" (imported name has type "Callable[[], str]", local name has type "Callable[[], int]") main:2: error: Incompatible import of "z" (imported name has type "Type[b.z]", local name has type "Type[a.z]") -- Misc [case testInheritFromBadImport] # cmd: mypy -m bar [file foo.py] pass [file bar.py] from foo import B class C(B): pass [out] tmp/bar.py:1: error: Module 'foo' has no attribute 'B' [case testImportSuppressedWhileAlmostSilent] # cmd: mypy -m main # flags: --follow-imports=error [file main.py] import mod [file mod.py] [builtins fixtures/module.pyi] [out] tmp/main.py:1: note: Import of 'mod' ignored tmp/main.py:1: note: (Using --follow-imports=error, module not passed on command line) [case testAncestorSuppressedWhileAlmostSilent] # cmd: mypy -m foo.bar # flags: --follow-imports=error [file foo/bar.py] [file foo/__init__.py] [builtins fixtures/module.pyi] [out] tmp/foo/bar.py: 
note: Ancestor package 'foo' ignored tmp/foo/bar.py: note: (Using --follow-imports=error, submodule passed on command line) [case testStubImportNonStubWhileSilent] # cmd: mypy -m main # flags: --follow-imports=skip [file main.py] from stub import x, z # Followed from other import y # Not followed x + '' # No error here y + '' # No error here z + '' # Error here [file stub.pyi] from non_stub import x as x # this import is not followed z = 42 [file non_stub.py] x = 42 x + '' # no error because file is not analyzed [file other.py] y = 42 [builtins fixtures/module.pyi] [out] tmp/main.py:5: error: Unsupported left operand type for + ("int") [case testSilentSubmoduleImport] # cmd: mypy -m foo # flags: --follow-imports=skip [file foo/__init__.py] from foo import bar [file foo/bar.py] pass [case testSuperclassInImportCycle] import a import d a.A().f(d.D()) [file a.py] if 0: import d class B: pass class C(B): pass class A: def f(self, x: B) -> None: pass [file d.py] import a class D(a.C): pass [case testSuperclassInImportCycleReversedImports] import d import a a.A().f(d.D()) [file a.py] if 0: import d class B: pass class C(B): pass class A: def f(self, x: B) -> None: pass [file d.py] import a class D(a.C): pass [case testPreferPackageOverFile] import a [file a.py] / # intentional syntax error -- this file shouldn't be parsed [file a/__init__.py] pass [out] [case testPreferPackageOverFile2] from a import x [file a.py] / # intentional syntax error -- this file shouldn't be parsed [file a/__init__.py] x = 0 [out] [case testImportInClass] class C: import foo reveal_type(C.foo.bar) # E: Revealed type is 'builtins.int' [file foo.py] bar = 0 [builtins fixtures/module.pyi] [out] [case testIfFalseImport] if False: import a def f(x: 'a.A') -> int: return x.f() [file a.py] class A: def f(self) -> int: return 0 [builtins fixtures/bool.pyi] -- Test stability under import cycles -- ---------------------------------- -- The first two tests are identical except one main has 'import x' -- and the other 'import y'. Previously (before build.order_ascc() -- was added) one of these would fail because the imports were -- processed in the (reverse) order in which the files were -- encountered. [case testImportCycleStability1] import x [file x.py] def f() -> str: return '' class Base: attr = f() def foo(): import y [file y.py] import x class Sub(x.Base): attr = x.Base.attr [out] [case testImportCycleStability2] import y [file x.py] def f() -> str: return '' class Base: attr = f() def foo(): import y [file y.py] import x class Sub(x.Base): attr = x.Base.attr [out] -- This case isn't fixed by order_ascc(), but is fixed by the -- lightweight type inference added to semanal.py -- (analyze_simple_literal_type()). [case testImportCycleStability3] import y [file x.py] class Base: pass def foo() -> int: import y reveal_type(y.Sub.attr) return y.Sub.attr [file y.py] import x class Sub(x.Base): attr = 0 [out] tmp/x.py:5: error: Revealed type is 'builtins.int' -- This case has a symmetrical cycle, so it doesn't matter in what -- order the files are processed. It depends on the lightweight type -- inference. [case testImportCycleStability4] import x [file x.py] import y class C: attr = '' def foo() -> int: return y.D.attr [file y.py] import x class D: attr = 0 def bar() -> str: return x.C.attr -- These cases test all supported literal types.
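--
-- A sketch of the pattern these stability cases depend on (hypothetical modules,
-- not an actual test case): mypy's lightweight inference of simple literal
-- initializers (analyze_simple_literal_type() in semanal.py) assigns types to
-- attributes like the ones below during semantic analysis, so the other half of
-- an import cycle can use them no matter which file is processed first.
--
--   # x.py -- reads attributes of y from inside the cycle
--   import y
--   def use() -> int:
--       return y.iattr + 1  # already known to be int
--
--   # y.py -- completes the cycle; literal initializers fix the types early
--   import x
--   iattr = 0    # int
--   fattr = 0.0  # float
--   sattr = ''   # str
--   battr = b''  # bytes
--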
[case testImportCycleStability5] import y [file x.py] class Base: pass def foo() -> None: import y i = y.Sub.iattr # type: int f = y.Sub.fattr # type: float s = y.Sub.sattr # type: str b = y.Sub.battr # type: bytes [file y.py] import x class Sub(x.Base): iattr = 0 fattr = 0.0 sattr = '' battr = b'' [out] [case testImportCycleStability6_python2] import y [file x.py] class Base: pass def foo(): # type: () -> None import y i = y.Sub.iattr # type: int f = y.Sub.fattr # type: float s = y.Sub.sattr # type: str u = y.Sub.uattr # type: unicode [file y.py] import x class Sub(x.Base): iattr = 0 fattr = 0.0 sattr = '' uattr = u'' [out] -- This case tests module-level variables. [case testImportCycleStability7] import x [file x.py] def foo() -> int: import y reveal_type(y.value) return y.value [file y.py] import x value = 12 [out] tmp/x.py:3: error: Revealed type is 'builtins.int' -- This is not really cycle-related but still about the lightweight -- type checker. [case testImportCycleStability8] x = 1 # type: str reveal_type(x) [out] main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") main:2: error: Revealed type is 'builtins.str' -- Tests for cross-module second_pass checking. [case testSymmetricImportCycle1] import a [file a.py] import b def f() -> int: return b.x y = 0 + 0 [file b.py] import a def g() -> int: reveal_type(a.y) return a.y x = 1 + 1 [out] tmp/b.py:3: error: Revealed type is 'builtins.int' [case testSymmetricImportCycle2] import b [file a.py] import b def f() -> int: reveal_type(b.x) return b.x y = 0 + 0 [file b.py] import a def g() -> int: return a.y x = 1 + 1 [out] tmp/a.py:3: error: Revealed type is 'builtins.int' [case testThreePassesRequired] import b [file a.py] import b class C: def f1(self) -> None: self.x2 def f2(self) -> None: self.x2 = b.b [file b.py] import a b = 1 + 1 [out] tmp/a.py:4: error: Cannot determine type of 'x2' [case testErrorInPassTwo1] import b [file a.py] import b def f() -> None: a = b.x + 1 a + '' [file b.py] import a x = 1 + 1 [out] tmp/a.py:4: error: Unsupported operand types for + ("int" and "str") [case testErrorInPassTwo2] import a [file a.py] import b def f() -> None: a = b.x + 1 a + '' [file b.py] import a x = 1 + 1 [out] tmp/a.py:4: error: Unsupported operand types for + ("int" and "str") [case testDeferredDecorator] import a [file a.py] import b def g() -> None: f('') @b.deco def f(a: str) -> int: pass reveal_type(f) x = 1 + 1 [file b.py] from typing import Callable, TypeVar import a T = TypeVar('T') def deco(f: Callable[[T], int]) -> Callable[[T], int]: a.x return f [out] tmp/a.py:6: error: Revealed type is 'def (builtins.str*) -> builtins.int' [case testDeferredClassContext] class A: def f(self) -> str: return 'foo' class B(A): def f(self) -> str: return self.x def initialize(self): self.x = 'bar' [out] -- Scripts and __main__ [case testScriptsAreModules] # flags: --scripts-are-modules [file a] pass [file b] pass [case testScriptsAreNotModules] # cmd: mypy a b [file a] pass [file b] pass [out] [case testTypeCheckPrio] # cmd: mypy -m part1 part2 part3 part4 [file part1.py] from part3 import Thing class FirstThing: pass [file part2.py] from part4 import part4_thing as Thing [file part3.py] from part2 import Thing reveal_type(Thing) [file part4.py] from typing import TYPE_CHECKING if TYPE_CHECKING: from part1 import FirstThing def part4_thing(a: int) -> str: pass [builtins fixtures/bool.pyi] [out] tmp/part3.py:2: error: Revealed type is 'def (a: builtins.int) -> builtins.str' [case 
testImportStarAliasAnyList] import bar [file bar.py] from foo import * def bar(y: AnyAlias) -> None: pass l = None # type: ListAlias[int] reveal_type(l) [file foo.py] from typing import Any, List AnyAlias = Any ListAlias = List [builtins fixtures/list.pyi] [out] tmp/bar.py:5: error: Revealed type is 'builtins.list[builtins.int]' [case testImportStarAliasSimpleGeneric] from ex2a import * def do_something(dic: Row) -> None: pass def do_another() -> Row: return {} do_something({'good': 'bad'}) # E: Dict entry 0 has incompatible type "str": "str"; expected "str": "int" reveal_type(do_another()) # E: Revealed type is 'builtins.dict[builtins.str, builtins.int]' [file ex2a.py] from typing import Dict Row = Dict[str, int] [builtins fixtures/dict.pyi] [out] [case testImportStarAliasGeneric] from y import * notes = None # type: G[X] another = G[X]() second = XT[str]() last = XT[G]() reveal_type(notes) # E: Revealed type is 'y.G[y.G[builtins.int]]' reveal_type(another) # E: Revealed type is 'y.G[y.G*[builtins.int]]' reveal_type(second) # E: Revealed type is 'y.G[builtins.str*]' reveal_type(last) # E: Revealed type is 'y.G[y.G*]' [file y.py] from typing import Generic, TypeVar T = TypeVar('T') class G(Generic[T]): pass X = G[int] XT = G[T] [out] [case testImportStarAliasCallable] from foo import * from typing import Any def bar(x: Any, y: AnyCallable) -> Any: return 'foo' cb = None # type: AnyCallable reveal_type(cb) # E: Revealed type is 'def (*Any, **Any) -> Any' [file foo.py] from typing import Callable, Any AnyCallable = Callable[..., Any] [out] [case testRevealType] import types def f() -> types.ModuleType: return types reveal_type(f()) # E: Revealed type is 'types.ModuleType' reveal_type(types) # E: Revealed type is 'types.ModuleType' [builtins fixtures/module.pyi] [case testClassImportAccessedInMethod] class C: import m def foo(self) -> None: x = self.m.a reveal_type(x) # E: Revealed type is 'builtins.str' # ensure we distinguish self from other variables y = 'hello' z = y.m.a # E: "str" has no attribute "m" @classmethod def cmethod(cls) -> None: y = cls.m.a reveal_type(y) # E: Revealed type is 'builtins.str' @staticmethod def smethod(foo: int) -> None: # we aren't confused by first arg of a staticmethod y = foo.m.a # E: "int" has no attribute "m" [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testModuleAlias] import m m2 = m reveal_type(m2.a) # E: Revealed type is 'builtins.str' m2.b # E: Module has no attribute "b" m2.c = 'bar' # E: Module has no attribute "c" [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testClassModuleAlias] import m class C: x = m def foo(self) -> None: reveal_type(self.x.a) # E: Revealed type is 'builtins.str' [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testLocalModuleAlias] import m def foo() -> None: x = m reveal_type(x.a) # E: Revealed type is 'builtins.str' class C: def foo(self) -> None: x = m reveal_type(x.a) # E: Revealed type is 'builtins.str' [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testChainedModuleAlias] import m m3 = m2 = m m4 = m3 m5 = m4 reveal_type(m2.a) # E: Revealed type is 'builtins.str' reveal_type(m3.a) # E: Revealed type is 'builtins.str' reveal_type(m4.a) # E: Revealed type is 'builtins.str' reveal_type(m5.a) # E: Revealed type is 'builtins.str' [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testMultiModuleAlias] import m, n m2, n2, (m3, n3) = m, n, [m, n] reveal_type(m2.a) # E: Revealed type is 'builtins.str' reveal_type(n2.b) # E: Revealed type is 'builtins.str' reveal_type(m3.a) 
# E: Revealed type is 'builtins.str' reveal_type(n3.b) # E: Revealed type is 'builtins.str' x, y = m # E: 'types.ModuleType' object is not iterable x, y, z = m, n # E: Need more than 2 values to unpack (3 expected) x, y = m, m, m # E: Too many values to unpack (2 expected, 3 provided) x, (y, z) = m, n # E: 'types.ModuleType' object is not iterable x, (y, z) = m, (n, n, n) # E: Too many values to unpack (2 expected, 3 provided) [file m.py] a = 'foo' [file n.py] b = 'bar' [builtins fixtures/module.pyi] [case testModuleAliasWithExplicitAnnotation] from typing import Any import types import m mod_mod: types.ModuleType = m mod_mod2: types.ModuleType mod_mod2 = m mod_mod3 = m # type: types.ModuleType mod_any: Any = m mod_int: int = m # E: Incompatible types in assignment (expression has type Module, variable has type "int") reveal_type(mod_mod) # E: Revealed type is 'types.ModuleType' mod_mod.a # E: Module has no attribute "a" reveal_type(mod_mod2) # E: Revealed type is 'types.ModuleType' mod_mod2.a # E: Module has no attribute "a" reveal_type(mod_mod3) # E: Revealed type is 'types.ModuleType' mod_mod3.a # E: Module has no attribute "a" reveal_type(mod_any) # E: Revealed type is 'Any' [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testModuleAliasPassedToFunction] import types import m def takes_module(x: types.ModuleType): reveal_type(x.__file__) # E: Revealed type is 'builtins.str' n = m takes_module(m) takes_module(n) [file m.py] a = 'foo' [builtins fixtures/module.pyi] [case testModuleAliasRepeated] import m, n if bool(): x = m else: x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type Module) if bool(): y = 3 else: y = m # E: Incompatible types in assignment (expression has type Module, variable has type "int") if bool(): z = m else: z = n # E: Cannot assign multiple modules to name 'z' without explicit 'types.ModuleType' annotation [file m.py] a = 'foo' [file n.py] a = 3 [builtins fixtures/module.pyi] [case testModuleAliasRepeatedWithAnnotation] import types import m, n x: types.ModuleType if bool(): x = m else: x = n x.a # E: Module has no attribute "a" reveal_type(x.__file__) # E: Revealed type is 'builtins.str' [file m.py] a = 'foo' [file n.py] a = 3 [builtins fixtures/module.pyi] [case testModuleAliasRepeatedComplex] import m, n, o x = m x = n # E: Cannot assign multiple modules to name 'x' without explicit 'types.ModuleType' annotation x = o # E: Cannot assign multiple modules to name 'x' without explicit 'types.ModuleType' annotation y = o y, z = m, n # E: Cannot assign multiple modules to name 'y' without explicit 'types.ModuleType' annotation xx = m xx = m reveal_type(xx.a) # E: Revealed type is 'builtins.str' [file m.py] a = 'foo' [file n.py] a = 3 [file o.py] a = 'bar' [builtins fixtures/module.pyi] [case testModuleAliasToOtherModule] import m, n m = n # E: Cannot assign multiple modules to name 'm' without explicit 'types.ModuleType' annotation [file m.py] [file n.py] [builtins fixtures/module.pyi] [case testNoReExportFromStubs] from stub import Iterable # E: Module 'stub' has no attribute 'Iterable' from stub import C c = C() reveal_type(c.x) # E: Revealed type is 'builtins.int' it: Iterable[int] reveal_type(it) # E: Revealed type is 'Any' [file stub.pyi] from typing import Iterable from substub import C as C def fun(x: Iterable[str]) -> Iterable[int]: pass [file substub.pyi] class C: x: int [builtins fixtures/module.pyi] [case testNoReExportFromStubsMemberType] import stub c = stub.C() reveal_type(c.x) # E: Revealed type is 
'builtins.int' it: stub.Iterable[int] # E: Name 'stub.Iterable' is not defined reveal_type(it) # E: Revealed type is 'Any' [file stub.pyi] from typing import Iterable from substub import C as C def fun(x: Iterable[str]) -> Iterable[int]: pass [file substub.pyi] class C: x: int [builtins fixtures/module.pyi] [case testNoReExportFromStubsMemberVar] import stub reveal_type(stub.y) # E: Revealed type is 'builtins.int' reveal_type(stub.z) # E: Revealed type is 'Any' \ # E: Module has no attribute "z" [file stub.pyi] from substub import y as y from substub import z [file substub.pyi] y = 42 z: int [builtins fixtures/module.pyi] [case testReExportChildStubs] import mod from mod import submod reveal_type(mod.x) # E: Revealed type is 'mod.submod.C' y = submod.C() reveal_type(y.a) # E: Revealed type is 'builtins.str' [file mod/__init__.pyi] from . import submod x: submod.C [file mod/submod.pyi] class C: a: str [builtins fixtures/module.pyi] [case testReExportChildStubs2] import mod.submod y = mod.submod.C() reveal_type(y.a) # E: Revealed type is 'builtins.str' [file mod/__init__.pyi] from . import submod x: submod.C [file mod/submod.pyi] class C: a: str [builtins fixtures/module.pyi] [case testNoReExportChildStubs] import mod from mod import C, D # E: Module 'mod' has no attribute 'C' reveal_type(mod.x) # E: Revealed type is 'mod.submod.C' mod.C # E: Module has no attribute "C" y = mod.D() reveal_type(y.a) # E: Revealed type is 'builtins.str' [file mod/__init__.pyi] from .submod import C, D as D x: C [file mod/submod.pyi] class C: pass class D: a: str [builtins fixtures/module.pyi] [case testNoReExportNestedStub] from stub import substub # E: Module 'stub' has no attribute 'substub' [file stub.pyi] import substub [file substub.pyi] x = 42 [file mod/submod.pyi] [case testModuleAliasToQualifiedImport] import package.module alias = package.module reveal_type(alias.whatever('/')) # E: Revealed type is 'builtins.str*' [file package/__init__.py] [file package/module.py] from typing import TypeVar T = TypeVar('T') def whatever(x: T) -> T: pass [builtins fixtures/module.pyi] [case testModuleAliasToQualifiedImport2] import mod import othermod alias = mod.submod reveal_type(alias.whatever('/')) # E: Revealed type is 'builtins.str*' alias = othermod # E: Cannot assign multiple modules to name 'alias' without explicit 'types.ModuleType' annotation [file mod.py] import submod [file submod.py] from typing import TypeVar T = TypeVar('T') def whatever(x: T) -> T: pass [file othermod.py] [builtins fixtures/module.pyi] [case testModuleLevelGetattr] import has_getattr reveal_type(has_getattr.any_attribute) # E: Revealed type is 'Any' [file has_getattr.pyi] from typing import Any def __getattr__(name: str) -> Any: ... [builtins fixtures/module.pyi] [case testModuleLevelGetattrReturnType] import has_getattr reveal_type(has_getattr.any_attribute) # E: Revealed type is 'builtins.str' [file has_getattr.pyi] def __getattr__(name: str) -> str: ... [builtins fixtures/module.pyi] [case testModuleLevelGetattrInvalidSignature] import has_getattr reveal_type(has_getattr.any_attribute) [file has_getattr.pyi] def __getattr__(x: int, y: str) -> str: ... 
[out] tmp/has_getattr.pyi:1: error: Invalid signature "def (builtins.int, builtins.str) -> builtins.str" main:3: error: Revealed type is 'builtins.str' [builtins fixtures/module.pyi] [case testModuleLevelGetattrNotCallable] import has_getattr reveal_type(has_getattr.any_attribute) # E: Revealed type is 'Any' # E: Module has no attribute "any_attribute" [file has_getattr.pyi] __getattr__ = 3 [builtins fixtures/module.pyi] [case testModuleLevelGetattrUntyped] import has_getattr reveal_type(has_getattr.any_attribute) # E: Revealed type is 'Any' [file has_getattr.pyi] def __getattr__(name): ... [builtins fixtures/module.pyi] [case testModuleLevelGetattrNotStub] import has_getattr reveal_type(has_getattr.any_attribute) [file has_getattr.py] def __getattr__(name): ... [out] tmp/has_getattr.py:1: error: __getattr__ is not valid at the module level outside a stub file main:3: error: Revealed type is 'Any' main:3: error: Module has no attribute "any_attribute" [builtins fixtures/module.pyi] [case testModuleLevelGetattribute] def __getattribute__(): ... # E: __getattribute__ is not valid at the module level [case testModuleLevelGetattrImportFrom] from has_attr import name reveal_type(name) # E: Revealed type is 'Any' [file has_attr.pyi] from typing import Any def __getattr__(name: str) -> Any: ... [builtins fixtures/module.pyi] [case testModuleLevelGetattrImportFromRetType] from has_attr import int_attr reveal_type(int_attr) # E: Revealed type is 'builtins.int' [file has_attr.pyi] def __getattr__(name: str) -> int: ... [builtins fixtures/module.pyi] [case testModuleLevelGetattrImportFromNotStub] from non_stub import name reveal_type(name) [file non_stub.py] from typing import Any def __getattr__(name: str) -> Any: ... [out] tmp/non_stub.py:2: error: __getattr__ is not valid at the module level outside a stub file main:1: error: Module 'non_stub' has no attribute 'name' main:2: error: Revealed type is 'Any' [builtins fixtures/module.pyi] [case testModuleLevelGetattrImportFromAs] from has_attr import name as n reveal_type(name) reveal_type(n) [file has_attr.pyi] from typing import Any def __getattr__(name: str) -> Any: ... [out] main:2: error: Revealed type is 'Any' main:2: error: Name 'name' is not defined main:3: error: Revealed type is 'Any' [builtins fixtures/module.pyi] mypy-0.560/test-data/unit/check-multiple-inheritance.test0000644€tŠÔÚ€2›s®0000001405513215007205027637 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for multiple inheritance. 
-- -- Related: check-abstract.test -- No name collisions -- ------------------ [case testSimpleMultipleInheritanceAndMethods] import typing class A: def f(self, x: int) -> None: pass class B: def g(self, x: str) -> None: pass class C(A, B): pass c = C() c.f(1) c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" c.g('') c.g(1) # E: Argument 1 to "g" of "B" has incompatible type "int"; expected "str" [case testSimpleMultipleInheritanceAndMethods2] import typing class A: def f(self, x: int) -> None: pass class B: def g(self, x): pass class C(A, B): pass c = C() c.f(1) c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" c.g('') c.g(1) [case testSimpleMultipleInheritanceAndInstanceVariables] import typing class A: def f(self) -> None: self.x = 1 class B: def g(self) -> None: self.y = '' class C(A, B): pass c = C() c.x = 1 c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") c.y = '' c.y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testSimpleMultipleInheritanceAndInstanceVariableInClassBody] import typing class A: x = 1 class B: y = '' class C(A, B): pass c = C() c.x = 1 c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") c.y = '' c.y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testSimpleMultipleInheritanceAndClassVariable] import typing class A: x = 1 class B: y = '' class C(A, B): pass C.x = 1 C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") C.y = '' C.y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") -- Name collisions -- --------------- [case testMethodNameCollisionInMultipleInheritanceWithValidSigs] import typing class A: def f(self, x: int) -> None: pass class B: def f(self, x: int) -> None: pass class C(A, B): pass c = C() c.f(1) c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testInstanceVarNameOverlapInMultipleInheritanceWithCompatibleTypes] import typing class A: def f(self) -> None: self.x = 1 class B: def g(self) -> None: self.x = 1 class C(A, B): pass c = C() c.x = 1 c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testClassVarNameOverlapInMultipleInheritanceWithCompatibleTypes] import typing class A: x = 1 class B: x = 1 class C(A, B): pass c = C() c.x = 1 c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") C.x = 1 C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testMethodNameCollisionInMultipleInheritanceWithIncompatibleSigs] import typing class A: def f(self, x: int) -> None: pass class B: def f(self, x: str) -> None: pass class C(A, B): pass [out] main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B" [case testMethodNameCollisionInMultipleInheritanceWithIncompatibleSigs2] import typing class A: def f(self, x: int) -> None: pass class B: def f(self, x, y): pass class C(A, B): pass class D(B, A): pass [out] main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B" main:7: error: Definition of "f" in base class "B" is incompatible with definition in base class "A" [case testMethodOverridingWithBothDynamicallyAndStaticallyTypedMethods] 
class A: def f(self) -> int: pass class B: def f(self): pass class C(B, A): pass class D(A, B): pass [out] [case testInstanceVarNameOverlapInMultipleInheritanceWithInvalidTypes] import typing class A: def f(self) -> None: self.x = 1 class B: def g(self) -> None: self.x = '' class C(A, B): pass [out] main:8: error: Definition of "x" in base class "A" is incompatible with definition in base class "B" [case testClassVarNameOverlapInMultipleInheritanceWithInvalidTypes] import typing class A: x = 1 class B: x = '' class C(A, B): pass [out] main:6: error: Definition of "x" in base class "A" is incompatible with definition in base class "B" [case testMethodOverlapsWithClassVariableInMultipleInheritance] from typing import Callable class A: def f(self) -> None: pass class B: f = '' class C(A, B): pass [out] main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B" [case testMethodOverlapsWithInstanceVariableInMultipleInheritance] from typing import Callable class A: def f(self) -> None: pass class B: def g(self) -> None: self.f = '' class C(A, B): pass [out] main:7: error: Definition of "f" in base class "A" is incompatible with definition in base class "B" [case testMultipleInheritanceAndInit] import typing class A: def __init__(self, x: int) -> None: pass class B: def __init__(self) -> None: pass class C(A, B): pass [case testMultipleInheritanceAndDifferentButCompatibleSignatures] class A: def clear(self): pass class B: def clear(self, x=None): pass class C(B, A): pass class D(A, B): pass [out] main:8: error: Definition of "clear" in base class "A" is incompatible with definition in base class "B" -- Special cases -- ------------- [case testGenericInheritanceAndOverridingWithMultipleInheritance] from typing import Generic, TypeVar T = TypeVar('T') class G(Generic[T]): def f(self, s: int) -> 'G[T]': pass class A(G[int]): def f(self, s: int) -> 'A': pass class B(A, int): pass [case testCannotDetermineTypeInMultipleInheritance] from typing import Callable, TypeVar T = TypeVar('T') class A(B, C): def f(self): pass class B: @dec def f(self): pass class C: @dec def f(self): pass def dec(f: Callable[..., T]) -> Callable[..., T]: return f [out] main:3: error: Cannot determine type of 'f' in base class 'B' main:3: error: Cannot determine type of 'f' in base class 'C' mypy-0.560/test-data/unit/check-namedtuple.test0000644€tŠÔÚ€2›s®0000004577613215007205025671 0ustar jukkaDROPBOX\Domain Users00000000000000[case testNamedTupleUsedAsTuple] from collections import namedtuple X = namedtuple('X', ['x', 'y']) x = None # type: X a, b = x b = x[0] a = x[1] a, b, c = x # E: Need more than 2 values to unpack (3 expected) x[2] # E: Tuple index out of range [case testNamedTupleWithTupleFieldNamesUsedAsTuple] from collections import namedtuple X = namedtuple('X', ('x', 'y')) x = None # type: X a, b = x b = x[0] a = x[1] a, b, c = x # E: Need more than 2 values to unpack (3 expected) x[2] # E: Tuple index out of range [case testNamedTupleNoUnderscoreFields] from collections import namedtuple X = namedtuple('X', 'x, _y, _z') # E: namedtuple() field names cannot start with an underscore: _y, _z [case testNamedTupleAccessingAttributes] from collections import namedtuple X = namedtuple('X', ['x', 'y']) x = None # type: X x.x x.y x.z # E: "X" has no attribute "z" [case testNamedTupleAttributesAreReadOnly] from collections import namedtuple X = namedtuple('X', ['x', 'y']) x = None # type: X x.x = 5 # E: Property "x" defined in "X" is read-only x.y = 5 # E: Property "y" defined in "X" 
is read-only x.z = 5 # E: "X" has no attribute "z" class A(X): pass a = None # type: A a.x = 5 # E: Property "x" defined in "A" is read-only a.y = 5 # E: Property "y" defined in "A" is read-only -- a.z = 5 # not supported yet [case testNamedTupleCreateWithPositionalArguments] from collections import namedtuple X = namedtuple('X', ['x', 'y']) x = X(1, 'x') x.x x.z # E: "X" has no attribute "z" x = X(1) # E: Too few arguments for "X" x = X(1, 2, 3) # E: Too many arguments for "X" [case testCreateNamedTupleWithKeywordArguments] from collections import namedtuple X = namedtuple('X', ['x', 'y']) x = X(x=1, y='x') x = X(1, y='x') x = X(x=1, z=1) # E: Unexpected keyword argument "z" for "X" x = X(y=1) # E: Missing positional argument "x" in call to "X" [case testNamedTupleCreateAndUseAsTuple] from collections import namedtuple X = namedtuple('X', ['x', 'y']) x = X(1, 'x') a, b = x a, b, c = x # E: Need more than 2 values to unpack (3 expected) [case testNamedTupleWithItemTypes] from typing import NamedTuple N = NamedTuple('N', [('a', int), ('b', str)]) n = N(1, 'x') s = n.a # type: str # E: Incompatible types in assignment (expression has type "int", \ variable has type "str") i = n.b # type: int # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") x, y = n x = y # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testNamedTupleWithTupleFieldNamesWithItemTypes] from typing import NamedTuple N = NamedTuple('N', (('a', int), ('b', str))) n = N(1, 'x') s = n.a # type: str # E: Incompatible types in assignment (expression has type "int", \ variable has type "str") i = n.b # type: int # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") x, y = n x = y # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testNamedTupleConstructorArgumentTypes] from typing import NamedTuple N = NamedTuple('N', [('a', int), ('b', str)]) n = N('x', 'x') # E: Argument 1 to "N" has incompatible type "str"; expected "int" n = N(1, b=2) # E: Argument 2 to "N" has incompatible type "int"; expected "str" N(1, 'x') N(b='x', a=1) [case testNamedTupleAsBaseClass] from typing import NamedTuple N = NamedTuple('N', [('a', int), ('b', str)]) class X(N): pass x = X(1, 2) # E: Argument 2 to "X" has incompatible type "int"; expected "str" s = '' i = 0 s = x.a # E: Incompatible types in assignment (expression has type "int", variable has type "str") i, s = x s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testNamedTupleAsBaseClass2] from typing import NamedTuple class X(NamedTuple('N', [('a', int), ('b', str)])): pass x = X(1, 2) # E: Argument 2 to "X" has incompatible type "int"; expected "str" s = '' i = 0 s = x.a # E: Incompatible types in assignment (expression has type "int", variable has type "str") i, s = x s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testNamedTuplesTwoAsBaseClasses] from typing import NamedTuple A = NamedTuple('A', [('a', int)]) B = NamedTuple('B', [('a', int)]) class X(A, B): # E: Class has two incompatible bases derived from tuple pass [case testNamedTuplesTwoAsBaseClasses2] from typing import NamedTuple A = NamedTuple('A', [('a', int)]) class X(A, NamedTuple('B', [('a', int)])): # E: Class has two incompatible bases derived from tuple pass [case testNamedTupleSelfTypeWithNamedTupleAsBase] from typing import NamedTuple A = 
NamedTuple('A', [('a', int), ('b', str)]) class B(A): def f(self, x: int) -> None: self.f(self.a) self.f(self.b) # E: Argument 1 to "f" of "B" has incompatible type "str"; expected "int" i = 0 s = '' i, s = self i, i = self # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") [out] [case testNamedTupleTypeReferenceToClassDerivedFrom] from typing import NamedTuple A = NamedTuple('A', [('a', int), ('b', str)]) class B(A): def f(self, x: 'B') -> None: i = 0 s = '' self = x i, s = x i, s = x.a, x.b i, s = x.a, x.a # E: Incompatible types in assignment (expression has type "int", \ variable has type "str") i, i = self # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") [out] [case testNamedTupleSubtyping] from typing import NamedTuple, Tuple A = NamedTuple('A', [('a', int), ('b', str)]) class B(A): pass a = A(1, '') b = B(1, '') t = None # type: Tuple[int, str] b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "A") b = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "B") t = a t = (1, '') t = b a = b [case testNamedTupleSimpleTypeInference] from typing import NamedTuple, Tuple A = NamedTuple('A', [('a', int)]) l = [A(1), A(2)] a = A(1) a = l[0] (i,) = l[0] i, i = l[0] # E: Need more than 1 value to unpack (2 expected) l = [A(1)] a = (1,) # E: Incompatible types in assignment (expression has type "Tuple[int]", \ variable has type "A") [builtins fixtures/list.pyi] [case testNamedTupleMissingClassAttribute] import collections MyNamedTuple = collections.namedtuple('MyNamedTuple', ['spam', 'eggs']) MyNamedTuple.x # E: "Type[MyNamedTuple]" has no attribute "x" [case testNamedTupleEmptyItems] from typing import NamedTuple A = NamedTuple('A', []) [case testNamedTupleProperty] from typing import NamedTuple A = NamedTuple('A', [('a', int)]) class B(A): @property def b(self) -> int: return self.a class C(B): pass B(1).b C(2).b [builtins fixtures/property.pyi] [case testNamedTupleAsDict] from collections import namedtuple X = namedtuple('X', ['x', 'y']) x = None # type: X reveal_type(x._asdict()) # E: Revealed type is 'builtins.dict[builtins.str, Any]' [builtins fixtures/dict.pyi] [case testNamedTupleReplace] from collections import namedtuple X = namedtuple('X', ['x', 'y']) x = None # type: X reveal_type(x._replace()) # E: Revealed type is 'Tuple[Any, Any, fallback=__main__.X]' x._replace(y=5) x._replace(x=3) x._replace(x=3, y=5) x._replace(z=5) # E: Unexpected keyword argument "z" for "_replace" of "X" x._replace(5) # E: Too many positional arguments for "_replace" of "X" [case testNamedTupleReplaceAsClass] from collections import namedtuple X = namedtuple('X', ['x', 'y']) x = None # type: X X._replace(x, x=1, y=2) X._replace(x=1, y=2) # E: Missing positional argument "self" in call to "_replace" of "X" [case testNamedTupleReplaceTyped] from typing import NamedTuple X = NamedTuple('X', [('x', int), ('y', str)]) x = None # type: X reveal_type(x._replace()) # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]' x._replace(x=5) x._replace(y=5) # E: Argument 1 to "_replace" of "X" has incompatible type "int"; expected "str" [case testNamedTupleMake] from typing import NamedTuple X = NamedTuple('X', [('x', int), ('y', str)]) reveal_type(X._make([5, 'a'])) # E: Revealed type is 'Tuple[builtins.int, builtins.str, 
fallback=__main__.X]' X._make('a b') # E: Argument 1 to "_make" of "X" has incompatible type "str"; expected "Iterable[Any]" -- # FIX: not a proper class method -- x = None # type: X -- reveal_type(x._make([5, 'a'])) # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]' -- x._make('a b') # E: Argument 1 to "_make" of "X" has incompatible type "str"; expected Iterable[Any] [builtins fixtures/list.pyi] [case testNamedTupleFields] from typing import NamedTuple X = NamedTuple('X', [('x', int), ('y', str)]) reveal_type(X._fields) # E: Revealed type is 'Tuple[builtins.str, builtins.str]' [case testNamedTupleSource] from typing import NamedTuple X = NamedTuple('X', [('x', int), ('y', str)]) reveal_type(X._source) # E: Revealed type is 'builtins.str' x = None # type: X reveal_type(x._source) # E: Revealed type is 'builtins.str' [case testNamedTupleUnit] from typing import NamedTuple X = NamedTuple('X', []) x = X() # type: X x._replace() x._fields[0] # E: Tuple index out of range [case testNamedTupleJoinNamedTuple] from typing import NamedTuple X = NamedTuple('X', [('x', int), ('y', str)]) Y = NamedTuple('Y', [('x', int), ('y', str)]) reveal_type([X(3, 'b'), Y(1, 'a')]) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]' [builtins fixtures/list.pyi] [case testNamedTupleJoinTuple] from typing import NamedTuple, Tuple X = NamedTuple('X', [('x', int), ('y', str)]) reveal_type([(3, 'b'), X(1, 'a')]) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]' reveal_type([X(1, 'a'), (3, 'b')]) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]' [builtins fixtures/list.pyi] [case testNamedTupleFieldTypes] from typing import NamedTuple X = NamedTuple('X', [('x', int), ('y', str)]) reveal_type(X._field_types) # E: Revealed type is 'builtins.dict[builtins.str, Any]' x = None # type: X reveal_type(x._field_types) # E: Revealed type is 'builtins.dict[builtins.str, Any]' [builtins fixtures/dict.pyi] [case testNamedTupleAndOtherSuperclass] from typing import NamedTuple class A: pass def f(x: A) -> None: pass class B(NamedTuple('B', []), A): pass f(B()) x = None # type: A x = B() # Sanity check: fail if baseclass does not match class C: pass def g(x: C) -> None: pass class D(NamedTuple('D', []), A): pass g(D()) # E: Argument 1 to "g" has incompatible type "D"; expected "C" y = None # type: C y = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C") [case testNamedTupleSelfTypeMethod] from typing import TypeVar, NamedTuple T = TypeVar('T', bound='A') class A(NamedTuple('A', [('x', str)])): def member(self: T) -> T: return self class B(A): pass a = None # type: A a = A('').member() b = None # type: B b = B('').member() a = B('') a = B('').member() [case testNamedTupleSelfTypeReplace] from typing import NamedTuple, TypeVar A = NamedTuple('A', [('x', str)]) reveal_type(A('hello')._replace(x='')) # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.A]' a = None # type: A a = A('hello')._replace(x='') class B(A): pass reveal_type(B('hello')._replace(x='')) # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.B]' b = None # type: B b = B('hello')._replace(x='') [case testNamedTupleSelfTypeMake] from typing import NamedTuple, TypeVar A = NamedTuple('A', [('x', str)]) reveal_type(A._make([''])) # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.A]' a = A._make(['']) # type: A class B(A): pass reveal_type(B._make([''])) # E: Revealed type is 'Tuple[builtins.str, 
fallback=__main__.B]' b = B._make(['']) # type: B [builtins fixtures/list.pyi] [case testNamedTupleIncompatibleRedefinition] from typing import NamedTuple class Crash(NamedTuple): count: int # E: Incompatible types in assignment (expression has type "int", base class "tuple" defined the type as "Callable[[Tuple[int, ...], Any], int]") [builtins fixtures/tuple.pyi] [case testNamedTupleInClassNamespace] # https://github.com/python/mypy/pull/2553#issuecomment-266474341 from typing import NamedTuple class C: def f(self): A = NamedTuple('A', [('x', int)]) def g(self): A = NamedTuple('A', [('y', int)]) C.A # E: "Type[C]" has no attribute "A" [case testNamedTupleInFunction] from typing import NamedTuple def f() -> None: A = NamedTuple('A', [('x', int)]) A # E: Name 'A' is not defined [case testNamedTupleForwardAsUpperBound] from typing import NamedTuple, TypeVar, Generic T = TypeVar('T', bound='M') class G(Generic[T]): x: T yb: G[int] # E: Type argument "builtins.int" of "G" must be a subtype of "Tuple[builtins.int, fallback=__main__.M]" yg: G[M] reveal_type(G[M]().x.x) # E: Revealed type is 'builtins.int' reveal_type(G[M]().x[0]) # E: Revealed type is 'builtins.int' M = NamedTuple('M', [('x', int)]) [out] [case testNamedTupleWithImportCycle] import a [file a.py] from collections import namedtuple from b import f N = namedtuple('N', 'a') class X(N): pass [file b.py] import a def f(x: a.X) -> None: reveal_type(x) x = a.X(1) reveal_type(x) [out] tmp/b.py:4: error: Revealed type is 'Tuple[Any, fallback=a.X]' tmp/b.py:6: error: Revealed type is 'Tuple[Any, fallback=a.X]' [case testNamedTupleWithImportCycle2] import a [file a.py] from collections import namedtuple from b import f N = namedtuple('N', 'a') [file b.py] import a def f(x: a.N) -> None: reveal_type(x) x = a.N(1) reveal_type(x) [out] tmp/b.py:4: error: Revealed type is 'Tuple[Any, fallback=a.N]' tmp/b.py:6: error: Revealed type is 'Tuple[Any, fallback=a.N]' [case testSimpleSelfReferrentialNamedTuple] from typing import NamedTuple class MyNamedTuple(NamedTuple): parent: 'MyNamedTuple' def bar(nt: MyNamedTuple) -> MyNamedTuple: return nt x: MyNamedTuple reveal_type(x.parent) [out] main:2: error: Recursive types not fully supported yet, nested types replaced with "Any" main:9: error: Revealed type is 'Tuple[Any, fallback=__main__.MyNamedTuple]' -- Some crazy self-referential named tuples and typed dicts -- to be sure that everything works [case testCrossFileNamedTupleForwardRefs] import a [file a.py] import b from typing import Any, NamedTuple class A: def a(self, b: 'b.B') -> str: return 'a' ATuple = NamedTuple('ATuple', [('a', Any)]) [file b.py] import a class B: def b(self, a: 'a.A') -> str: return 'b' def aWithTuple(self, atuple: 'a.ATuple') -> str: return 'a' [out] [case testSelfRefNT1] from typing import Tuple, NamedTuple Node = NamedTuple('Node', [ # E: Recursive types not fully supported yet, nested types replaced with "Any" ('name', str), ('children', Tuple['Node', ...]), ]) n: Node reveal_type(n) # E: Revealed type is 'Tuple[builtins.str, builtins.tuple[Tuple[builtins.str, builtins.tuple[Any], fallback=__main__.Node]], fallback=__main__.Node]' [builtins fixtures/tuple.pyi] [case testSelfRefNT2] from typing import Tuple, NamedTuple A = NamedTuple('A', [ # E ('x', str), ('y', Tuple['B', ...]), ]) class B(NamedTuple): # E x: A y: int n: A reveal_type(n) # E: Revealed type is 'Tuple[builtins.str, builtins.tuple[Tuple[Tuple[builtins.str, builtins.tuple[Any], fallback=__main__.A], builtins.int, fallback=__main__.B]], fallback=__main__.A]'
[builtins fixtures/tuple.pyi] [out] main:3: error: Recursive types not fully supported yet, nested types replaced with "Any" main:7: error: Recursive types not fully supported yet, nested types replaced with "Any" [case testSelfRefNT3] from typing import NamedTuple, Tuple class B(NamedTuple): # E x: Tuple[A, int] y: int A = NamedTuple('A', [ # E: Recursive types not fully supported yet, nested types replaced with "Any" ('x', str), ('y', 'B'), ]) n: B m: A reveal_type(n.x) # E: Revealed type is 'Tuple[Tuple[builtins.str, Tuple[Tuple[Any, builtins.int], builtins.int, fallback=__main__.B], fallback=__main__.A], builtins.int]' reveal_type(m[0]) # E: Revealed type is 'builtins.str' lst = [m, n] reveal_type(lst[0]) # E: Revealed type is 'Tuple[builtins.object, builtins.object]' [builtins fixtures/tuple.pyi] [out] main:3: error: Recursive types not fully supported yet, nested types replaced with "Any" [case testSelfRefNT4] from typing import NamedTuple class B(NamedTuple): # E x: A y: int class A(NamedTuple): # E x: str y: B n: A reveal_type(n.y[0]) # E: Revealed type is 'Tuple[builtins.str, Tuple[Any, builtins.int, fallback=__main__.B], fallback=__main__.A]' [builtins fixtures/tuple.pyi] [out] main:3: error: Recursive types not fully supported yet, nested types replaced with "Any" main:7: error: Recursive types not fully supported yet, nested types replaced with "Any" [case testSelfRefNT5] from typing import NamedTuple B = NamedTuple('B', [ # E: Recursive types not fully supported yet, nested types replaced with "Any" ('x', A), ('y', int), ]) A = NamedTuple('A', [ # E: Recursive types not fully supported yet, nested types replaced with "Any" ('x', str), ('y', 'B'), ]) n: A def f(m: B) -> None: pass reveal_type(n) # E: Revealed type is 'Tuple[builtins.str, Tuple[Tuple[builtins.str, Tuple[Any, builtins.int, fallback=__main__.B], fallback=__main__.A], builtins.int, fallback=__main__.B], fallback=__main__.A]' reveal_type(f) # E: Revealed type is 'def (m: Tuple[Tuple[builtins.str, Tuple[Any, builtins.int, fallback=__main__.B], fallback=__main__.A], builtins.int, fallback=__main__.B])' [builtins fixtures/tuple.pyi] [case testRecursiveNamedTupleInBases] from typing import List, NamedTuple, Union Exp = Union['A', 'B'] # E: Recursive types not fully supported yet, nested types replaced with "Any" class A(NamedTuple('A', [('attr', List[Exp])])): pass class B(NamedTuple('B', [('val', object)])): pass def my_eval(exp: Exp) -> int: reveal_type(exp) # E: Revealed type is 'Union[Tuple[builtins.list[Any], fallback=__main__.A], Tuple[builtins.object, fallback=__main__.B]]' if isinstance(exp, A): my_eval(exp[0][0]) return my_eval(exp.attr[0]) if isinstance(exp, B): return exp.val # E: Incompatible return value type (got "object", expected "int") my_eval(A([B(1), B(2)])) # OK [builtins fixtures/isinstancelist.pyi] [out] [case testForwardReferenceInNamedTuple] from typing import NamedTuple class A(NamedTuple): b: 'B' x: int class B: pass [case testTypeNamedTupleClassmethod] from typing import Type, NamedTuple class D(NamedTuple): @classmethod def f(cls) -> None: pass d: Type[D] d.g() # E: "Type[D]" has no attribute "g" d.f() [builtins fixtures/classmethod.pyi] mypy-0.560/test-data/unit/check-newsyntax.test0000644€tŠÔÚ€2›s®0000001071713215007205025556 0ustar jukkaDROPBOX\Domain Users00000000000000[case testNewSyntaxRequire36] # flags: --python-version 3.5 x: int = 5 # E: Variable annotation syntax is only supported in Python 3.6 and greater [out] [case testNewSyntaxSyntaxError] # flags: --python-version 3.6 x: int: 
int # E: invalid syntax [out] [case testNewSyntaxBasics] # flags: --python-version 3.6 x: int x = 5 y: int = 5 a: str a = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str") b: str = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str") zzz: int zzz: str # E: Name 'zzz' already defined [out] [case testNewSyntaxWithDict] # flags: --python-version 3.6 from typing import Dict, Any d: Dict[int, str] = {} d[42] = 'ab' d[42] = 42 # E: Incompatible types in assignment (expression has type "int", target has type "str") d['ab'] = 'ab' # E: Invalid index type "str" for "Dict[int, str]"; expected type "int" [builtins fixtures/dict.pyi] [out] [case testNewSyntaxWithRevealType] # flags: --python-version 3.6 from typing import Dict def tst_local(dct: Dict[int, T]) -> Dict[T, int]: ret: Dict[T, int] = {} return ret reveal_type(tst_local({1: 'a'})) # E: Revealed type is 'builtins.dict[builtins.str*, builtins.int]' [builtins fixtures/dict.pyi] [out] [case testNewSyntaxWithInstanceVars] # flags: --python-version 3.6 class TstInstance: a: str def __init__(self) -> None: self.x: int TstInstance().x = 5 TstInstance().x = 'ab' # E: Incompatible types in assignment (expression has type "str", variable has type "int") TstInstance().a = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str") TstInstance().a = 'ab' [out] [case testNewSyntaxWithClassVars] # flags: --strict-optional --python-version 3.6 class CCC: a: str = None # E: Incompatible types in assignment (expression has type "None", variable has type "str") [out] [case testNewSyntaxWithStrictOptional] # flags: --strict-optional --python-version 3.6 strict: int strict = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") strict2: int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [out] [case testNewSyntaxWithStrictOptionalFunctions] # flags: --strict-optional --python-version 3.6 def f() -> None: x: int x = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [out] [case testNewSyntaxWithStrictOptionalClasses] # flags: --strict-optional --python-version 3.6 class C: def meth(self) -> None: x: int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") self.x: int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [out] [case testNewSyntaxSpecialAssign] # flags: --python-version 3.6 class X: x: str x[0]: int x.x: int [out] main:4: error: Unexpected type declaration main:4: error: Unsupported target for indexed assignment main:5: error: Type cannot be declared in assignment to non-self attribute main:5: error: "str" has no attribute "x" [case testNewSyntaxAsyncComprehensionError] # flags: --python-version 3.5 async def f(): results = [i async for i in aiter() if i % 2] # E: Async comprehensions are only supported in Python 3.6 and greater [case testNewSyntaxFstringError] # flags: --python-version 3.5 f'' # E: Format strings are only supported in Python 3.6 and greater [case testNewSyntaxFStringBasics] # flags: --python-version 3.6 f'foobar' f'{"foobar"}' f'foo{"bar"}' f'.{1}.' f'{type(1)}' a: str a = f'foobar' a = f'{"foobar"}' [builtins fixtures/f_string.pyi] [case testNewSyntaxFStringExpressionsOk] # flags: --python-version 3.6 f'.{1 + 1}.' 
f'.{1 + 1}.{"foo" + "bar"}' [builtins fixtures/f_string.pyi] [case testNewSyntaxFStringExpressionsErrors] # flags: --python-version 3.6 f'{1 + ""}' f'.{1 + ""}' [builtins fixtures/f_string.pyi] [out] main:2: error: Unsupported operand types for + ("int" and "str") main:3: error: Unsupported operand types for + ("int" and "str") [case testNewSyntaxFStringParseFormatOptions] # flags: --python-version 3.6 value = 10.5142 width = 10 precision = 4 f'result: {value:{width}.{precision}}' [builtins fixtures/f_string.pyi] [case testNewSyntaxFStringSingleField] # flags: --python-version 3.6 v = 1 reveal_type(f'{v}') # E: Revealed type is 'builtins.str' reveal_type(f'{1}') # E: Revealed type is 'builtins.str' [builtins fixtures/f_string.pyi] mypy-0.560/test-data/unit/check-newtype.test0000644€tŠÔÚ€2›s®0000002211213215007205025201 0ustar jukkaDROPBOX\Domain Users00000000000000-- Checks NewType(...) -- Checks for basic functionality [case testNewTypePEP484Example1] from typing import NewType UserId = NewType('UserId', int) def name_by_id(user_id: UserId) -> str: return "foo" UserId('user') # E: Argument 1 to "UserId" has incompatible type "str"; expected "int" name_by_id(42) # E: Argument 1 to "name_by_id" has incompatible type "int"; expected "UserId" name_by_id(UserId(42)) id = UserId(5) num = id + 1 reveal_type(id) # E: Revealed type is '__main__.UserId' reveal_type(num) # E: Revealed type is 'builtins.int' [out] [case testNewTypePEP484Example2] from typing import NewType class PacketId: def __init__(self, major: int, minor: int) -> None: self._major = major self._minor = minor TcpPacketId = NewType('TcpPacketId', PacketId) packet = PacketId(100, 100) tcp_packet = TcpPacketId(packet) tcp_packet = TcpPacketId(127, 0) [out] main:12: error: Too many arguments for "TcpPacketId" main:12: error: Argument 1 to "TcpPacketId" has incompatible type "int"; expected "PacketId" [case testNewTypeWithTuples] from typing import NewType, Tuple TwoTuple = NewType('TwoTuple', Tuple[int, str]) a = TwoTuple((3, "a")) b = TwoTuple(("a", 3)) # E: Argument 1 to "TwoTuple" has incompatible type "Tuple[str, int]"; expected "Tuple[int, str]" reveal_type(a[0]) # E: Revealed type is 'builtins.int' reveal_type(a[1]) # E: Revealed type is 'builtins.str' [builtins fixtures/tuple.pyi] [out] [case testNewTypeWithLists] from typing import NewType, List UserId = NewType('UserId', int) IdList = NewType('IdList', List[UserId]) bad1 = IdList([1]) # E: List item 0 has incompatible type "int"; expected "UserId" foo = IdList([]) foo.append(3) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "UserId" foo.append(UserId(3)) foo.extend([UserId(1), UserId(2), UserId(3)]) foo.extend(IdList([UserId(1), UserId(2), UserId(3)])) bar = IdList([UserId(2)]) baz = foo + bar reveal_type(foo) # E: Revealed type is '__main__.IdList' reveal_type(bar) # E: Revealed type is '__main__.IdList' reveal_type(baz) # E: Revealed type is 'builtins.list[__main__.UserId*]' [builtins fixtures/list.pyi] [out] [case testNewTypeWithGenerics] from typing import TypeVar, Generic, NewType, Any T = TypeVar('T') class Base(Generic[T]): def __init__(self, item: T) -> None: self.item = item def getter(self) -> T: return self.item Derived1 = NewType('Derived1', Base[str]) Derived2 = NewType('Derived2', Base) # Implicit 'Any' Derived3 = NewType('Derived3', Base[Any]) # Explicit 'Any' Derived1(Base(1)) # E: Argument 1 to "Base" has incompatible type "int"; expected "str" Derived1(Base('a')) Derived2(Base(1)) Derived2(Base('a')) Derived3(Base(1)) 
Derived3(Base('a')) reveal_type(Derived1(Base('a')).getter()) # E: Revealed type is 'builtins.str*' reveal_type(Derived3(Base('a')).getter()) # E: Revealed type is 'Any' [out] [case testNewTypeWithNamedTuple] from collections import namedtuple from typing import NewType, NamedTuple Vector1 = namedtuple('Vector1', ['x', 'y']) Point1 = NewType('Point1', Vector1) p1 = Point1(Vector1(1, 2)) reveal_type(p1.x) # E: Revealed type is 'Any' reveal_type(p1.y) # E: Revealed type is 'Any' Vector2 = NamedTuple('Vector2', [('x', int), ('y', int)]) Point2 = NewType('Point2', Vector2) p2 = Point2(Vector2(1, 2)) reveal_type(p2.x) # E: Revealed type is 'builtins.int' reveal_type(p2.y) # E: Revealed type is 'builtins.int' class Vector3: def __init__(self, x: int, y: int) -> None: self.x = x self.y = y Point3 = NewType('Point3', Vector3) p3 = Point3(Vector3(1, 3)) reveal_type(p3.x) # E: Revealed type is 'builtins.int' reveal_type(p3.y) # E: Revealed type is 'builtins.int' [out] [case testNewTypeWithCasts] from typing import NewType, cast UserId = NewType('UserId', int) foo = UserId(3) foo = cast(UserId, 3) foo = cast(UserId, "foo") foo = cast(UserId, UserId(4)) [out] [case testNewTypeWithTypeAliases] from typing import NewType Foo = int Bar = NewType('Bar', Foo) Bar2 = Bar def func1(x: Foo) -> Bar: return Bar(x) def func2(x: int) -> Bar: return Bar(x) def func3(x: Bar2) -> Bar: return x x = Bar(42) y = Bar2(42) y = func3(x) [out] [case testNewTypeWithNewType] from typing import NewType A = NewType('A', int) B = NewType('B', A) C = A D = C E = NewType('E', D) a = A(1) b = B(a) e = E(a) def funca(a: A) -> None: ... def funcb(b: B) -> None: ... funca(a) funca(b) funca(e) funcb(a) # E: Argument 1 to "funcb" has incompatible type "A"; expected "B" funcb(b) funcb(e) # E: Argument 1 to "funcb" has incompatible type "E"; expected "B" [out] -- Make sure NewType works as expected in a variety of different scopes/across files [case testNewTypeInLocalScope] from typing import NewType A = NewType('A', int) a = A(3) def func() -> None: A = NewType('A', str) B = NewType('B', str) a = A(3) # E: Argument 1 to "A" has incompatible type "int"; expected "str" a = A('xyz') b = B('xyz') class MyClass: C = NewType('C', float) def foo(self) -> 'MyClass.C': return MyClass.C(3.2) b = A(3) c = MyClass.C(3.5) [out] [case testNewTypeInMultipleFiles] import a import b list1 = [a.UserId(1), a.UserId(2)] list1.append(b.UserId(3)) # E: Argument 1 to "append" of "list" has incompatible type "b.UserId"; expected "a.UserId" [file a.py] from typing import NewType UserId = NewType('UserId', int) [file b.py] from typing import NewType UserId = NewType('UserId', int) [builtins fixtures/list.pyi] [out] [case testNewTypeWithIncremental] import m [file m.py] from typing import NewType UserId = NewType('UserId', int) def name_by_id(user_id: UserId) -> str: return "foo" name_by_id(UserId(42)) id = UserId(5) num = id + 1 [file m.py.2] from typing import NewType UserId = NewType('UserId', int) def name_by_id(user_id: UserId) -> str: return "foo" name_by_id(UserId(42)) id = UserId(5) num = id + 1 reveal_type(id) reveal_type(num) [rechecked m] [stale] [out1] [out2] tmp/m.py:13: error: Revealed type is 'm.UserId' tmp/m.py:14: error: Revealed type is 'builtins.int' -- Check misuses of NewType fail [case testNewTypeBadInitializationFails] from typing import NewType a = NewType('b', int) # E: String argument 1 'b' to NewType(...) does not match variable name 'a' b = NewType('b', 3) # E: Argument 2 to NewType(...) 
must be a valid type c = NewType(2, int) # E: Argument 1 to NewType(...) must be a string literal foo = "d" d = NewType(foo, int) # E: Argument 1 to NewType(...) must be a string literal e = NewType(name='e', tp=int) # E: NewType(...) expects exactly two positional arguments f = NewType('f', tp=int) # E: NewType(...) expects exactly two positional arguments [out] [case testNewTypeWithAnyFails] from typing import NewType, Any A = NewType('A', Any) # E: Argument 2 to NewType(...) must be subclassable (got "Any") [out] [case testNewTypeWithUnionsFails] from typing import NewType, Union Foo = NewType('Foo', Union[int, float]) # E: Argument 2 to NewType(...) must be subclassable (got "Union[int, float]") [out] [case testNewTypeWithTypeTypeFails] from typing import NewType, Type Foo = NewType('Foo', Type[int]) # E: Argument 2 to NewType(...) must be subclassable (got "Type[int]") a = Foo(type(3)) [builtins fixtures/args.pyi] [out] [case testNewTypeWithTypeVarsFails] from typing import NewType, TypeVar, List T = TypeVar('T') A = NewType('A', T) B = NewType('B', List[T]) [builtins fixtures/list.pyi] [out] main:3: error: Argument 2 to NewType(...) must be subclassable (got T?) main:3: error: Invalid type "__main__.T" main:4: error: Invalid type "__main__.T" [case testNewTypeRedefiningVariablesFails] from typing import NewType a = 3 a = NewType('a', int) b = NewType('b', int) b = NewType('b', float) # this line throws two errors c = NewType('c', str) # type: str [out] main:4: error: Cannot redefine 'a' as a NewType main:7: error: Cannot assign to a type main:7: error: Cannot redefine 'b' as a NewType main:9: error: Cannot declare the type of a NewType declaration [case testNewTypeAddingExplicitTypesFails] from typing import NewType UserId = NewType('UserId', int) a = 3 # type: UserId # E: Incompatible types in assignment (expression has type "int", variable has type "UserId") [out] [case testNewTypeTestSubclassingFails] from typing import NewType class A: pass B = NewType('B', A) class C(B): pass # E: Cannot subclass NewType [out] [case testCannotUseNewTypeWithProtocols] from typing import Protocol, NewType class P(Protocol): attr: int class D: attr: int C = NewType('C', P) # E: NewType cannot be used with protocol classes x: C = C(D()) # We still accept this, treating 'C' as non-protocol subclass. 
reveal_type(x.attr) # E: Revealed type is 'builtins.int' x.bad_attr # E: "C" has no attribute "bad_attr" C(1) # E: Argument 1 to "C" has incompatible type "int"; expected "P" [out] [case testNewTypeAny] from typing import NewType Any = NewType('Any', int) Any(5) [case testNewTypeAndIsInstance] from typing import NewType T = NewType('T', int) d: object if isinstance(d, T): # E: Cannot use isinstance() with a NewType type reveal_type(d) # E: Revealed type is '__main__.T' [builtins fixtures/isinstancelist.pyi] mypy-0.560/test-data/unit/check-optional.test0000644€tŠÔÚ€2›s®0000004363613215007205025351 0ustar jukkaDROPBOX\Domain Users00000000000000-- Tests for strict Optional behavior [case testImplicitNoneType] x = None x() # E: "None" not callable [case testExplicitNoneType] x = None # type: None x() # E: "None" not callable [case testNoneMemberOfOptional] from typing import Optional x = None # type: Optional[int] [case testTypeMemberOfOptional] from typing import Optional x = 0 # type: Optional[int] [case testNoneNotMemberOfType] x = None # type: int [out] main:1: error: Incompatible types in assignment (expression has type "None", variable has type "int") [case testTypeNotMemberOfNone] x = 0 # type: None [out] main:1: error: Incompatible types in assignment (expression has type "int", variable has type "None") [case testOptionalNotMemberOfType] from typing import Optional def f(a: int) -> None: pass x = None # type: Optional[int] f(x) # E: Argument 1 to "f" has incompatible type "Optional[int]"; expected "int" [case testIsinstanceCases] from typing import Optional x = None # type: Optional[int] if isinstance(x, int): reveal_type(x) # E: Revealed type is 'builtins.int' else: reveal_type(x) # E: Revealed type is 'builtins.None' [builtins fixtures/isinstance.pyi] [case testIfCases] from typing import Optional x = None # type: Optional[int] if x: reveal_type(x) # E: Revealed type is 'builtins.int' else: reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]' [builtins fixtures/bool.pyi] [case testIfNotCases] from typing import Optional x = None # type: Optional[int] if not x: reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]' else: reveal_type(x) # E: Revealed type is 'builtins.int' [builtins fixtures/bool.pyi] [case testIsNotNoneCases] from typing import Optional x = None # type: Optional[int] if x is not None: reveal_type(x) # E: Revealed type is 'builtins.int' else: reveal_type(x) # E: Revealed type is 'builtins.None' [builtins fixtures/bool.pyi] [case testIsNoneCases] from typing import Optional x = None # type: Optional[int] if x is None: reveal_type(x) # E: Revealed type is 'builtins.None' else: reveal_type(x) # E: Revealed type is 'builtins.int' reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]' [builtins fixtures/bool.pyi] [case testAnyCanBeNone] from typing import Optional, Any x = None # type: Any if x is None: reveal_type(x) # E: Revealed type is 'builtins.None' else: reveal_type(x) # E: Revealed type is 'Any' [builtins fixtures/bool.pyi] [case testOrCases] from typing import Optional x = None # type: Optional[str] y1 = x or 'a' reveal_type(y1) # E: Revealed type is 'builtins.str' y2 = x or 1 reveal_type(y2) # E: Revealed type is 'Union[builtins.str, builtins.int]' z1 = 'a' or x reveal_type(z1) # E: Revealed type is 'Union[builtins.str, builtins.None]' z2 = int() or x reveal_type(z2) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.None]' [case testAndCases] from typing import Optional x = None # type: 
Optional[str] y1 = x and 'b' reveal_type(y1) # E: Revealed type is 'Union[builtins.str, builtins.None]' y2 = x and 1 # x could be '', so... reveal_type(y2) # E: Revealed type is 'Union[builtins.str, builtins.None, builtins.int]' z1 = 'b' and x reveal_type(z1) # E: Revealed type is 'Union[builtins.str, builtins.None]' z2 = int() and x reveal_type(z2) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.None]' [case testLambdaReturningNone] f = lambda: None x = f() # E: Function does not return a value [case testNoneArgumentType] def f(x: None) -> None: pass f(None) [case testInferOptionalFromDefaultNone] def f(x: int = None) -> None: x + 1 # E: Unsupported left operand type for + (some union) f(None) [out] [case testNoInferOptionalFromDefaultNone] # flags: --no-implicit-optional def f(x: int = None) -> None: # E: Incompatible default for argument "x" (default has type "None", argument has type "int") pass [out] [case testInferOptionalFromDefaultNoneComment] def f(x=None): # type: (int) -> None x + 1 # E: Unsupported left operand type for + (some union) f(None) [out] [case testNoInferOptionalFromDefaultNoneComment] # flags: --no-implicit-optional def f(x=None): # E: Incompatible default for argument "x" (default has type "None", argument has type "int") # type: (int) -> None pass [out] [case testInferOptionalType] x = None if bool(): # scope limit assignment x = 1 # in scope of the assignment, x is an int reveal_type(x) # E: Revealed type is 'builtins.int' # out of scope of the assignment, it's an Optional[int] reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]' [builtins fixtures/bool.pyi] [case testInferOptionalTypeLocallyBound] x = None x = 1 reveal_type(x) # E: Revealed type is 'builtins.int' [case testInferOptionalAnyType] from typing import Any x = None a = None # type: Any if bool(): x = a reveal_type(x) # E: Revealed type is 'Any' reveal_type(x) # E: Revealed type is 'Union[Any, builtins.None]' [builtins fixtures/bool.pyi] [case testInferOptionalTypeFromOptional] from typing import Optional y = None # type: Optional[int] x = None x = y reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]' [case testInferOptionalListType] x = [None] x.append(1) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "None" [builtins fixtures/list.pyi] [case testInferNonOptionalListType] x = [] x.append(1) x() # E: "List[int]" not callable [builtins fixtures/list.pyi] [case testInferOptionalDictKeyValueTypes] x = {None: None} x["bar"] = 1 [builtins fixtures/dict.pyi] [out] main:2: error: Invalid index type "str" for "Dict[None, None]"; expected type "None" main:2: error: Incompatible types in assignment (expression has type "int", target has type "None") [case testInferNonOptionalDictType] x = {} x["bar"] = 1 x() # E: "Dict[str, int]" not callable [builtins fixtures/dict.pyi] [case testNoneClassVariable] from typing import Optional class C: x = None # type: int def __init__(self) -> None: self.x = 0 [case testNoneClassVariableInInit] from typing import Optional class C: x = None # type: int def __init__(self) -> None: self.x = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [out] [case testMultipleAssignmentNoneClassVariableInInit] from typing import Optional class C: x, y = None, None # type: int, str def __init__(self) -> None: self.x = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") self.y = None # E: Incompatible types in 
assignment (expression has type "None", variable has type "str") [out] [case testOverloadWithNone] from foo import * [file foo.pyi] from typing import overload @overload def f(x: None) -> str: pass @overload def f(x: int) -> int: pass reveal_type(f(None)) # E: Revealed type is 'builtins.str' reveal_type(f(0)) # E: Revealed type is 'builtins.int' [case testOptionalTypeOrTypePlain] from typing import Optional def f(a: Optional[int]) -> int: return a or 0 [out] [case testOptionalTypeOrTypeTypeVar] from typing import Optional, TypeVar T = TypeVar('T') def f(a: Optional[T], b: T) -> T: return a or b [out] [case testOptionalTypeOrTypeBothOptional] from typing import Optional def f(a: Optional[int], b: Optional[int]) -> None: reveal_type(a or b) def g(a: int, b: Optional[int]) -> None: reveal_type(a or b) [out] main:3: error: Revealed type is 'Union[builtins.int, builtins.None]' main:5: error: Revealed type is 'Union[builtins.int, builtins.None]' [case testOptionalTypeOrTypeComplexUnion] from typing import Union def f(a: Union[int, str, None]) -> None: reveal_type(a or 'default') [out] main:3: error: Revealed type is 'Union[builtins.int, builtins.str]' [case testOptionalTypeOrTypeNoTriggerPlain] from typing import Optional def f(a: Optional[int], b: int) -> int: return b or a [out] main:3: error: Incompatible return value type (got "Optional[int]", expected "int") [case testOptionalTypeOrTypeNoTriggerTypeVar] from typing import Optional, TypeVar T = TypeVar('T') def f(a: Optional[T], b: T) -> T: return b or a [out] main:4: error: Incompatible return value type (got "Optional[T]", expected "T") [case testNoneOrStringIsString] def f() -> str: a = None b = '' return a or b [out] [case testNoneOrTypeVarIsTypeVar] from typing import TypeVar T = TypeVar('T') def f(b: T) -> T: a = None return a or b [out] [case testYieldNothingInFunctionReturningGenerator] from typing import Generator def f() -> Generator[None, None, None]: yield [out] [case testNoneAndStringIsNone] a = None b = "foo" reveal_type(a and b) # E: Revealed type is 'builtins.None' [case testNoneMatchesObjectInOverload] import a a.f(None) [file a.pyi] from typing import overload @overload def f() -> None: ... @overload def f(o: object) -> None: ... [case testGenericSubclassReturningNone] from typing import Generic, TypeVar T = TypeVar('T') class Base(Generic[T]): def f(self) -> T: pass class SubNone(Base[None]): def f(self) -> None: pass class SubInt(Base[int]): def f(self) -> int: return 1 [case testUseOfNoneReturningFunction] from typing import Optional def f() -> None: pass def g(x: Optional[int]) -> int: pass x = f() # E: "f" does not return a value f() + 1 # E: "f" does not return a value g(f()) # E: "f" does not return a value [case testEmptyReturn] def f() -> None: return [case testReturnNone] def f() -> None: return None [case testNoneCallable] from typing import Callable def f() -> None: pass x = f # type: Callable[[], None] [case testOptionalCallable] from typing import Callable, Optional T = Optional[Callable[..., None]] [case testAnyTypeInPartialTypeList] # flags: --check-untyped-defs def f(): ... 
def lookup_field(name, obj): try: pass except: attr = f() else: attr = None [case testTernaryWithNone] reveal_type(None if bool() else 0) # E: Revealed type is 'Union[builtins.int, builtins.None]' [builtins fixtures/bool.pyi] [case testListWithNone] reveal_type([0, None, 0]) # E: Revealed type is 'builtins.list[Union[builtins.int, builtins.None]]' [builtins fixtures/list.pyi] [case testOptionalWhitelistSuppressesOptionalErrors] # flags: --strict-optional-whitelist import a import b [file a.py] from typing import Optional x = None # type: Optional[str] x + "foo" [file b.py] from typing import Optional x = None # type: Optional[int] x + 1 [case testOptionalWhitelistPermitsOtherErrors] # flags: --strict-optional-whitelist import a import b [file a.py] from typing import Optional x = None # type: Optional[str] x + "foo" [file b.py] from typing import Optional x = None # type: Optional[int] x + 1 1 + "foo" [out] tmp/b.py:4: error: Unsupported operand types for + ("int" and "str") [case testOptionalWhitelistPermitsWhitelistedFiles] # flags: --strict-optional-whitelist **/a.py import a import b [file a.py] from typing import Optional x = None # type: Optional[str] x + "foo" [file b.py] from typing import Optional x = None # type: Optional[int] x + 1 [out] tmp/a.py:3: error: Unsupported left operand type for + (some union) [case testNoneContextInference] from typing import Dict, List def f() -> List[None]: return [] def g() -> Dict[None, None]: return {} [builtins fixtures/dict.pyi] [case testRaiseFromNone] raise BaseException from None [builtins fixtures/exception.pyi] [case testOptionalNonPartialTypeWithNone] from typing import Generator def f() -> Generator[str, None, None]: pass x = f() reveal_type(x) # E: Revealed type is 'typing.Generator[builtins.str, builtins.None, builtins.None]' l = [f()] reveal_type(l) # E: Revealed type is 'builtins.list[typing.Generator*[builtins.str, builtins.None, builtins.None]]' [builtins fixtures/list.pyi] [case testNoneListTernary] x = [None] if "" else [1] # E: List item 0 has incompatible type "int"; expected "None" [builtins fixtures/list.pyi] [case testListIncompatibleErrorMessage] from typing import List, Callable def foo(l: List[Callable[[], str]]) -> None: pass def f() -> int: return 42 foo([f]) # E: List item 0 has incompatible type "Callable[[], int]"; expected "Callable[[], str]" [builtins fixtures/list.pyi] [case testInferEqualsNotOptional] from typing import Optional x = '' # type: Optional[str] if x == '': reveal_type(x) # E: Revealed type is 'builtins.str' else: reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.None]' [builtins fixtures/ops.pyi] [case testInferEqualsNotOptionalWithUnion] from typing import Union x = '' # type: Union[str, int, None] if x == '': reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.int]' else: reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]' [builtins fixtures/ops.pyi] [case testInferEqualsNotOptionalWithOverlap] from typing import Union x = '' # type: Union[str, int, None] if x == object(): reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.int]' else: reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]' [builtins fixtures/ops.pyi] [case testInferEqualsStillOptionalWithNoOverlap] from typing import Optional x = '' # type: Optional[str] if x == 0: reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.None]' else: reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.None]' 
[builtins fixtures/ops.pyi] [case testInferEqualsStillOptionalWithBothOptional] from typing import Union x = '' # type: Union[str, int, None] y = '' # type: Union[str, None] if x == y: reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]' else: reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]' [builtins fixtures/ops.pyi] [case testWarnNoReturnWorksWithStrictOptional] # flags: --warn-no-return def f() -> None: 1 + 1 # no error def g() -> int: 1 + 1 # [out] main:5: error: Missing return statement [case testGenericTypeAliasesOptional] from typing import TypeVar, Generic, Optional T = TypeVar('T') class Node(Generic[T]): def __init__(self, x: T) -> None: self.x = x ONode = Optional[Node[T]] def f(x: T) -> ONode[T]: if 1 > 0: return Node(x) else: return None x = None # type: ONode[int] x = f(1) x = f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" x.x = 1 # E: Item "None" of "Optional[Node[int]]" has no attribute "x" if x is not None: x.x = 1 # OK here [builtins fixtures/ops.pyi] [case testOptionalTypeNarrowedInBooleanStatement] from typing import Optional x: Optional[int] = None x is not None and x + 42 x is not None and x + '42' # E: Unsupported operand types for + ("int" and "str") [builtins fixtures/isinstance.pyi] [case testInvalidBooleanBranchIgnored] from typing import Optional x = None x is not None and x + 42 [builtins fixtures/isinstance.pyi] [case testOptionalLambdaInference] from typing import Optional, Callable f = None # type: Optional[Callable[[int], None]] f = lambda x: None f(0) [builtins fixtures/function.pyi] [case testDontSimplifyNoneUnionsWithStrictOptional] from typing import Any, TypeVar, Union A = None # type: Any class C(A): pass T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass a = None # type: Any # Test both orders reveal_type(u(C(), None)) # E: Revealed type is 'Union[builtins.None, __main__.C*]' reveal_type(u(None, C())) # E: Revealed type is 'Union[__main__.C*, builtins.None]' reveal_type(u(a, None)) # E: Revealed type is 'Union[builtins.None, Any]' reveal_type(u(None, a)) # E: Revealed type is 'Union[Any, builtins.None]' reveal_type(u(1, None)) # E: Revealed type is 'Union[builtins.None, builtins.int*]' reveal_type(u(None, 1)) # E: Revealed type is 'Union[builtins.int*, builtins.None]' [case testOptionalAndAnyBaseClass] from typing import Any, Optional A = None # type: Any class C(A): pass x = None # type: Optional[C] x.foo() # E: Item "None" of "Optional[C]" has no attribute "foo" [case testIsinstanceAndOptionalAndAnyBase] from typing import Any, Optional B = None # type: Any class A(B): pass def f(a: Optional[A]): reveal_type(a) # E: Revealed type is 'Union[__main__.A, builtins.None]' if a is not None: reveal_type(a) # E: Revealed type is '__main__.A' else: reveal_type(a) # E: Revealed type is 'builtins.None' reveal_type(a) # E: Revealed type is 'Union[__main__.A, builtins.None]' [builtins fixtures/isinstance.pyi] [case testFlattenOptionalUnion] from typing import Optional, Union x: Optional[Union[int, str]] reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.None]' y: Optional[Union[int, None]] reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' [case testOverloadWithNoneAndOptional] from typing import overload, Optional @overload def f(x: int) -> str: ... @overload def f(x: Optional[int]) -> Optional[str]: ... 
def f(x): return x reveal_type(f(1)) # E: Revealed type is 'builtins.str' reveal_type(f(None)) # E: Revealed type is 'Union[builtins.str, builtins.None]' x: Optional[int] reveal_type(f(x)) # E: Revealed type is 'Union[builtins.str, builtins.None]' [case testUnionTruthinessTracking] from typing import Optional, Any def test_or_shortcut(value: Optional[Any]) -> None: if not value: pass if not value or value.get('foo') == 'hello': pass [builtins fixtures/bool.pyi] [case testNarrowingFromObjectToOptional] from typing import Optional x: object y: Optional[int] x = y reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]' [out] [case testNarrowOptionalOutsideLambda] from typing import Optional class A: a: int def f(x: Optional[A]) -> None: assert x lambda: x.a [builtins fixtures/isinstancelist.pyi] [case testNarrowOptionalOutsideLambdaWithDeferred] from typing import Optional class A: a: int def f(self, x: Optional['A']) -> None: assert x lambda: (self.y, x.a) # E: Cannot determine type of 'y' self.y = int() [builtins fixtures/isinstancelist.pyi] mypy-0.560/test-data/unit/check-overloading.test0000644€tŠÔÚ€2›s®0000010310213215007205026016 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for function overloading [case testOverloadNotImportedNoCrash] @overload def f(a): pass @overload def f(a): pass def f(a): pass f(0) @overload # E: Name 'overload' is not defined def g(a:int): pass def g(a): pass # E: Name 'g' already defined g(0) @something # E: Name 'something' is not defined def r(a:int): pass def r(a): pass # E: Name 'r' already defined r(0) [out] main:1: error: Name 'overload' is not defined main:3: error: Name 'f' already defined main:3: error: Name 'overload' is not defined main:5: error: Name 'f' already defined [case testTypeCheckOverloadWithImplementation] from typing import overload, Any @overload def f(x: 'A') -> 'B': ... @overload def f(x: 'B') -> 'A': ... def f(x: Any) -> Any: pass reveal_type(f(A())) # E: Revealed type is '__main__.B' reveal_type(f(B())) # E: Revealed type is '__main__.A' class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadNeedsImplementation] from typing import overload, Any @overload # E: An overloaded function outside a stub file must have an implementation def f(x: 'A') -> 'B': ... @overload def f(x: 'B') -> 'A': ... reveal_type(f(A())) # E: Revealed type is '__main__.B' reveal_type(f(B())) # E: Revealed type is '__main__.A' class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testSingleOverloadNoImplementation] from typing import overload, Any @overload # E: Single overload definition, multiple required def f(x: 'A') -> 'B': ... class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadByAnyOtherName] from typing import overload as rose from typing import Any @rose def f(x: 'A') -> 'B': ... @rose def f(x: 'B') -> 'A': ... def f(x: Any) -> Any: pass reveal_type(f(A())) # E: Revealed type is '__main__.B' reveal_type(f(B())) # E: Revealed type is '__main__.A' class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithDecoratedImplementation] from typing import overload, Any def deco(fun): ... @overload def f(x: 'A') -> 'B': ... @overload def f(x: 'B') -> 'A': ... 
@deco def f(x: Any) -> Any: pass reveal_type(f(A())) # E: Revealed type is '__main__.B' reveal_type(f(B())) # E: Revealed type is '__main__.A' class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadDecoratedImplementationNotLast] from typing import overload, Any def deco(fun): ... @overload def f(x: 'A') -> 'B': ... @deco # E: The implementation for an overloaded function must come last def f(x: Any) -> Any: pass @overload def f(x: 'B') -> 'A': ... class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadImplementationNotLast] from typing import overload, Any @overload def f(x: 'A') -> 'B': ... def f(x: Any) -> Any: # E: The implementation for an overloaded function must come last pass @overload def f(x: 'B') -> 'A': ... class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testDecoratedRedefinitionIsNotOverload] from typing import overload, Any def deco(fun): ... @deco def f(x: 'A') -> 'B': ... @deco # E: Name 'f' already defined def f(x: 'B') -> 'A': ... @deco # E: Name 'f' already defined def f(x: Any) -> Any: ... class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithImplementationPy2] # flags: --python-version 2.7 from typing import overload @overload def f(x): # type: (A) -> B pass @overload def f(x): # type: (B) -> A pass def f(x): pass reveal_type(f(A())) # E: Revealed type is '__main__.B' reveal_type(f(B())) # E: Revealed type is '__main__.A' class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithImplementationError] from typing import overload, Any @overload def f(x: 'A') -> 'B': ... @overload def f(x: 'B') -> 'A': ... def f(x: Any) -> Any: foo = 1 foo = "bar" # E: Incompatible types in assignment (expression has type "str", variable has type "int") @overload def g(x: 'A') -> 'B': ... @overload def g(x: 'B') -> 'A': ... def g(x): foo = 1 foo = "bar" reveal_type(f(A())) # E: Revealed type is '__main__.B' reveal_type(f(B())) # E: Revealed type is '__main__.A' class A: pass class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithImplTooSpecificArg] from typing import overload, Any class A: pass class B: pass a = A() @overload def f(x: 'A') -> 'B': ... @overload def f(x: 'B') -> 'A': ... def f(x: 'A') -> Any: # E: Overloaded function implementation does not accept all possible arguments of signature 2 pass reveal_type(f(A())) # E: Revealed type is '__main__.B' reveal_type(f(B())) # E: Revealed type is '__main__.A' [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithImplTooSpecificRetType] from typing import overload, Any class A: pass class B: pass a = A() @overload def f(x: 'A') -> 'B': ... @overload def f(x: 'B') -> 'A': ... def f(x: Any) -> 'B': # E: Overloaded function implementation cannot produce return type of signature 2 return B() reveal_type(f(A())) # E: Revealed type is '__main__.B' reveal_type(f(B())) # E: Revealed type is '__main__.A' [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithImplTypeVar] from typing import overload, Any, TypeVar T = TypeVar('T') class A: pass class B: pass a = A() @overload def f(x: 'A') -> 'A': ... @overload def f(x: 'B') -> 'B': ... def f(x: T) -> T: ... 
reveal_type(f(A())) # E: Revealed type is '__main__.A' reveal_type(f(B())) # E: Revealed type is '__main__.B' [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithImplTypeVarProblems] from typing import overload, Any, TypeVar T = TypeVar('T', bound='A') class A: pass class B: pass a = A() @overload def f(x: 'A') -> 'A': ... @overload def f(x: 'B') -> 'B': ... def f(x: Any) -> T: # E: Type variable mismatch between overload signature 2 and implementation ... reveal_type(f(A())) # E: Revealed type is '__main__.A' reveal_type(f(B())) # E: Revealed type is '__main__.B' [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadedFunctionBody] from foo import * [file foo.pyi] from typing import overload @overload def f(x: 'A'): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") x = A() @overload def f(x: 'B'): x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") x = B() class A: pass class B: pass [out] [case testTypeCheckOverloadedMethodBody] from foo import * [file foo.pyi] from typing import overload class A: @overload def f(self, x: 'A'): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") x = A() @overload def f(self, x: 'B'): x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") x = B() class B: pass [out] [case testCallToOverloadedFunction] from foo import * [file foo.pyi] from typing import overload f(C()) # E: No overload variant of "f" matches argument types [foo.C] f(A()) f(B()) @overload def f(x: 'A') -> None: pass @overload def f(x: 'B') -> None: pass class A: pass class B: pass class C: pass [case testOverloadedFunctionReturnValue] from foo import * [file foo.pyi] from typing import overload a, b = None, None # type: (A, B) b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = f(a) b = f(b) @overload def f(x: 'A') -> 'A': pass @overload def f(x: 'B') -> 'B': pass class A: pass class B: pass [case testCallToOverloadedMethod] from foo import * [file foo.pyi] from typing import overload A().f(C()) # E: No overload variant of "f" of "A" matches argument types [foo.C] A().f(A()) A().f(B()) class A: @overload def f(self, x: 'A') -> None: pass @overload def f(self, x: 'B') -> None: pass class B: pass class C: pass [case testOverloadedMethodReturnValue] from foo import * [file foo.pyi] from typing import overload a, b = None, None # type: (A, B) b = a.f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = a.f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = a.f(a) b = a.f(b) class A: @overload def f(self, x: 'A') -> 'A': pass @overload def f(self, x: 'B') -> 'B': pass class B: pass [case testOverloadsWithDifferentArgumentCounts] from foo import * [file foo.pyi] from typing import overload a, b = None, None # type: (A, B) a = f(a) b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") f(b) # E: No overload variant of "f" matches argument types [foo.B] b = f(b, a) a = f(b, a) # E: Incompatible types in assignment (expression has type "B", variable has type "A") f(a, a) # E: No overload variant of "f" matches argument types [foo.A, foo.A] f(b, b) # E: No overload variant of "f" matches argument types [foo.B, foo.B] @overload def f(x: 'A') -> 
'A': pass @overload def f(x: 'B', y: 'A') -> 'B': pass class A: pass class B: pass [case testGenericOverloadVariant] from foo import * [file foo.pyi] from typing import overload, TypeVar, Generic t = TypeVar('t') ab, ac, b, c = None, None, None, None # type: (A[B], A[C], B, C) b = f(ab) c = f(ac) b = f(ac) # E: Incompatible types in assignment (expression has type "C", variable has type "B") b = f(b) c = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "C") @overload def f(x: 'A[t]') -> t: pass @overload def f(x: 'B') -> 'B': pass class A(Generic[t]): pass class B: pass class C: pass [case testOverloadedInit] from foo import * [file foo.pyi] from typing import overload a, b = None, None # type: (A, B) a = A(a) a = A(b) a = A(object()) # E: No overload variant of "A" matches argument types [builtins.object] class A: @overload def __init__(self, a: 'A') -> None: pass @overload def __init__(self, b: 'B') -> None: pass class B: pass [case testIntersectionTypeCompatibility] from foo import * [file foo.pyi] from typing import overload, Callable o = None # type: object a = None # type: A a = f # E: Incompatible types in assignment (expression has type overloaded function, variable has type "A") o = f @overload def f(a: 'A') -> None: pass @overload def f(a: Callable[[], None]) -> None: pass class A: pass [case testCompatibilityOfIntersectionTypeObjectWithStdType] from foo import * [file foo.pyi] from typing import overload t, a = None, None # type: (type, A) a = A # E: Incompatible types in assignment (expression has type "Type[A]", variable has type "A") t = A class A: @overload def __init__(self, a: 'A') -> None: pass @overload def __init__(self, a: 'B') -> None: pass class B: pass [case testOverloadedGetitem] from foo import * [file foo.pyi] from typing import overload a, b = None, None # type: int, str a = A()[a] b = A()[a] # E: Incompatible types in assignment (expression has type "int", variable has type "str") b = A()[b] a = A()[b] # E: Incompatible types in assignment (expression has type "str", variable has type "int") class A: @overload def __getitem__(self, a: int) -> int: pass @overload def __getitem__(self, b: str) -> str: pass [case testOverloadedGetitemWithGenerics] from foo import * [file foo.pyi] from typing import TypeVar, Generic, overload t = TypeVar('t') a, b, c = None, None, None # type: (A, B, C[A]) a = c[a] b = c[a] # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = c[b] b = c[b] # E: Incompatible types in assignment (expression has type "A", variable has type "B") class C(Generic[t]): @overload def __getitem__(self, a: 'A') -> t: pass @overload def __getitem__(self, b: 'B') -> t: pass class A: pass class B: pass [case testImplementingOverloadedMethod] from foo import * [file foo.pyi] from typing import overload from abc import abstractmethod, ABCMeta class I(metaclass=ABCMeta): @overload @abstractmethod def f(self) -> None: pass @overload @abstractmethod def f(self, a: 'A') -> None: pass class A(I): @overload def f(self) -> None: pass @overload def f(self, a: 'A') -> None: pass [case testOverloadWithFunctionType] from foo import * [file foo.pyi] from typing import overload, Callable class A: pass @overload def f(x: A) -> None: pass @overload def f(x: Callable[[], None]) -> None: pass f(A()) [builtins fixtures/function.pyi] [case testVarArgsOverload] from foo import * [file foo.pyi] from typing import overload, Any @overload def f(x: 'A', *more: Any) -> 'A': pass @overload def f(x: 'B', *more: 
Any) -> 'A': pass f(A()) f(A(), A, A) f(B()) f(B(), B) f(B(), B, B) f(object()) # E: No overload variant of "f" matches argument types [builtins.object] class A: pass class B: pass [builtins fixtures/list.pyi] [case testVarArgsOverload2] from foo import * [file foo.pyi] from typing import overload @overload def f(x: 'A', *more: 'B') -> 'A': pass @overload def f(x: 'B', *more: 'A') -> 'A': pass f(A(), B()) f(A(), B(), B()) f(A(), A(), B()) # E: No overload variant of "f" matches argument types [foo.A, foo.A, foo.B] f(A(), B(), A()) # E: No overload variant of "f" matches argument types [foo.A, foo.B, foo.A] class A: pass class B: pass [builtins fixtures/list.pyi] [case testOverloadWithTypeObject] from foo import * [file foo.pyi] from typing import overload @overload def f(a: 'A', t: type) -> None: pass @overload def f(a: 'B', t: type) -> None: pass f(A(), B) f(B(), A) class A: pass class B: pass [builtins fixtures/function.pyi] [case testOverloadedInitAndTypeObjectInOverload] from foo import * [file foo.pyi] from typing import overload @overload def f(t: type) -> 'A': pass @overload def f(t: 'A') -> 'B': pass a, b = None, None # type: (A, B) a = f(A) b = f(a) b = f(A) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = f(a) # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: @overload def __init__(self) -> None: pass @overload def __init__(self, a: 'A') -> None: pass class B: pass [case testOverlappingErasedSignatures] from foo import * [file foo.pyi] from typing import overload, List @overload def f(a: List[int]) -> int: pass @overload def f(a: List[str]) -> int: pass list_int = [] # type: List[int] list_str = [] # type: List[str] list_object = [] # type: List[object] n = f(list_int) m = f(list_str) n = 1 m = 1 n = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int") m = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int") f(list_object) # E: Argument 1 to "f" has incompatible type "List[object]"; expected "List[int]" [builtins fixtures/list.pyi] [case testOverlappingOverloadSignatures] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def f(x: B) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: A) -> str: pass [case testContravariantOverlappingOverloadSignatures] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def f(x: A) -> A: pass @overload def f(x: B) -> B: pass # This is more specific than the first item, and thus # will never be called. [case testPartiallyCovariantOverlappingOverloadSignatures] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def f(x: B) -> A: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: A) -> B: pass [case testPartiallyContravariantOverloadSignatures] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def g(x: A) -> int: pass # Fine, since A us supertype of B. 
@overload def g(x: B) -> str: pass [case testCovariantOverlappingOverloadSignatures] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def g(x: B) -> B: pass @overload def g(x: A) -> A: pass [case testCovariantOverlappingOverloadSignaturesWithSomeSameArgTypes] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def g(x: int, y: B) -> B: pass @overload def g(x: int, y: A) -> A: pass [case testCovariantOverlappingOverloadSignaturesWithAnyType] from foo import * [file foo.pyi] from typing import Any, overload @overload def g(x: int) -> int: pass @overload def g(x: Any) -> Any: pass [case testContravariantOverlappingOverloadSignaturesWithAnyType] from foo import * [file foo.pyi] from typing import Any, overload @overload def g(x: Any) -> Any: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def g(x: int) -> int: pass [case testOverloadedLtAndGtMethods] from foo import * [file foo.pyi] from typing import overload class A: def __lt__(self, x: A) -> int: pass def __gt__(self, x: A) -> int: pass class B: @overload def __lt__(self, x: B) -> int: pass @overload def __lt__(self, x: A) -> int: pass @overload def __gt__(self, x: B) -> int: pass @overload def __gt__(self, x: A) -> int: pass A() < A() A() < B() B() < A() B() < B() A() < object() # E: Unsupported operand types for < ("A" and "object") B() < object() # E: No overload variant of "__lt__" of "B" matches argument types [builtins.object] [case testOverloadedForwardMethodAndCallingReverseMethod] from foo import * [file foo.pyi] from typing import overload class A: @overload def __add__(self, x: 'A') -> int: pass @overload def __add__(self, x: int) -> int: pass class B: def __radd__(self, x: A) -> int: pass A() + A() A() + 1 A() + B() A() + '' # E: No overload variant of "__add__" of "A" matches argument types [builtins.str] [case testOverrideOverloadedMethodWithMoreGeneralArgumentTypes] from foo import * [file foo.pyi] from typing import overload class IntSub(int): pass class StrSub(str): pass class A: @overload def f(self, x: IntSub) -> int: return 0 @overload def f(self, x: StrSub) -> str: return '' class B(A): @overload def f(self, x: int) -> int: return 0 @overload def f(self, x: str) -> str: return '' [out] [case testOverrideOverloadedMethodWithMoreSpecificArgumentTypes] from foo import * [file foo.pyi] from typing import overload class IntSub(int): pass class StrSub(str): pass class A: @overload def f(self, x: int) -> int: return 0 @overload def f(self, x: str) -> str: return '' class B(A): @overload def f(self, x: IntSub) -> int: return 0 @overload def f(self, x: str) -> str: return '' class C(A): @overload def f(self, x: int) -> int: return 0 @overload def f(self, x: StrSub) -> str: return '' class D(A): @overload def f(self, x: int) -> int: return 0 @overload def f(self, x: str) -> str: return '' [out] tmp/foo.pyi:12: error: Signature of "f" incompatible with supertype "A" tmp/foo.pyi:17: error: Signature of "f" incompatible with supertype "A" [case testOverloadingAndDucktypeCompatibility] from foo import * [file foo.pyi] from typing import overload, _promote class A: pass @_promote(A) class B: pass @overload def f(n: B) -> B: return n @overload def f(n: A) -> A: return n f(B()) + 'x' # E: Unsupported left operand type for + ("B") f(A()) + 'x' # E: Unsupported left operand type for + ("A") [case testOverloadingAndIntFloatSubtyping] from foo import * [file foo.pyi] from typing import 
overload @overload def f(x: float) -> None: pass @overload def f(x: str) -> None: pass f(1.1) f('') f(1) f(()) # E: No overload variant of "f" matches argument types [Tuple[]] [builtins fixtures/primitives.pyi] [out] [case testOverloadingVariableInputs] from foo import * [file foo.pyi] from typing import overload @overload def f(x: int, y: int) -> None: pass @overload def f(x: int) -> None: pass f(1) f(1, 2) z = (1, 2) f(*z) [builtins fixtures/primitives.pyi] [out] [case testTypeInferenceSpecialCaseWithOverloading] from foo import * [file foo.pyi] from typing import overload class A: def __add__(self, x: A) -> A: pass class B: def __radd__(self, x: A) -> B: pass @overload def f(x: A) -> A: pass @overload def f(x: B) -> B: pass f(A() + B())() # E: "B" not callable [case testKeywordArgOverload] from foo import * [file foo.pyi] from typing import overload @overload def f(x: int, y: str) -> int: pass @overload def f(x: str, y: int) -> str: pass f(x=1, y='')() # E: "int" not callable f(y=1, x='')() # E: "str" not callable [case testIgnoreOverloadVariantBasedOnKeywordArg] from foo import * [file foo.pyi] from typing import overload @overload def f(x: int) -> int: pass @overload def f(y: int) -> str: pass f(x=1)() # E: "int" not callable f(y=1)() # E: "str" not callable [case testOverloadWithTupleVarArg] from foo import * [file foo.pyi] from typing import overload @overload def f(x: int, y: str) -> int: pass @overload def f(*x: str) -> str: pass f(*(1,))() # E: No overload variant of "f" matches argument types [Tuple[builtins.int]] f(*('',))() # E: "str" not callable f(*(1, ''))() # E: "int" not callable f(*(1, '', 1))() # E: No overload variant of "f" matches argument types [Tuple[builtins.int, builtins.str, builtins.int]] [case testPreferExactSignatureMatchInOverload] from foo import * [file foo.pyi] from typing import overload, List @overload def f(x: int, y: List[int] = None) -> int: pass @overload def f(x: int, y: List[str] = None) -> int: pass f(y=[1], x=0)() # E: "int" not callable f(y=[''], x=0)() # E: "int" not callable a = f(y=[['']], x=0) # E: List item 0 has incompatible type "List[str]"; expected "int" a() # E: "int" not callable [builtins fixtures/list.pyi] [case testOverloadWithDerivedFromAny] from foo import * [file foo.pyi] from typing import Any, overload Base = None # type: Any class C: @overload def __init__(self, a: str) -> None: pass @overload def __init__(self, a: int) -> None: pass class Derived(Base): def to_dict(self) -> C: return C(self) # fails without the fix for #1363 C(Derived()) # fails without the hack C(Base()) # Always ok [case testOverloadWithBoundedTypeVar] from foo import * [file foo.pyi] from typing import overload, TypeVar T = TypeVar('T', bound=str) @overload def f(x: T) -> T: pass @overload def f(x: int) -> bool: pass class mystr(str): pass f('x')() # E: "str" not callable f(1)() # E: "bool" not callable f(1.1) # E: No overload variant of "f" matches argument types [builtins.float] f(mystr())() # E: "mystr" not callable [builtins fixtures/primitives.pyi] [case testOverloadedCallWithVariableTypes] from foo import * [file foo.pyi] from typing import overload, TypeVar, List T = TypeVar('T', bound=str) @overload def f(x: T) -> T: pass @overload def f(x: List[T]) -> None: pass class mystr(str): pass U = TypeVar('U', bound=mystr) V = TypeVar('V') def g(x: U, y: V) -> None: f(x)() # E: "mystr" not callable f(y) # E: No overload variant of "f" matches argument types [V`-2] a = f([x]) # E: "f" does not return a value f([y]) # E: Value of type variable "T" of "f" 
cannot be "V" f([x, y]) # E: Value of type variable "T" of "f" cannot be "object" [builtins fixtures/list.pyi] [out] [case testOverlapWithTypeVars] from foo import * [file foo.pyi] from typing import overload, TypeVar, Sequence T = TypeVar('T', bound=str) @overload def f(x: Sequence[T]) -> None: pass @overload def f(x: Sequence[int]) -> int: pass # These are considered overlapping despite the bound on T due to runtime type erasure. [out] tmp/foo.pyi:4: error: Overloaded function signatures 1 and 2 overlap with incompatible return types [case testOverlapWithTypeVarsWithValues] from foo import * [file foo.pyi] from typing import overload, TypeVar AnyStr = TypeVar('AnyStr', bytes, str) @overload def f(x: int) -> int: pass @overload def f(x: AnyStr) -> str: pass f(1)() # E: "int" not callable f('1')() # E: "str" not callable f(b'1')() # E: "str" not callable f(1.0) # E: No overload variant of "f" matches argument types [builtins.float] @overload def g(x: AnyStr, *a: AnyStr) -> None: pass @overload def g(x: int, *a: AnyStr) -> None: pass g('foo') g('foo', 'bar') g('foo', b'bar') # E: Value of type variable "AnyStr" of "g" cannot be "object" g(1) g(1, 'foo') g(1, 'foo', b'bar') # E: Value of type variable "AnyStr" of "g" cannot be "object" [builtins fixtures/primitives.pyi] [case testBadOverlapWithTypeVarsWithValues] from foo import * [file foo.pyi] from typing import overload, TypeVar AnyStr = TypeVar('AnyStr', bytes, str) @overload def f(x: AnyStr) -> None: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: str) -> bool: pass [builtins fixtures/primitives.pyi] [case testOverlappingOverloadCounting] from foo import * [file foo.pyi] from typing import overload class A: pass class B(A): pass @overload def f(x: int) -> None: pass @overload def f(x: B) -> str: pass # E: Overloaded function signatures 2 and 3 overlap with incompatible return types @overload def f(x: A) -> int: pass [case testOverloadWithTupleMatchingTypeVar] from foo import * [file foo.pyi] from typing import TypeVar, Generic, Tuple, overload T = TypeVar('T') class A(Generic[T]): @overload def f(self, arg: T) -> None: pass @overload def f(self, arg: T, default: int) -> None: pass b = A() # type: A[Tuple[int, int]] b.f((0, 0)) b.f((0, '')) # E: Argument 1 to "f" of "A" has incompatible type "Tuple[int, str]"; expected "Tuple[int, int]" [case testSingleOverloadStub] from foo import * [file foo.pyi] from typing import overload @overload def f(a: int) -> None: pass def f(a: int) -> None: pass [out] tmp/foo.pyi:2: error: Single overload definition, multiple required tmp/foo.pyi:4: error: An implementation for an overloaded function is not allowed in a stub file [case testSingleOverload2] from foo import * [file foo.pyi] from typing import overload def f(a: int) -> None: pass @overload def f(a: str) -> None: pass [out] tmp/foo.pyi:3: error: Name 'f' already defined tmp/foo.pyi:3: error: Single overload definition, multiple required [case testNonconsecutiveOverloads] from foo import * [file foo.pyi] from typing import overload @overload def f(a: int) -> None: pass 1 @overload def f(a: str) -> None: pass [out] tmp/foo.pyi:2: error: Single overload definition, multiple required tmp/foo.pyi:5: error: Name 'f' already defined tmp/foo.pyi:5: error: Single overload definition, multiple required [case testNonconsecutiveOverloadsMissingFirstOverload] from foo import * [file foo.pyi] from typing import overload def f(a: int) -> None: pass 1 @overload def f(a: str) -> None: pass [out] 
tmp/foo.pyi:4: error: Name 'f' already defined tmp/foo.pyi:4: error: Single overload definition, multiple required [case testNonconsecutiveOverloadsMissingLaterOverload] from foo import * [file foo.pyi] from typing import overload @overload def f(a: int) -> None: pass 1 def f(a: str) -> None: pass [out] tmp/foo.pyi:2: error: Single overload definition, multiple required tmp/foo.pyi:5: error: Name 'f' already defined on line 2 [case testOverloadTuple] from foo import * [file foo.pyi] from typing import overload, Tuple @overload def f(x: int, y: Tuple[str, ...]) -> None: pass @overload def f(x: int, y: str) -> None: pass f(1, ('2', '3')) f(1, (2, '3')) # E: Argument 2 to "f" has incompatible type "Tuple[int, str]"; expected "Tuple[str, ...]" f(1, ('2',)) f(1, '2') f(1, (2, 3)) # E: Argument 2 to "f" has incompatible type "Tuple[int, int]"; expected "Tuple[str, ...]" x = ('2', '3') # type: Tuple[str, ...] f(1, x) y = (2, 3) # type: Tuple[int, ...] f(1, y) # E: Argument 2 to "f" has incompatible type "Tuple[int, ...]"; expected "Tuple[str, ...]" [builtins fixtures/tuple.pyi] [case testCallableSpecificOverload] from foo import * [file foo.pyi] from typing import overload, Callable @overload def f(a: Callable[[], int]) -> None: pass @overload def f(a: str) -> None: pass f(0) # E: No overload variant of "f" matches argument types [builtins.int] [case testCustomRedefinitionDecorator] from typing import Any, Callable, Type class Chain(object): def chain(self, function: Callable[[Any], int]) -> 'Chain': return self class Test(object): do_chain = Chain() @do_chain.chain def do_chain(self) -> int: return 2 @do_chain.chain # E: Name 'do_chain' already defined def do_chain(self) -> int: return 3 t = Test() reveal_type(t.do_chain) # E: Revealed type is '__main__.Chain' [case testOverloadWithOverlappingItemsAndAnyArgument1] from typing import overload, Any @overload def f(x: int) -> int: ... @overload def f(x: object) -> object: ... def f(x): pass a: Any reveal_type(f(a)) # E: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument2] from typing import overload, Any @overload def f(x: int) -> int: ... @overload def f(x: float) -> float: ... def f(x): pass a: Any reveal_type(f(a)) # E: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument3] from typing import overload, Any @overload def f(x: int) -> int: ... @overload def f(x: str) -> str: ... def f(x): pass a: Any reveal_type(f(a)) # E: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument4] from typing import overload, Any @overload def f(x: int, y: int, z: str) -> int: ... @overload def f(x: object, y: int, z: str) -> object: ... def f(x): pass a: Any # Any causes ambiguity reveal_type(f(a, 1, '')) # E: Revealed type is 'Any' # Any causes no ambiguity reveal_type(f(1, a, a)) # E: Revealed type is 'builtins.int' reveal_type(f('', a, a)) # E: Revealed type is 'builtins.object' # Like above, but use keyword arguments. reveal_type(f(y=1, z='', x=a)) # E: Revealed type is 'Any' reveal_type(f(y=a, z='', x=1)) # E: Revealed type is 'builtins.int' reveal_type(f(z='', x=1, y=a)) # E: Revealed type is 'builtins.int' reveal_type(f(z='', x=a, y=1)) # E: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument5] from typing import overload, Any, Union @overload def f(x: int) -> int: ... @overload def f(x: Union[int, float]) -> float: ... 
def f(x): pass a: Any reveal_type(f(a)) # E: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument6] from typing import overload, Any @overload def f(x: int, y: int) -> int: ... @overload def f(x: float, y: int, z: str) -> float: ... @overload def f(x: object, y: int, z: str, a: None) -> object: ... def f(x): pass a: Any # Any causes ambiguity reveal_type(f(*a)) # E: Revealed type is 'Any' reveal_type(f(a, *a)) # E: Revealed type is 'Any' reveal_type(f(1, *a)) # E: Revealed type is 'Any' reveal_type(f(1.1, *a)) # E: Revealed type is 'Any' reveal_type(f('', *a)) # E: Revealed type is 'builtins.object' [case testOverloadWithOverlappingItemsAndAnyArgument7] from typing import overload, Any @overload def f(x: int, y: int, z: int) -> int: ... @overload def f(x: object, y: int, z: int) -> object: ... def f(x): pass a: Any # TODO: We could infer 'int' here reveal_type(f(1, *a)) # E: Revealed type is 'Any' [case testOverloadWithOverlappingItemsAndAnyArgument8] from typing import overload, Any @overload def f(x: int, y: int, z: int) -> str: ... @overload def f(x: object, y: int, z: int) -> str: ... def f(x): pass a: Any # The return type is not ambiguous so Any arguments cause no ambiguity. reveal_type(f(a, 1, 1)) # E: Revealed type is 'builtins.str' reveal_type(f(1, *a)) # E: Revealed type is 'builtins.str' [case testOverloadOnOverloadWithType] from typing import Any, Type, TypeVar, overload from mod import MyInt T = TypeVar('T') @overload def make(cls: Type[T]) -> T: pass @overload def make() -> Any: pass def make(*args): pass c = make(MyInt) reveal_type(c) # E: Revealed type is 'mod.MyInt*' [file mod.pyi] from typing import overload class MyInt: @overload def __init__(self, x: str) -> None: pass @overload def __init__(self, x: str, y: int) -> None: pass [out] [case testOverloadTupleInstance] from typing import overload, Tuple, Any class A: ... class A1(A): ... class B: ... class C: ... class D: ... @overload def f(x: A) -> A: ... @overload def f(x: Tuple[C]) -> B: ... @overload def f(x: Tuple[A1, int]) -> C: ... # E: Overloaded function signatures 3 and 5 overlap with incompatible return types @overload def f(x: Tuple[A, str]) -> D: ... @overload def f(x: Tuple[A, int]) -> D: ... @overload def f(x: Tuple[()]) -> D: ... def f(x: Any) -> Any:... [case testOverloadTupleEllipsisNumargs] from typing import overload, Tuple, Any class A: ... class B: ... @overload def r1(x: Tuple[()]) -> B: ... # E: Overloaded function signatures 1 and 4 overlap with incompatible return types @overload def r1(x: Tuple[A]) -> B: ... # E: Overloaded function signatures 2 and 4 overlap with incompatible return types @overload def r1(x: Tuple[A, A]) -> B: ... # E: Overloaded function signatures 3 and 4 overlap with incompatible return types @overload def r1(x: Tuple[A, ...]) -> A: ... def r1(x: Any) -> Any: ... @overload def r2(x: Tuple[A, ...]) -> A: ... @overload def r2(x: Tuple[A, A]) -> B: ... @overload def r2(x: Tuple[A]) -> B: ... @overload def r2(x: Tuple[()]) -> B: ... def r2(x: Any) -> Any: ... [builtins fixtures/tuple.pyi] [case testOverloadTupleEllipsisVariance] from typing import overload, Tuple, Any class A: ... class A1(A): ... class B: ... class C: ... class D: ... @overload def r(x: Tuple[A1, ...]) -> A: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def r(x: Tuple[A, ...]) -> B: ... @overload def r(x: Tuple[B, ...]) -> C: ... def r(x: Any) -> Any:... @overload def g(x: A) -> A: ... @overload def g(x: Tuple[A1, ...]) -> B: ... 
# E: Overloaded function signatures 2 and 3 overlap with incompatible return types @overload def g(x: Tuple[A, A]) -> C: ... @overload def g(x: Tuple[A, B]) -> D: ... def g(x: Any) -> Any:... [builtins fixtures/tuple.pyi] mypy-0.560/test-data/unit/check-protocols.test0000644€tŠÔÚ€2›s®0000015423013215007205025541 0ustar jukkaDROPBOX\Domain Users00000000000000-- Simple protocol types -- --------------------- [case testCannotInstantiateProtocol] from typing import Protocol class P(Protocol): def meth(self) -> None: pass P() # E: Cannot instantiate protocol class "P" [case testSimpleProtocolOneMethod] from typing import Protocol class P(Protocol): def meth(self) -> None: pass class B: pass class C: def meth(self) -> None: pass x: P def fun(x: P) -> None: x.meth() x.meth(x) # E: Too many arguments for "meth" of "P" x.bad # E: "P" has no attribute "bad" x = C() x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "P") fun(C()) fun(B()) # E: Argument 1 to "fun" has incompatible type "B"; expected "P" def fun2() -> P: return C() def fun3() -> P: return B() # E: Incompatible return value type (got "B", expected "P") [case testSimpleProtocolOneAbstractMethod] from typing import Protocol from abc import abstractmethod class P(Protocol): @abstractmethod def meth(self) -> None: pass class B: pass class C: def meth(self) -> None: pass class D(B): def meth(self) -> None: pass x: P def fun(x: P) -> None: x.meth() x.meth(x) # E: Too many arguments for "meth" of "P" x.bad # E: "P" has no attribute "bad" x = C() x = D() x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "P") fun(C()) fun(D()) fun(B()) # E: Argument 1 to "fun" has incompatible type "B"; expected "P" fun(x) [case testProtocolMethodBodies] from typing import Protocol, List class P(Protocol): def meth(self) -> int: return 'no way' # E: Incompatible return value type (got "str", expected "int") # explicit ellipsis is OK in protocol methods class P2(Protocol): def meth2(self) -> List[int]: ... 
[builtins fixtures/list.pyi] [case testSimpleProtocolOneMethodOverride] from typing import Protocol, Union class P(Protocol): def meth(self) -> Union[int, str]: pass class SubP(P, Protocol): def meth(self) -> int: pass class B: pass class C: def meth(self) -> int: pass z: P x: SubP def fun(x: SubP) -> str: x.bad # E: "SubP" has no attribute "bad" return x.meth() # E: Incompatible return value type (got "int", expected "str") z = x x = C() x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "SubP") reveal_type(fun(C())) # E: Revealed type is 'builtins.str' fun(B()) # E: Argument 1 to "fun" has incompatible type "B"; expected "SubP" [case testSimpleProtocolTwoMethodsMerge] from typing import Protocol class P1(Protocol): def meth1(self) -> int: pass class P2(Protocol): def meth2(self) -> str: pass class P(P1, P2, Protocol): pass class B: pass class C1: def meth1(self) -> int: pass class C2(C1): def meth2(self) -> str: pass class C: def meth1(self) -> int: pass def meth2(self) -> str: pass class AnotherP(Protocol): def meth1(self) -> int: pass def meth2(self) -> str: pass x: P reveal_type(x.meth1()) # E: Revealed type is 'builtins.int' reveal_type(x.meth2()) # E: Revealed type is 'builtins.str' c: C c1: C1 c2: C2 y: AnotherP x = c x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "P") x = c1 # E: Incompatible types in assignment (expression has type "C1", variable has type "P") \ # N: 'C1' is missing following 'P' protocol member: \ # N: meth2 x = c2 x = y y = x [case testSimpleProtocolTwoMethodsExtend] from typing import Protocol class P1(Protocol): def meth1(self) -> int: pass class P2(P1, Protocol): def meth2(self) -> str: pass class Cbad: def meth1(self) -> int: pass class C: def meth1(self) -> int: pass def meth2(self) -> str: pass x: P2 reveal_type(x.meth1()) # E: Revealed type is 'builtins.int' reveal_type(x.meth2()) # E: Revealed type is 'builtins.str' x = C() # OK x = Cbad() # E: Incompatible types in assignment (expression has type "Cbad", variable has type "P2") \ # N: 'Cbad' is missing following 'P2' protocol member: \ # N: meth2 [case testProtocolMethodVsAttributeErrors] from typing import Protocol class P(Protocol): def meth(self) -> int: pass class C: meth: int x: P = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P") \ # N: Following member(s) of "C" have conflicts: \ # N: meth: expected "Callable[[], int]", got "int" [case testProtocolMethodVsAttributeErrors2] from typing import Protocol class P(Protocol): @property def meth(self) -> int: pass class C: def meth(self) -> int: pass x: P = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P") \ # N: Following member(s) of "C" have conflicts: \ # N: meth: expected "int", got "Callable[[], int]" [builtins fixtures/property.pyi] [case testCannotAssignNormalToProtocol] from typing import Protocol class P(Protocol): def meth(self) -> int: pass class C: def meth(self) -> int: pass x: C y: P x = y # E: Incompatible types in assignment (expression has type "P", variable has type "C") [case testIndependentProtocolSubtyping] from typing import Protocol class P1(Protocol): def meth(self) -> int: pass class P2(Protocol): def meth(self) -> int: pass x1: P1 x2: P2 x1 = x2 x2 = x1 def f1(x: P1) -> None: pass def f2(x: P2) -> None: pass f1(x2) f2(x1) [case testNoneDisablesProtocolImplementation] from typing import Protocol class MyHashable(Protocol): def __my_hash__(self) -> int: return 0 
class C: __my_hash__ = None var: MyHashable = C() # E: Incompatible types in assignment (expression has type "C", variable has type "MyHashable") [case testNoneDisablesProtocolSubclassingWithStrictOptional] # flags: --strict-optional from typing import Protocol class MyHashable(Protocol): def __my_hash__(self) -> int: return 0 class C(MyHashable): __my_hash__ = None # E: Incompatible types in assignment \ (expression has type "None", base class "MyHashable" defined the type as "Callable[[MyHashable], int]") [case testProtocolsWithNoneAndStrictOptional] # flags: --strict-optional from typing import Protocol class P(Protocol): x = 0 # type: int class C: x = None x: P = C() # Error! def f(x: P) -> None: pass f(C()) # Error! [out] main:9: error: Incompatible types in assignment (expression has type "C", variable has type "P") main:9: note: Following member(s) of "C" have conflicts: main:9: note: x: expected "int", got "None" main:11: error: Argument 1 to "f" has incompatible type "C"; expected "P" main:11: note: Following member(s) of "C" have conflicts: main:11: note: x: expected "int", got "None" -- Semanal errors in protocol types -- -------------------------------- [case testBasicSemanalErrorsInProtocols] from typing import Protocol, Generic, TypeVar, Iterable T = TypeVar('T', covariant=True) S = TypeVar('S', covariant=True) class P1(Protocol[T, T]): # E: Duplicate type variables in Generic[...] or Protocol[...] def meth(self) -> T: pass class P2(Protocol[T], Protocol[S]): # E: Only single Generic[...] or Protocol[...] can be in bases def meth(self) -> T: pass class P3(Protocol[T], Generic[S]): # E: Only single Generic[...] or Protocol[...] can be in bases def meth(self) -> T: pass class P4(Protocol[T]): attr: Iterable[S] # E: Invalid type "__main__.S" class P5(Iterable[S], Protocol[T]): # E: If Generic[...] or Protocol[...] is present it should list all type variables def meth(self) -> T: pass [case testProhibitSelfDefinitionInProtocols] from typing import Protocol class P(Protocol): def __init__(self, a: int) -> None: self.a = a # E: Protocol members cannot be defined via assignment to self \ # E: "P" has no attribute "a" class B: pass class C: def __init__(self, a: int) -> None: pass x: P x = B() # The above has an incompatible __init__, but mypy ignores this for nominal subtypes? 
x = C(1) class P2(Protocol): a: int def __init__(self) -> None: self.a = 1 class B2(P2): a: int x2: P2 = B2() # OK [case testProtocolAndRuntimeAreDefinedAlsoInTypingExtensions] from typing_extensions import Protocol, runtime @runtime class P(Protocol): def meth(self) -> int: pass x: object if isinstance(x, P): reveal_type(x) # E: Revealed type is '__main__.P' reveal_type(x.meth()) # E: Revealed type is 'builtins.int' class C: def meth(self) -> int: pass z: P = C() [builtins fixtures/dict.pyi] [case testProtocolsCannotInheritFromNormal] from typing import Protocol class C: pass class D: pass class P(C, Protocol): # E: All bases of a protocol must be protocols attr: int class P2(P, D, Protocol): # E: All bases of a protocol must be protocols pass P2() # E: Cannot instantiate abstract class 'P2' with abstract attribute 'attr' p: P2 reveal_type(p.attr) # E: Revealed type is 'builtins.int' -- Generic protocol types -- ---------------------- [case testGenericMethodWithProtocol] from typing import Protocol, TypeVar T = TypeVar('T') class P(Protocol): def meth(self, x: int) -> int: return x class C: def meth(self, x: T) -> T: return x x: P = C() [case testGenericMethodWithProtocol2] from typing import Protocol, TypeVar T = TypeVar('T') class P(Protocol): def meth(self, x: T) -> T: return x class C: def meth(self, x: int) -> int: return x x: P = C() [out] main:11: error: Incompatible types in assignment (expression has type "C", variable has type "P") main:11: note: Following member(s) of "C" have conflicts: main:11: note: Expected: main:11: note: def [T] meth(self, x: T) -> T main:11: note: Got: main:11: note: def meth(self, x: int) -> int [case testAutomaticProtocolVariance] from typing import TypeVar, Protocol T = TypeVar('T') # In case of these errors we proceed with declared variance. 
class Pco(Protocol[T]): # E: Invariant type variable 'T' used in protocol where covariant one is expected def meth(self) -> T: pass class Pcontra(Protocol[T]): # E: Invariant type variable 'T' used in protocol where contravariant one is expected def meth(self, x: T) -> None: pass class Pinv(Protocol[T]): attr: T class A: pass class B(A): pass x1: Pco[B] y1: Pco[A] x1 = y1 # E: Incompatible types in assignment (expression has type "Pco[A]", variable has type "Pco[B]") y1 = x1 # E: Incompatible types in assignment (expression has type "Pco[B]", variable has type "Pco[A]") x2: Pcontra[B] y2: Pcontra[A] y2 = x2 # E: Incompatible types in assignment (expression has type "Pcontra[B]", variable has type "Pcontra[A]") x2 = y2 # E: Incompatible types in assignment (expression has type "Pcontra[A]", variable has type "Pcontra[B]") x3: Pinv[B] y3: Pinv[A] y3 = x3 # E: Incompatible types in assignment (expression has type "Pinv[B]", variable has type "Pinv[A]") x3 = y3 # E: Incompatible types in assignment (expression has type "Pinv[A]", variable has type "Pinv[B]") [case testProtocolVarianceWithCallableAndList] from typing import Protocol, TypeVar, Callable, List T = TypeVar('T') S = TypeVar('S') T_co = TypeVar('T_co', covariant=True) class P(Protocol[T, S]): # E: Invariant type variable 'T' used in protocol where covariant one is expected \ # E: Invariant type variable 'S' used in protocol where contravariant one is expected def fun(self, callback: Callable[[T], S]) -> None: pass class P2(Protocol[T_co]): # E: Covariant type variable 'T_co' used in protocol where invariant one is expected lst: List[T_co] [builtins fixtures/list.pyi] [case testProtocolVarianceWithUnusedVariable] from typing import Protocol, TypeVar T = TypeVar('T') class P(Protocol[T]): # E: Invariant type variable 'T' used in protocol where covariant one is expected attr: int [case testGenericProtocolsInference1] from typing import Protocol, Sequence, TypeVar T = TypeVar('T', covariant=True) class Closeable(Protocol[T]): def close(self) -> T: pass class F: def close(self) -> int: return 0 def close(arg: Closeable[T]) -> T: return arg.close() def close_all(args: Sequence[Closeable[T]]) -> T: for arg in args: arg.close() return args[0].close() arg: Closeable[int] reveal_type(close(F())) # E: Revealed type is 'builtins.int*' reveal_type(close(arg)) # E: Revealed type is 'builtins.int*' reveal_type(close_all([F()])) # E: Revealed type is 'builtins.int*' reveal_type(close_all([arg])) # E: Revealed type is 'builtins.int*' [builtins fixtures/isinstancelist.pyi] [case testProtocolGenericInference2] from typing import Generic, TypeVar, Protocol T = TypeVar('T') S = TypeVar('S') class P(Protocol[T, S]): x: T y: S class C: x: int y: int def fun3(x: P[T, T]) -> T: pass reveal_type(fun3(C())) # E: Revealed type is 'builtins.int*' [case testProtocolGenericInferenceCovariant] from typing import Generic, TypeVar, Protocol T = TypeVar('T', covariant=True) S = TypeVar('S', covariant=True) U = TypeVar('U') class P(Protocol[T, S]): def x(self) -> T: pass def y(self) -> S: pass class C: def x(self) -> int: pass def y(self) -> int: pass def fun4(x: U, y: P[U, U]) -> U: pass reveal_type(fun4('a', C())) # E: Revealed type is 'builtins.object*' [case testUnrealtedGenericProtolsEquivalent] from typing import TypeVar, Protocol T = TypeVar('T') class PA(Protocol[T]): attr: int def meth(self) -> T: pass def other(self, arg: T) -> None: pass class PB(Protocol[T]): # exactly the same as above attr: int def meth(self) -> T: pass def other(self, arg: T) -> None: 
pass def fun(x: PA[T]) -> PA[T]: y: PB[T] = x z: PB[T] return z x: PA y: PB x = y y = x xi: PA[int] yi: PB[int] xi = yi yi = xi [case testGenericSubProtocols] from typing import TypeVar, Protocol, Tuple, Generic T = TypeVar('T') S = TypeVar('S') class P1(Protocol[T]): attr1: T class P2(P1[T], Protocol[T, S]): attr2: Tuple[T, S] class C: def __init__(self, a1: int, a2: Tuple[int, int]) -> None: self.attr1 = a1 self.attr2 = a2 c: C var: P2[int, int] = c var2: P2[int, str] = c # E: Incompatible types in assignment (expression has type "C", variable has type "P2[int, str]") \ # N: Following member(s) of "C" have conflicts: \ # N: attr2: expected "Tuple[int, str]", got "Tuple[int, int]" class D(Generic[T]): attr1: T class E(D[T]): attr2: Tuple[T, T] def f(x: T) -> T: z: P2[T, T] = E[T]() y: P2[T, T] = D[T]() # E: Incompatible types in assignment (expression has type "D[T]", variable has type "P2[T, T]") \ # N: 'D' is missing following 'P2' protocol member: \ # N: attr2 return x [builtins fixtures/isinstancelist.pyi] [case testGenericSubProtocolsExtensionInvariant] from typing import TypeVar, Protocol, Union T = TypeVar('T') S = TypeVar('S') class P1(Protocol[T]): attr1: T class P2(Protocol[T]): attr2: T class P(P1[T], P2[S], Protocol): pass class C: attr1: int attr2: str class A: attr1: A class B: attr2: B class D(A, B): pass x: P = D() # Same as P[Any, Any] var: P[Union[int, P], Union[P, str]] = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P[Union[int, P[Any, Any]], Union[P[Any, Any], str]]") \ # N: Following member(s) of "C" have conflicts: \ # N: attr1: expected "Union[int, P[Any, Any]]", got "int" \ # N: attr2: expected "Union[P[Any, Any], str]", got "str" [case testGenericSubProtocolsExtensionCovariant] from typing import TypeVar, Protocol, Union T = TypeVar('T', covariant=True) S = TypeVar('S', covariant=True) class P1(Protocol[T]): def attr1(self) -> T: pass class P2(Protocol[T]): def attr2(self) -> T: pass class P(P1[T], P2[S], Protocol): pass class C: def attr1(self) -> int: pass def attr2(self) -> str: pass var: P[Union[int, P], Union[P, str]] = C() # OK for covariant var2: P[Union[str, P], Union[P, int]] = C() [out] main:18: error: Incompatible types in assignment (expression has type "C", variable has type "P[Union[str, P[Any, Any]], Union[P[Any, Any], int]]") main:18: note: Following member(s) of "C" have conflicts: main:18: note: Expected: main:18: note: def attr1(self) -> Union[str, P[Any, Any]] main:18: note: Got: main:18: note: def attr1(self) -> int main:18: note: Expected: main:18: note: def attr2(self) -> Union[P[Any, Any], int] main:18: note: Got: main:18: note: def attr2(self) -> str [case testSelfTypesWithProtocolsBehaveAsWithNominal] from typing import Protocol, TypeVar T = TypeVar('T', bound=Shape) class Shape(Protocol): def combine(self: T, other: T) -> T: pass class NonProtoShape: def combine(self: T, other: T) -> T: pass class Circle: def combine(self: T, other: Shape) -> T: pass class Triangle: def combine(self, other: Shape) -> Shape: pass class Bad: def combine(self, other: int) -> str: pass def f(s: Shape) -> None: pass s: Shape f(NonProtoShape()) f(Circle()) s = Triangle() s = Bad() n2: NonProtoShape = s [out] main:26: error: Incompatible types in assignment (expression has type "Triangle", variable has type "Shape") main:26: note: Following member(s) of "Triangle" have conflicts: main:26: note: Expected: main:26: note: def combine(self, other: Triangle) -> Triangle main:26: note: Got: main:26: note: def combine(self, 
other: Shape) -> Shape main:27: error: Incompatible types in assignment (expression has type "Bad", variable has type "Shape") main:27: note: Following member(s) of "Bad" have conflicts: main:27: note: Expected: main:27: note: def combine(self, other: Bad) -> Bad main:27: note: Got: main:27: note: def combine(self, other: int) -> str main:29: error: Incompatible types in assignment (expression has type "Shape", variable has type "NonProtoShape") [case testBadVarianceInProtocols] from typing import Protocol, TypeVar T_co = TypeVar('T_co', covariant=True) T_contra = TypeVar('T_contra', contravariant=True) class Proto(Protocol[T_co, T_contra]): # type: ignore def one(self, x: T_co) -> None: # E: Cannot use a covariant type variable as a parameter pass def other(self) -> T_contra: # E: Cannot use a contravariant type variable as return type pass # Check that we respect user overrides of variance after the errors are reported x: Proto[int, float] y: Proto[float, int] y = x # OK [builtins fixtures/list.pyi] [case testSubtleBadVarianceInProtocols] from typing import Protocol, TypeVar, Iterable, Sequence T_co = TypeVar('T_co', covariant=True) T_contra = TypeVar('T_contra', contravariant=True) class Proto(Protocol[T_co, T_contra]): # E: Covariant type variable 'T_co' used in protocol where contravariant one is expected \ # E: Contravariant type variable 'T_contra' used in protocol where covariant one is expected def one(self, x: Iterable[T_co]) -> None: pass def other(self) -> Sequence[T_contra]: pass # Check that we respect user overrides of variance after the errors are reported x: Proto[int, float] y: Proto[float, int] y = x # OK [builtins fixtures/list.pyi] -- Recursive protocol types -- ------------------------ [case testRecursiveProtocols1] from typing import Protocol, Sequence, List, Generic, TypeVar T = TypeVar('T') class Traversable(Protocol): @property def leaves(self) -> Sequence[Traversable]: pass class C: pass class D(Generic[T]): leaves: List[D[T]] t: Traversable t = D[int]() # OK t = C() # E: Incompatible types in assignment (expression has type "C", variable has type "Traversable") [builtins fixtures/list.pyi] [case testRecursiveProtocols2] from typing import Protocol, TypeVar T = TypeVar('T') class Linked(Protocol[T]): val: T def next(self) -> Linked[T]: pass class L: val: int def next(self) -> L: pass def last(seq: Linked[T]) -> T: pass reveal_type(last(L())) # E: Revealed type is 'builtins.int*' [builtins fixtures/list.pyi] [case testRecursiveProtocolSubtleMismatch] from typing import Protocol, TypeVar T = TypeVar('T') class Linked(Protocol[T]): val: T def next(self) -> Linked[T]: pass class L: val: int def next(self) -> int: pass def last(seq: Linked[T]) -> T: pass last(L()) # E: Argument 1 to "last" has incompatible type "L"; expected "Linked[]" [case testMutuallyRecursiveProtocols] from typing import Protocol, Sequence, List class P1(Protocol): @property def attr1(self) -> Sequence[P2]: pass class P2(Protocol): @property def attr2(self) -> Sequence[P1]: pass class C: pass class A: attr1: List[B] class B: attr2: List[A] t: P1 t = A() # OK t = B() # E: Incompatible types in assignment (expression has type "B", variable has type "P1") t = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P1") [builtins fixtures/list.pyi] [case testMutuallyRecursiveProtocolsTypesWithSubteMismatch] from typing import Protocol, Sequence, List class P1(Protocol): @property def attr1(self) -> Sequence[P2]: pass class P2(Protocol): @property def attr2(self) -> 
Sequence[P1]: pass class C: pass class A: attr1: List[B] class B: attr2: List[C] t: P1 t = A() # E: Incompatible types in assignment (expression has type "A", variable has type "P1") \ # N: Following member(s) of "A" have conflicts: \ # N: attr1: expected "Sequence[P2]", got "List[B]" [builtins fixtures/list.pyi] [case testMutuallyRecursiveProtocolsTypesWithSubteMismatchWriteable] from typing import Protocol class P1(Protocol): @property def attr1(self) -> P2: pass class P2(Protocol): attr2: P1 class A: attr1: B class B: attr2: A x: P1 = A() # E: Incompatible types in assignment (expression has type "A", variable has type "P1") \ # N: Following member(s) of "A" have conflicts: \ # N: attr1: expected "P2", got "B" [builtins fixtures/property.pyi] -- FIXME: things like this should work [case testWeirdRecursiveInferenceForProtocols-skip] from typing import Protocol, TypeVar, Generic T_co = TypeVar('T_co', covariant=True) T = TypeVar('T') class P(Protocol[T_co]): def meth(self) -> P[T_co]: pass class C(Generic[T]): def meth(self) -> C[T]: pass x: C[int] def f(arg: P[T]) -> T: pass reveal_type(f(x)) #E: Revealed type is 'builtins.int*' -- @property, @classmethod and @staticmethod in protocol types -- ----------------------------------------------------------- [case testCannotInstantiateAbstractMethodExplicitProtocolSubtypes] from typing import Protocol from abc import abstractmethod class P(Protocol): @abstractmethod def meth(self) -> int: pass class A(P): pass A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'meth' class C(A): def meth(self) -> int: pass class C2(P): def meth(self) -> int: pass C() C2() [case testCannotInstantiateAbstractVariableExplicitProtocolSubtypes] from typing import Protocol class P(Protocol): attr: int class A(P): pass A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'attr' class C(A): attr: int class C2(P): def __init__(self) -> None: self.attr = 1 C() C2() class P2(Protocol): attr: int = 1 class B(P2): pass B() # OK, attr is not abstract [case testClassVarsInProtocols] from typing import Protocol, ClassVar class PInst(Protocol): v: int class PClass(Protocol): v: ClassVar[int] class CInst: v: int class CClass: v: ClassVar[int] x: PInst y: PClass x = CInst() x = CClass() # E: Incompatible types in assignment (expression has type "CClass", variable has type "PInst") \ # N: Protocol member PInst.v expected instance variable, got class variable y = CClass() y = CInst() # E: Incompatible types in assignment (expression has type "CInst", variable has type "PClass") \ # N: Protocol member PClass.v expected class variable, got instance variable [case testPropertyInProtocols] from typing import Protocol class PP(Protocol): @property def attr(self) -> int: pass class P(Protocol): attr: int x: P y: PP y = x x2: P y2: PP x2 = y2 # E: Incompatible types in assignment (expression has type "PP", variable has type "P") \ # N: Protocol member P.attr expected settable variable, got read-only attribute [builtins fixtures/property.pyi] [case testSettablePropertyInProtocols] from typing import Protocol class PPS(Protocol): @property def attr(self) -> int: pass @attr.setter def attr(self, x: int) -> None: pass class PP(Protocol): @property def attr(self) -> int: pass class P(Protocol): attr: int x: P z: PPS z = x x2: P z2: PPS x2 = z2 y3: PP z3: PPS y3 = z3 y4: PP z4: PPS z4 = y4 # E: Incompatible types in assignment (expression has type "PP", variable has type "PPS") \ # N: Protocol member PPS.attr expected settable variable, got read-only 
attribute [builtins fixtures/property.pyi] [case testStaticAndClassMethodsInProtocols] from typing import Protocol, Type, TypeVar class P(Protocol): def meth(self, x: int) -> str: pass class PC(Protocol): @classmethod def meth(cls, x: int) -> str: pass class B: @staticmethod def meth(x: int) -> str: pass class C: def meth(self, x: int) -> str: pass x: P x = C() x = B() y: PC y = B() y = C() # E: Incompatible types in assignment (expression has type "C", variable has type "PC") \ # N: Protocol member PC.meth expected class or static method [builtins fixtures/classmethod.pyi] [case testOverloadedMethodsInProtocols] from typing import overload, Protocol, Union class P(Protocol): @overload def f(self, x: int) -> int: pass @overload def f(self, x: str) -> str: pass class C: def f(self, x: Union[int, str]) -> None: pass class D: def f(self, x: int) -> None: pass x: P = C() x = D() [out] main:17: error: Incompatible types in assignment (expression has type "D", variable has type "P") main:17: note: Following member(s) of "D" have conflicts: main:17: note: Expected: main:17: note: @overload main:17: note: def f(self, x: int) -> int main:17: note: @overload main:17: note: def f(self, x: str) -> str main:17: note: Got: main:17: note: def f(self, x: int) -> None [case testCannotInstantiateProtocolWithOverloadedUnimplementedMethod] from typing import overload, Protocol class P(Protocol): @overload def meth(self, x: int) -> int: pass @overload def meth(self, x: str) -> bytes: pass class C(P): pass C() # E: Cannot instantiate abstract class 'C' with abstract attribute 'meth' [case testCanUseOverloadedImplementationsInProtocols] from typing import overload, Protocol, Union class P(Protocol): @overload def meth(self, x: int) -> int: pass @overload def meth(self, x: str) -> bool: pass def meth(self, x: Union[int, str]): if isinstance(x, int): return x return True class C(P): pass x = C() reveal_type(x.meth('hi')) # E: Revealed type is 'builtins.bool' [builtins fixtures/isinstance.pyi] [case testProtocolsWithIdenticalOverloads] from typing import overload, Protocol class PA(Protocol): @overload def meth(self, x: int) -> int: pass @overload def meth(self, x: str) -> bytes: pass class PB(Protocol): # identical to above @overload def meth(self, x: int) -> int: pass @overload def meth(self, x: str) -> bytes: pass x: PA y: PB x = y def fun(arg: PB) -> None: pass fun(x) [case testProtocolsWithIncompatibleOverloads] from typing import overload, Protocol class PA(Protocol): @overload def meth(self, x: int) -> int: pass @overload def meth(self, x: str) -> bytes: pass class PB(Protocol): @overload def meth(self, x: int) -> int: pass @overload def meth(self, x: bytes) -> str: pass x: PA y: PB x = y [out] main:16: error: Incompatible types in assignment (expression has type "PB", variable has type "PA") main:16: note: Following member(s) of "PB" have conflicts: main:16: note: Expected: main:16: note: @overload main:16: note: def meth(self, x: int) -> int main:16: note: @overload main:16: note: def meth(self, x: str) -> bytes main:16: note: Got: main:16: note: @overload main:16: note: def meth(self, x: int) -> int main:16: note: @overload main:16: note: def meth(self, x: bytes) -> str -- Join and meet with protocol types -- --------------------------------- [case testJoinProtocolWithProtocol] from typing import Protocol class P(Protocol): attr: int class P2(Protocol): attr: int attr2: str x: P y: P2 l0 = [x, x] l1 = [y, y] l = [x, y] reveal_type(l0) # E: Revealed type is 'builtins.list[__main__.P*]' reveal_type(l1) # E: 
Revealed type is 'builtins.list[__main__.P2*]' reveal_type(l) # E: Revealed type is 'builtins.list[__main__.P*]' [builtins fixtures/list.pyi] [case testJoinOfIncompatibleProtocols] from typing import Protocol class P(Protocol): attr: int class P2(Protocol): attr2: str x: P y: P2 reveal_type([x, y]) # E: Revealed type is 'builtins.list[builtins.object*]' [builtins fixtures/list.pyi] [case testJoinProtocolWithNormal] from typing import Protocol class P(Protocol): attr: int class C: attr: int x: P y: C l = [x, y] reveal_type(l) # E: Revealed type is 'builtins.list[__main__.P*]' [builtins fixtures/list.pyi] [case testMeetProtocolWithProtocol] from typing import Protocol, Callable, TypeVar class P(Protocol): attr: int class P2(Protocol): attr: int attr2: str T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: P, y: P2) -> None: pass reveal_type(f(g)) # E: Revealed type is '__main__.P2*' [case testMeetOfIncompatibleProtocols] from typing import Protocol, Callable, TypeVar class P(Protocol): attr: int class P2(Protocol): attr2: str T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: P, y: P2) -> None: pass x = f(g) # E: "f" does not return a value [case testMeetProtocolWithNormal] from typing import Protocol, Callable, TypeVar class P(Protocol): attr: int class C: attr: int T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: P, y: C) -> None: pass reveal_type(f(g)) # E: Revealed type is '__main__.C*' [case testInferProtocolFromProtocol] from typing import Protocol, Sequence, TypeVar, Generic T = TypeVar('T') class Box(Protocol[T]): content: T class Linked(Protocol[T]): val: T def next(self) -> Linked[T]: pass class L(Generic[T]): val: Box[T] def next(self) -> L[T]: pass def last(seq: Linked[T]) -> T: pass reveal_type(last(L[int]())) # E: Revealed type is '__main__.Box*[builtins.int*]' reveal_type(last(L[str]()).content) # E: Revealed type is 'builtins.str*' [case testOverloadOnProtocol] from typing import overload, Protocol, runtime @runtime class P1(Protocol): attr1: int class P2(Protocol): attr2: str class C1: attr1: int class C2: attr2: str class C: pass @overload def f(x: P1) -> int: ... @overload def f(x: P2) -> str: ... def f(x): if isinstance(x, P1): return P1.attr1 if isinstance(x, P2): # E: Only @runtime protocols can be used with instance and class checks return P1.attr2 reveal_type(f(C1())) # E: Revealed type is 'builtins.int' reveal_type(f(C2())) # E: Revealed type is 'builtins.str' class D(C1, C2): pass # Compatible with both P1 and P2 # FIXME: the below is not right, see #1322 reveal_type(f(D())) # E: Revealed type is 'Any' f(C()) # E: No overload variant of "f" matches argument types [__main__.C] [builtins fixtures/isinstance.pyi] -- Unions of protocol types -- ------------------------ [case testBasicUnionsOfProtocols] from typing import Union, Protocol class P1(Protocol): attr1: int class P2(Protocol): attr2: int class C1: attr1: int class C2: attr2: int class C(C1, C2): pass class B: ... 
x: Union[P1, P2] x = C1() x = C2() x = C() x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "Union[P1, P2]") [case testUnionsOfNormalClassesWithProtocols] from typing import Protocol, Union class P1(Protocol): attr1: int class P2(Protocol): attr2: int class C1: attr1: int class C2: attr2: int class C(C1, C2): pass class D1: attr1: int def f1(x: P1) -> None: pass def f2(x: P2) -> None: pass x: Union[C1, C2] y: Union[C1, D1] z: Union[C, D1] f1(x) # E: Argument 1 to "f1" has incompatible type "Union[C1, C2]"; expected "P1" f1(y) f1(z) f2(x) # E: Argument 1 to "f2" has incompatible type "Union[C1, C2]"; expected "P2" f2(z) # E: Argument 1 to "f2" has incompatible type "Union[C, D1]"; expected "P2" -- Type[] with protocol types -- -------------------------- [case testInstantiationProtocolInTypeForFunctions] from typing import Type, Protocol class P(Protocol): def m(self) -> None: pass class P1(Protocol): def m(self) -> None: pass class Pbad(Protocol): def mbad(self) -> int: pass class B(P): pass class C: def m(self) -> None: pass def f(cls: Type[P]) -> P: return cls() # OK def g() -> P: return P() # E: Cannot instantiate protocol class "P" f(P) # E: Only concrete class can be given where "Type[P]" is expected f(B) # OK f(C) # OK x: Type[P1] xbad: Type[Pbad] f(x) # OK f(xbad) # E: Argument 1 to "f" has incompatible type "Type[Pbad]"; expected "Type[P]" [case testInstantiationProtocolInTypeForAliases] from typing import Type, Protocol class P(Protocol): def m(self) -> None: pass class C: def m(self) -> None: pass def f(cls: Type[P]) -> P: return cls() # OK Alias = P GoodAlias = C Alias() # E: Cannot instantiate protocol class "P" GoodAlias() f(Alias) # E: Only concrete class can be given where "Type[P]" is expected f(GoodAlias) [case testInstantiationProtocolInTypeForVariables] from typing import Type, Protocol class P(Protocol): def m(self) -> None: pass class B(P): pass class C: def m(self) -> None: pass var: Type[P] var() var = P # E: Can only assign concrete classes to a variable of type "Type[P]" var = B # OK var = C # OK var_old = None # type: Type[P] # Old syntax for variable annotations var_old() var_old = P # E: Can only assign concrete classes to a variable of type "Type[P]" var_old = B # OK var_old = C # OK [case testInstantiationProtocolInTypeForClassMethods] from typing import Type, Protocol class Logger: @staticmethod def log(a: Type[C]): pass class C(Protocol): @classmethod def action(cls) -> None: cls() #OK for classmethods Logger.log(cls) #OK for classmethods [builtins fixtures/classmethod.pyi] -- isinstance() with @runtime protocols -- ------------------------------------ [case testSimpleRuntimeProtocolCheck] from typing import Protocol, runtime @runtime # E: @runtime can only be used with protocol classes class C: pass class P(Protocol): def meth(self) -> None: pass @runtime class R(Protocol): def meth(self) -> int: pass x: object if isinstance(x, P): # E: Only @runtime protocols can be used with instance and class checks reveal_type(x) # E: Revealed type is '__main__.P' if isinstance(x, R): reveal_type(x) # E: Revealed type is '__main__.R' reveal_type(x.meth()) # E: Revealed type is 'builtins.int' [builtins fixtures/isinstance.pyi] [case testRuntimeIterableProtocolCheck] from typing import Iterable, List, Union x: Union[int, List[str]] if isinstance(x, Iterable): reveal_type(x) # E: Revealed type is 'builtins.list[builtins.str]' [builtins fixtures/isinstancelist.pyi] [case testConcreteClassesInProtocolsIsInstance] from typing import 
Protocol, runtime, TypeVar, Generic T = TypeVar('T') @runtime class P1(Protocol): def meth1(self) -> int: pass @runtime class P2(Protocol): def meth2(self) -> int: pass @runtime class P(P1, P2, Protocol): pass class C1(Generic[T]): def meth1(self) -> T: pass class C2: def meth2(self) -> int: pass class C(C1[int], C2): pass c = C() if isinstance(c, P1): reveal_type(c) # E: Revealed type is '__main__.C' else: reveal_type(c) # Unreachable if isinstance(c, P): reveal_type(c) # E: Revealed type is '__main__.C' else: reveal_type(c) # Unreachable c1i: C1[int] if isinstance(c1i, P1): reveal_type(c1i) # E: Revealed type is '__main__.C1[builtins.int]' else: reveal_type(c1i) # Unreachable if isinstance(c1i, P): reveal_type(c1i) # Unreachable else: reveal_type(c1i) # E: Revealed type is '__main__.C1[builtins.int]' c1s: C1[str] if isinstance(c1s, P1): reveal_type(c1s) # Unreachable else: reveal_type(c1s) # E: Revealed type is '__main__.C1[builtins.str]' c2: C2 if isinstance(c2, P): reveal_type(c2) # Unreachable else: reveal_type(c2) # E: Revealed type is '__main__.C2' [builtins fixtures/isinstancelist.pyi] [case testConcreteClassesUnionInProtocolsIsInstance] from typing import Protocol, runtime, TypeVar, Generic, Union T = TypeVar('T') @runtime class P1(Protocol): def meth1(self) -> int: pass @runtime class P2(Protocol): def meth2(self) -> int: pass class C1(Generic[T]): def meth1(self) -> T: pass class C2: def meth2(self) -> int: pass x: Union[C1[int], C2] if isinstance(x, P1): reveal_type(x) # E: Revealed type is '__main__.C1[builtins.int]' else: reveal_type(x) # E: Revealed type is '__main__.C2' if isinstance(x, P2): reveal_type(x) # E: Revealed type is '__main__.C2' else: reveal_type(x) # E: Revealed type is '__main__.C1[builtins.int]' [builtins fixtures/isinstancelist.pyi] -- Non-Instances and protocol types (Callable vs __call__ etc.) 
-- ------------------------------------------------------------ [case testBasicTupleStructuralSubtyping] from typing import Tuple, TypeVar, Protocol T = TypeVar('T', covariant=True) class MyProto(Protocol[T]): def __len__(self) -> T: pass t: Tuple[int, str] def f(x: MyProto[int]) -> None: pass f(t) # OK y: MyProto[str] y = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "MyProto[str]") [builtins fixtures/isinstancelist.pyi] [case testBasicNamedTupleStructuralSubtyping] from typing import NamedTuple, TypeVar, Protocol T = TypeVar('T', covariant=True) S = TypeVar('S', covariant=True) class P(Protocol[T, S]): @property def x(self) -> T: pass @property def y(self) -> S: pass class N(NamedTuple): x: int y: str class N2(NamedTuple): x: int class N3(NamedTuple): x: int y: int z: N z3: N3 def fun(x: P[int, str]) -> None: pass def fun2(x: P[int, int]) -> None: pass def fun3(x: P[T, T]) -> T: return x.x fun(z) fun2(z) # E: Argument 1 to "fun2" has incompatible type "N"; expected "P[int, int]" \ # N: Following member(s) of "N" have conflicts: \ # N: y: expected "int", got "str" fun(N2(1)) # E: Argument 1 to "fun" has incompatible type "N2"; expected "P[int, str]" \ # N: 'N2' is missing following 'P' protocol member: \ # N: y reveal_type(fun3(z)) # E: Revealed type is 'builtins.object*' reveal_type(fun3(z3)) # E: Revealed type is 'builtins.int*' [builtins fixtures/list.pyi] [case testBasicCallableStructuralSubtyping] from typing import Callable, Generic, TypeVar def apply(f: Callable[[int], int], x: int) -> int: return f(x) class Add5: def __call__(self, x: int) -> int: return x + 5 apply(Add5(), 5) T = TypeVar('T') def apply_gen(f: Callable[[T], T]) -> T: pass reveal_type(apply_gen(Add5())) # E: Revealed type is 'builtins.int*' def apply_str(f: Callable[[str], int], x: str) -> int: return f(x) apply_str(Add5(), 'a') # E: Argument 1 to "apply_str" has incompatible type "Add5"; expected "Callable[[str], int]" \ # N: "Add5.__call__" has type "Callable[[Arg(int, 'x')], int]" [builtins fixtures/isinstancelist.pyi] [case testMoreComplexCallableStructuralSubtyping] from mypy_extensions import Arg, VarArg from typing import Protocol, Callable def call_soon(cb: Callable[[Arg(int, 'x'), VarArg(str)], int]): pass class Good: def __call__(self, x: int, *rest: str) -> int: pass class Bad1: def __call__(self, x: int, *rest: int) -> int: pass class Bad2: def __call__(self, y: int, *rest: str) -> int: pass call_soon(Good()) call_soon(Bad1()) # E: Argument 1 to "call_soon" has incompatible type "Bad1"; expected "Callable[[int, VarArg(str)], int]" \ # N: "Bad1.__call__" has type "Callable[[Arg(int, 'x'), VarArg(int)], int]" call_soon(Bad2()) # E: Argument 1 to "call_soon" has incompatible type "Bad2"; expected "Callable[[int, VarArg(str)], int]" \ # N: "Bad2.__call__" has type "Callable[[Arg(int, 'y'), VarArg(str)], int]" [builtins fixtures/isinstancelist.pyi] [case testStructuralSupportForPartial] from typing import Callable, TypeVar, Generic, Any T = TypeVar('T') class partial(Generic[T]): def __init__(self, func: Callable[..., T], *args: Any) -> None: ... def __call__(self, *args: Any) -> T: ... 
def inc(a: int, temp: str) -> int: pass def foo(f: Callable[[int], T]) -> T: return f(1) reveal_type(foo(partial(inc, 'temp'))) # E: Revealed type is 'builtins.int*' [builtins fixtures/list.pyi] [case testStructuralInferenceForCallable] from typing import Callable, TypeVar, Tuple T = TypeVar('T') S = TypeVar('S') class Actual: def __call__(self, arg: int) -> str: pass def fun(cb: Callable[[T], S]) -> Tuple[T, S]: pass reveal_type(fun(Actual())) # E: Revealed type is 'Tuple[builtins.int*, builtins.str*]' [builtins fixtures/tuple.pyi] -- Standard protocol types (SupportsInt, Sized, etc.) -- -------------------------------------------------- -- More tests could be added for types from typing converted to protocols [case testBasicSizedProtocol] from typing import Sized class Foo: def __len__(self) -> int: return 42 def bar(a: Sized) -> int: return a.__len__() bar(Foo()) bar((1, 2)) bar(1) # E: Argument 1 to "bar" has incompatible type "int"; expected "Sized" [builtins fixtures/isinstancelist.pyi] [case testBasicSupportsIntProtocol] from typing import SupportsInt class Bar: def __int__(self): return 1 def foo(a: SupportsInt): pass foo(Bar()) foo('no way') # E: Argument 1 to "foo" has incompatible type "str"; expected "SupportsInt" [builtins fixtures/isinstancelist.pyi] -- Additional tests and corner cases for protocols -- ---------------------------------------------- [case testAnyWithProtocols] from typing import Protocol, Any, TypeVar T = TypeVar('T') class P1(Protocol): attr1: int class P2(Protocol[T]): attr2: T class P3(Protocol): attr: P3 def f1(x: P1) -> None: pass def f2(x: P2[str]) -> None: pass def f3(x: P3) -> None: pass class C1: attr1: Any class C2: attr2: Any class C3: attr: Any f1(C1()) f2(C2()) f3(C3()) f2(C3()) # E: Argument 1 to "f2" has incompatible type "C3"; expected "P2[str]" a: Any f1(a) f2(a) f3(a) [case testErrorsForProtocolsInDifferentPlaces] from typing import Protocol class P(Protocol): attr1: int attr2: str attr3: int class C: attr1: str @property def attr2(self) -> int: pass x: P = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P") \ # N: 'C' is missing following 'P' protocol member: \ # N: attr3 \ # N: Following member(s) of "C" have conflicts: \ # N: attr1: expected "int", got "str" \ # N: attr2: expected "str", got "int" \ # N: Protocol member P.attr2 expected settable variable, got read-only attribute def f(x: P) -> P: return C() # E: Incompatible return value type (got "C", expected "P") \ # N: 'C' is missing following 'P' protocol member: \ # N: attr3 \ # N: Following member(s) of "C" have conflicts: \ # N: attr1: expected "int", got "str" \ # N: attr2: expected "str", got "int" \ # N: Protocol member P.attr2 expected settable variable, got read-only attribute f(C()) # E: Argument 1 to "f" has incompatible type "C"; expected "P" \ # N: 'C' is missing following 'P' protocol member: \ # N: attr3 \ # N: Following member(s) of "C" have conflicts: \ # N: attr1: expected "int", got "str" \ # N: attr2: expected "str", got "int" \ # N: Protocol member P.attr2 expected settable variable, got read-only attribute [builtins fixtures/list.pyi] [case testIterableProtocolOnClass] from typing import TypeVar, Iterator T = TypeVar('T', bound='A') class A: def __iter__(self: T) -> Iterator[T]: pass class B(A): pass reveal_type(list(b for b in B())) # E: Revealed type is 'builtins.list[__main__.B*]' reveal_type(list(B())) # E: Revealed type is 'builtins.list[__main__.B*]' [builtins fixtures/list.pyi] [case testIterableProtocolOnMetaclass] 
from typing import TypeVar, Iterator, Type T = TypeVar('T') class EMeta(type): def __iter__(self: Type[T]) -> Iterator[T]: pass class E(metaclass=EMeta): pass class C(E): pass reveal_type(list(c for c in C)) # E: Revealed type is 'builtins.list[__main__.C*]' reveal_type(list(C)) # E: Revealed type is 'builtins.list[__main__.C*]' [builtins fixtures/list.pyi] [case testClassesGetattrWithProtocols] from typing import Protocol class P(Protocol): attr: int class PP(Protocol): @property def attr(self) -> int: pass class C: def __getattr__(self, attr: str) -> int: pass class C2(C): def __setattr__(self, attr: str, val: int) -> None: pass class D: def __getattr__(self, attr: str) -> str: pass def fun(x: P) -> None: reveal_type(P.attr) # E: Revealed type is 'builtins.int' def fun_p(x: PP) -> None: reveal_type(P.attr) # E: Revealed type is 'builtins.int' fun(C()) # E: Argument 1 to "fun" has incompatible type "C"; expected "P" \ # N: Protocol member P.attr expected settable variable, got read-only attribute fun(C2()) fun_p(D()) # E: Argument 1 to "fun_p" has incompatible type "D"; expected "PP" \ # N: Following member(s) of "D" have conflicts: \ # N: attr: expected "int", got "str" fun_p(C()) # OK [builtins fixtures/list.pyi] [case testImplicitTypesInProtocols] from typing import Protocol class P(Protocol): x = 1 # E: All protocol members must have explicitly declared types class C: x: int class D: x: str x: P x = D() # E: Incompatible types in assignment (expression has type "D", variable has type "P") \ # N: Following member(s) of "D" have conflicts: \ # N: x: expected "int", got "str" x = C() # OK [builtins fixtures/list.pyi] [case testProtocolIncompatibilityWithGenericMethod] from typing import Protocol, TypeVar T = TypeVar('T') S = TypeVar('S') class A(Protocol): def f(self, x: T) -> None: pass class B: def f(self, x: S, y: T) -> None: pass x: A = B() [out] main:11: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:11: note: Following member(s) of "B" have conflicts: main:11: note: Expected: main:11: note: def [T] f(self, x: T) -> None main:11: note: Got: main:11: note: def [S, T] f(self, x: S, y: T) -> None [case testProtocolIncompatibilityWithGenericMethodBounded] from typing import Protocol, TypeVar T = TypeVar('T') S = TypeVar('S', bound=int) class A(Protocol): def f(self, x: T) -> None: pass class B: def f(self, x: S, y: T) -> None: pass x: A = B() [out] main:11: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:11: note: Following member(s) of "B" have conflicts: main:11: note: Expected: main:11: note: def [T] f(self, x: T) -> None main:11: note: Got: main:11: note: def [S <: int, T] f(self, x: S, y: T) -> None [case testProtocolIncompatibilityWithGenericRestricted] from typing import Protocol, TypeVar T = TypeVar('T') S = TypeVar('S', int, str) class A(Protocol): def f(self, x: T) -> None: pass class B: def f(self, x: S, y: T) -> None: pass x: A = B() [out] main:11: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:11: note: Following member(s) of "B" have conflicts: main:11: note: Expected: main:11: note: def [T] f(self, x: T) -> None main:11: note: Got: main:11: note: def [S in (int, str), T] f(self, x: S, y: T) -> None [case testProtocolIncompatibilityWithManyOverloads] from typing import Protocol, overload class C1: pass class C2: pass class A(Protocol): @overload def f(self, x: int) -> int: pass @overload def f(self, x: str) -> str: pass @overload def 
f(self, x: C1) -> C2: pass @overload def f(self, x: C2) -> C1: pass class B: def f(self) -> None: pass x: A = B() [out] main:18: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:18: note: Following member(s) of "B" have conflicts: main:18: note: Expected: main:18: note: @overload main:18: note: def f(self, x: int) -> int main:18: note: @overload main:18: note: def f(self, x: str) -> str main:18: note: <2 more overload(s) not shown> main:18: note: Got: main:18: note: def f(self) -> None [case testProtocolIncompatibilityWithManyConflicts] from typing import Protocol class A(Protocol): def f(self, x: int) -> None: pass def g(self, x: int) -> None: pass def h(self, x: int) -> None: pass def i(self, x: int) -> None: pass class B: def f(self, x: str) -> None: pass def g(self, x: str) -> None: pass def h(self, x: str) -> None: pass def i(self, x: str) -> None: pass x: A = B() [out] main:14: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:14: note: Following member(s) of "B" have conflicts: main:14: note: Expected: main:14: note: def f(self, x: int) -> None main:14: note: Got: main:14: note: def f(self, x: str) -> None main:14: note: Expected: main:14: note: def g(self, x: int) -> None main:14: note: Got: main:14: note: def g(self, x: str) -> None main:14: note: <2 more conflict(s) not shown> [case testDontShowNotesForTupleAndIterableProtocol] from typing import Iterable, Sequence, Protocol, NamedTuple class N(NamedTuple): x: int def f1(x: Iterable[str]) -> None: pass def f2(x: Sequence[str]) -> None: pass # The errors below should be short f1(N(1)) # E: Argument 1 to "f1" has incompatible type "N"; expected "Iterable[str]" f2(N(2)) # E: Argument 1 to "f2" has incompatible type "N"; expected "Sequence[str]" [builtins fixtures/tuple.pyi] [case testNotManyFlagConflitsShownInProtocols] from typing import Protocol class AllSettable(Protocol): a: int b: int c: int d: int class AllReadOnly: @property def a(self) -> int: pass @property def b(self) -> int: pass @property def c(self) -> int: pass @property def d(self) -> int: pass x: AllSettable = AllReadOnly() [builtins fixtures/property.pyi] [out] main:19: error: Incompatible types in assignment (expression has type "AllReadOnly", variable has type "AllSettable") main:19: note: Protocol member AllSettable.a expected settable variable, got read-only attribute main:19: note: Protocol member AllSettable.b expected settable variable, got read-only attribute main:19: note: <2 more conflict(s) not shown> [case testProtocolsMoreConflictsNotShown] from typing_extensions import Protocol from typing import Generic, TypeVar T = TypeVar('T') class MockMapping(Protocol[T]): def a(self, x: T) -> int: pass def b(self, x: T) -> int: pass def c(self, x: T) -> int: pass d: T e: T f: T class MockDict(MockMapping[T]): more: int def f(x: MockMapping[int]) -> None: pass x: MockDict[str] f(x) # E: Argument 1 to "f" has incompatible type "MockDict[str]"; expected "MockMapping[int]" [case testProtocolNotesForComplexSignatures] from typing import Protocol, Optional class P(Protocol): def meth(self, x: int, *args: str) -> None: pass def other(self, *args, hint: Optional[str] = None, **kwargs: str) -> None: pass class C: def meth(self) -> int: pass def other(self) -> int: pass x: P = C() [builtins fixtures/dict.pyi] [out] main:10: error: Incompatible types in assignment (expression has type "C", variable has type "P") main:10: note: Following member(s) of "C" have conflicts: main:10: note: 
Expected: main:10: note: def meth(self, x: int, *args: str) -> None main:10: note: Got: main:10: note: def meth(self) -> int main:10: note: Expected: main:10: note: def other(self, *args: Any, hint: Optional[str] = ..., **kwargs: str) -> None main:10: note: Got: main:10: note: def other(self) -> int [case testObjectAllowedInProtocolBases] from typing import Protocol class P(Protocol, object): pass [out] [case testNoneSubtypeOfEmptyProtocol] from typing import Protocol class P(Protocol): pass x: P = None [out] [case testNoneSubtypeOfAllProtocolsWithoutStrictOptional] from typing import Protocol class P(Protocol): attr: int def meth(self, arg: str) -> str: pass x: P = None [out] [case testNoneSubtypeOfEmptyProtocolStrict] # flags: --strict-optional from typing import Protocol class P(Protocol): pass x: P = None class PBad(Protocol): x: int y: PBad = None # E: Incompatible types in assignment (expression has type "None", variable has type "PBad") [out] [case testOnlyMethodProtocolUsableWithIsSubclass] from typing import Protocol, runtime, Union, Type @runtime class P(Protocol): def meth(self) -> int: pass @runtime class PBad(Protocol): x: str class C: x: str def meth(self) -> int: pass class E: pass cls: Type[Union[C, E]] issubclass(cls, PBad) # E: Only protocols that don't have non-method members can be used with issubclass() \ # N: Protocol "PBad" has non-method member(s): x if issubclass(cls, P): reveal_type(cls) # E: Revealed type is 'Type[__main__.C]' else: reveal_type(cls) # E: Revealed type is 'Type[__main__.E]' [builtins fixtures/isinstance.pyi] [out] mypy-0.560/test-data/unit/check-python2.test0000644€tŠÔÚ€2›s®0000002024113215007205025112 0ustar jukkaDROPBOX\Domain Users00000000000000-- Type checker test cases for Python 2.x mode. [case testUnicode] u = u'foo' u = unicode() s = '' s = u'foo' # E: Incompatible types in assignment (expression has type "unicode", variable has type "str") s = b'foo' [builtins_py2 fixtures/python2.pyi] [case testTypeVariableUnicode] from typing import TypeVar T = TypeVar(u'T') [case testNamedTuple*sh Unicode] from typing import NamedTuple from collections import namedtuple N = NamedTuple(u'N', [(u'x', int)]) n = namedtuple(u'n', u'x y') [builtins fixtures/dict.pyi] [case testPrintStatement] print ''() # E: "str" not callable print 1, 1() # E: "int" not callable [case testPrintStatementWithTarget] class A: def write(self, s): # type: (str) -> None pass print >>A(), '' print >>None, '' print >>1, '' # E: "int" has no attribute "write" print >>(None + ''), None # E: Unsupported left operand type for + ("None") [case testDivision] class A: def __div__(self, x): # type: (int) -> str pass s = A() / 1 s = '' s = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testStrUnicodeCompatibility] import typing def f(x): # type: (unicode) -> None pass f('') f(u'') f(b'') [builtins_py2 fixtures/python2.pyi] [case testStaticMethodWithCommentSignature] class A: @staticmethod def f(x): # type: (int) -> str return '' A.f(1) A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [builtins_py2 fixtures/staticmethod.pyi] [case testRaiseTuple] import typing raise BaseException, "a" raise BaseException, "a", None [builtins_py2 fixtures/exception.pyi] [case testTryExceptWithTuple] try: None except BaseException, e: e() # E: "BaseException" not callable [builtins_py2 fixtures/exception.pyi] [case testTryExceptUnsupported] try: pass except BaseException, (e, f): # E: Sorry, `except , ` is not supported pass 
try: pass except BaseException, [e, f, g]: # E: Sorry, `except , ` is not supported pass try: pass except BaseException, e[0]: # E: Sorry, `except , ` is not supported pass [builtins_py2 fixtures/exception.pyi] [case testAlternateNameSuggestions] class Foo(object): def say_hello(self): pass def say_hell(self): pass def say_hullo(self): pass def say_goodbye(self): pass def go_away(self): pass def go_around(self): pass def append(self): pass def extend(self): pass def _add(self): pass f = Foo() f.say_hallo() # E: "Foo" has no attribute "say_hallo"; maybe "say_hullo", "say_hello", or "say_hell"? f.go_array() # E: "Foo" has no attribute "go_array"; maybe "go_away"? f.add() # E: "Foo" has no attribute "add"; maybe "append", "extend", or "_add"? [case testTupleArgListDynamicallyTyped] def f(x, (y, z)): x = y + z f(1, 1) f(1, (1, 2)) [case testTupleArgListAnnotated] from typing import Tuple def f(x, (y, z)): # type: (object, Tuple[int, str]) -> None x() # E y() # E z() # E f(object(), (1, '')) f(1, 1) # E [builtins_py2 fixtures/tuple.pyi] [out] main:3: error: "object" not callable main:4: error: "int" not callable main:5: error: "str" not callable main:7: error: Argument 2 to "f" has incompatible type "int"; expected "Tuple[int, str]" [case testNestedTupleArgListAnnotated] from typing import Tuple def f(x, (y, (a, b))): # type: (object, Tuple[int, Tuple[str, int]]) -> None x() # E y() # E a() # E b() # E f(object(), (1, ('', 2))) f(1, 1) # E [builtins fixtures/tuple.pyi] [out] main:3: error: "object" not callable main:4: error: "int" not callable main:5: error: "str" not callable main:6: error: "int" not callable main:8: error: Argument 2 to "f" has incompatible type "int"; expected "Tuple[int, Tuple[str, int]]" [case testBackquoteExpr] `1`.x # E: "str" has no attribute "x" [case testPython2OnlyStdLibModuleWithoutStub] import asyncio import Bastion [out] main:1: error: Cannot find module named 'asyncio' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: No library stub file for standard library module 'Bastion' main:2: note: (Stub files are from https://github.com/python/typeshed) [case testImportFromPython2Builtin] from __builtin__ import int as i x = 1 # type: i y = '' # type: i # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testImportPython2Builtin] import __builtin__ x = 1 # type: __builtin__.int y = '' # type: __builtin__.int # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testImportAsPython2Builtin] import __builtin__ as bi x = 1 # type: bi.int y = '' # type: bi.int # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testImportFromPython2BuiltinOverridingDefault] from __builtin__ import int x = 1 # type: int y = '' # type: int # E: Incompatible types in assignment (expression has type "str", variable has type "int") -- Copied from check-functions.test [case testEllipsisWithArbitraryArgsOnBareFunctionInPython2] def f(x, y, z): # type: (...) -> None pass -- Copied from check-functions.test [case testEllipsisWithSomethingAfterItFailsInPython2] def f(x, y, z): # type: (..., int) -> None pass [out] main:1: error: Ellipses cannot accompany other argument types in function type signature. 
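-- The surrounding cases exercise Python 2 comment-style annotations and tuple
-- argument unpacking. For orientation only (a sketch, not one of the checked
-- cases), the comment form below is equivalent to writing the annotations
-- inline, i.e. x is int, y is str, and the return type is int:
--
--     def f(x, y):
--         # type: (int, str) -> int
--         return x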
[case testLambdaTupleArgInPython2] f = lambda (x, y): x + y f((0, 0)) [out] [case testLambdaSingletonTupleArgInPython2] f = lambda (x,): x + 1 f((0,)) [out] [case testLambdaNoTupleArgInPython2] f = lambda (x): x + 1 f(0) [out] [case testDefTupleEdgeCasesPython2] def f((x,)): return x def g((x)): return x f(0) + g(0) [out] [case testLambdaAsSortKeyForTuplePython2] from typing import Any, Tuple, Callable def bar(key): # type: (Callable[[Tuple[int, int]], int]) -> int pass def foo(): # type: () -> int return bar(key=lambda (a, b): a) [out] [case testImportBuiltins] import __builtin__ __builtin__.str [case testUnicodeAlias] from typing import List Alias = List[u'Foo'] class Foo: pass [builtins_py2 fixtures/python2.pyi] [case testExec] exec('print 1 + 1') [case testUnicodeDocStrings] # flags: --python-version=2.7 __doc__ = u"unicode" class A: u"unicode" def f(): # type: () -> None u"unicode" [case testMetaclassBasics] class M(type): x = 0 # type: int def test(cls): # type: () -> str return "test" class A(object): __metaclass__ = M reveal_type(A.x) # E: Revealed type is 'builtins.int' reveal_type(A.test()) # E: Revealed type is 'builtins.str' [case testImportedMetaclass] import m class A(object): __metaclass__ = m.M reveal_type(A.x) # E: Revealed type is 'builtins.int' reveal_type(A.test()) # E: Revealed type is 'builtins.str' [file m.py] class M(type): x = 0 def test(cls): # type: () -> str return "test" [case testDynamicMetaclass] class C(object): __metaclass__ = int() # E: Dynamic metaclass not supported for 'C' [case testMetaclassDefinedAsClass] class C(object): class __metaclass__: pass # E: Metaclasses defined as inner classes are not supported [case testErrorInMetaclass] x = 0 class A(object): __metaclass__ = m.M # E: Name 'm' is not defined class B(object): __metaclass__ = M # E: Name 'M' is not defined [case testMetaclassAndSkippedImportInPython2] # flags: --ignore-missing-imports from missing import M class A(object): __metaclass__ = M y = 0 reveal_type(A.y) # E: Revealed type is 'builtins.int' A.x # E: "Type[A]" has no attribute "x" [case testAnyAsBaseOfMetaclass] from typing import Any, Type M = None # type: Any class MM(M): pass class A(object): __metaclass__ = MM [case testSelfTypeNotSelfType2] class A: def g(self): # type: (None) -> None pass [out] main:2: error: Invalid type for self, or extra argument type in function annotation main:2: note: (Hint: typically annotations omit the type for self) [case testSuper] class A: def f(self): # type: () -> None pass class B(A): def g(self): # type: () -> None super(B, self).f() super().f() # E: Too few arguments for "super" mypy-0.560/test-data/unit/check-selftype.test0000644€tŠÔÚ€2›s®0000003360113215007205025346 0ustar jukkaDROPBOX\Domain Users00000000000000[case testSelfTypeInstance] from typing import TypeVar T = TypeVar('T', bound='A', covariant=True) class A: def copy(self: T) -> T: pass class B(A): pass reveal_type(A().copy) # E: Revealed type is 'def () -> __main__.A*' reveal_type(B().copy) # E: Revealed type is 'def () -> __main__.B*' reveal_type(A().copy()) # E: Revealed type is '__main__.A*' reveal_type(B().copy()) # E: Revealed type is '__main__.B*' [builtins fixtures/bool.pyi] [case testSelfTypeStaticAccess] from typing import TypeVar T = TypeVar('T', bound='A', covariant=True) class A: def copy(self: T) -> T: pass class B(A): pass # Erased instances appear on reveal_type; unrelated to self type def f(a: A) -> None: pass f(A.copy(A())) f(A.copy(B())) f(B.copy(B())) # TODO: make it an error # f(B.copy(A())) def g(a: B) -> None: 
pass g(A.copy(A())) # E: Argument 1 to "g" has incompatible type "A"; expected "B" g(A.copy(B())) g(B.copy(B())) [builtins fixtures/bool.pyi] [case testSelfTypeReturn] from typing import TypeVar, Type R = TypeVar('R') def _type(self: R) -> Type[R]: pass T = TypeVar('T', bound='A', covariant=True) class A: def copy(self: T) -> T: if B(): return A() # E: Incompatible return value type (got "A", expected "T") elif A(): return B() # E: Incompatible return value type (got "B", expected "T") reveal_type(_type(self)) # E: Revealed type is 'Type[T`-1]' return reveal_type(_type(self)()) # E: Revealed type is 'T`-1' class B(A): pass Q = TypeVar('Q', bound='C', covariant=True) class C: def __init__(self, a: int) -> None: pass def copy(self: Q) -> Q: if self: return reveal_type(_type(self)(1)) # E: Revealed type is 'Q`-1' else: return _type(self)() # E: Too few arguments for "C" [builtins fixtures/bool.pyi] [case testSelfTypeClass] from typing import TypeVar, Type T = TypeVar('T', bound='A') class A: @classmethod def new(cls: Type[T]) -> T: return reveal_type(cls()) # E: Revealed type is 'T`-1' class B(A): pass Q = TypeVar('Q', bound='C', covariant=True) class C: def __init__(self, a: int) -> None: pass @classmethod def new(cls: Type[Q]) -> Q: if cls: return cls(1) else: return cls() # E: Too few arguments for "C" reveal_type(A.new) # E: Revealed type is 'def () -> __main__.A*' reveal_type(B.new) # E: Revealed type is 'def () -> __main__.B*' reveal_type(A.new()) # E: Revealed type is '__main__.A*' reveal_type(B.new()) # E: Revealed type is '__main__.B*' [builtins fixtures/classmethod.pyi] [case testSelfTypeOverride] from typing import TypeVar, cast T = TypeVar('T', bound='A', covariant=True) class A: def copy(self: T) -> T: pass class B(A): pass Q = TypeVar('Q', bound='C', covariant=True) class C(A): def copy(self: Q) -> Q: pass reveal_type(C().copy) # E: Revealed type is 'def () -> __main__.C*' reveal_type(C().copy()) # E: Revealed type is '__main__.C*' reveal_type(cast(A, C()).copy) # E: Revealed type is 'def () -> __main__.A*' reveal_type(cast(A, C()).copy()) # E: Revealed type is '__main__.A*' [builtins fixtures/bool.pyi] [case testSelfTypeSuper] from typing import TypeVar, cast T = TypeVar('T', bound='A', covariant=True) class A: def copy(self: T) -> T: pass Q = TypeVar('Q', bound='B', covariant=True) class B(A): def copy(self: Q) -> Q: reveal_type(self) # E: Revealed type is 'Q`-1' reveal_type(super().copy) # E: Revealed type is 'def () -> Q`-1' return super().copy() [builtins fixtures/bool.pyi] [case testSelfTypeRecursiveBinding] from typing import TypeVar, Callable, Type T = TypeVar('T', bound='A', covariant=True) class A: # TODO: This is potentially unsafe, as we use T in an argument type def copy(self: T, factory: Callable[[T], T]) -> T: return factory(self) @classmethod def new(cls: Type[T], factory: Callable[[T], T]) -> T: reveal_type(cls) # E: Revealed type is 'Type[T`-1]' reveal_type(cls()) # E: Revealed type is 'T`-1' cls(2) # E: Too many arguments for "A" return cls() class B(A): pass reveal_type(A().copy) # E: Revealed type is 'def (factory: def (__main__.A*) -> __main__.A*) -> __main__.A*' reveal_type(B().copy) # E: Revealed type is 'def (factory: def (__main__.B*) -> __main__.B*) -> __main__.B*' reveal_type(A.new) # E: Revealed type is 'def (factory: def (__main__.A*) -> __main__.A*) -> __main__.A*' reveal_type(B.new) # E: Revealed type is 'def (factory: def (__main__.B*) -> __main__.B*) -> __main__.B*' [builtins fixtures/classmethod.pyi] [case testSelfTypeBound] from typing import 
TypeVar, Callable, cast TA = TypeVar('TA', bound='A', covariant=True) class A: def copy(self: TA) -> TA: pass class C(A): def copy(self: C) -> C: pass class D(A): def copy(self: A) -> A: # E: Return type of "copy" incompatible with supertype "A" pass TB = TypeVar('TB', bound='B', covariant=True) class B(A): x = 1 def copy(self: TB) -> TB: reveal_type(self.x) # E: Revealed type is 'builtins.int' return cast(TB, None) [builtins fixtures/bool.pyi] -- # TODO: fail for this -- [case testSelfTypeBare] -- from typing import TypeVar, Type -- -- T = TypeVar('T', bound='E') -- -- class E: -- def copy(self: T, other: T) -> T: pass [case testSelfTypeClone] from typing import TypeVar, Type T = TypeVar('T', bound='C') class C: def copy(self: T) -> T: return self @classmethod def new(cls: Type[T]) -> T: return cls() class D(C): pass reveal_type(D.new) # E: Revealed type is 'def () -> __main__.D*' reveal_type(D().new) # E: Revealed type is 'def () -> __main__.D*' reveal_type(D.new()) # E: Revealed type is '__main__.D*' reveal_type(D().new()) # E: Revealed type is '__main__.D*' Q = TypeVar('Q', bound=C) def clone(arg: Q) -> Q: reveal_type(arg.copy) # E: Revealed type is 'def () -> Q`-1' reveal_type(arg.copy()) # E: Revealed type is 'Q`-1' reveal_type(arg.new) # E: Revealed type is 'def () -> Q`-1' reveal_type(arg.new()) # E: Revealed type is 'Q`-1' return arg.copy() def make(cls: Type[Q]) -> Q: reveal_type(cls.new) # E: Revealed type is 'def () -> Q`-1' reveal_type(cls().new) # E: Revealed type is 'def () -> Q`-1' reveal_type(cls().new()) # E: Revealed type is 'Q`-1' return cls.new() [builtins fixtures/classmethod.pyi] [case testSelfTypeGeneric] from typing import TypeVar T = TypeVar('T', int, str) class A: pass class B(A): def __init__(self, arg: T) -> None: super(B, self).__init__() [case testSelfTypeNonsensical] from typing import TypeVar, Type T = TypeVar('T', bound=str) class A: def foo(self: T) -> T: # E: The erased type of self 'builtins.str' is not a supertype of its class '__main__.A' return self @classmethod def cfoo(cls: Type[T]) -> T: # E: The erased type of self 'Type[builtins.str]' is not a supertype of its class 'Type[__main__.A]' return cls() Q = TypeVar('Q', bound='B') class B: def foo(self: Q) -> Q: return self @classmethod def cfoo(cls: Type[Q]) -> Q: return cls() class C: def foo(self: C) -> C: return self @classmethod def cfoo(cls: Type[C]) -> C: return cls() class D: def foo(self: Q) -> Q: # E: The erased type of self '__main__.B' is not a supertype of its class '__main__.D' return self @staticmethod def bar(self: str) -> str: return self @classmethod def cfoo(cls: Type[Q]) -> Q: # E: The erased type of self 'Type[__main__.B]' is not a supertype of its class 'Type[__main__.D]' return cls() [builtins fixtures/classmethod.pyi] [case testSelfTypeLambdaDefault] from typing import Callable class C: @classmethod def foo(cls, arg: Callable[[int], str] = lambda a: '' ) -> None: pass def bar(self, arg: Callable[[int], str] = lambda a: '' ) -> None: pass [builtins fixtures/classmethod.pyi] [case testSelfTypeNew] from typing import TypeVar, Type T = TypeVar('T', bound=A) class A: def __new__(cls: Type[T]) -> T: return cls() def __init_subclass__(cls: Type[T]) -> None: pass class B: def __new__(cls: Type[T]) -> T: # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class 'Type[__main__.B]' return cls() def __init_subclass__(cls: Type[T]) -> None: # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class 'Type[__main__.B]' pass class C: def __new__(cls: 
Type[C]) -> C: return cls() def __init_subclass__(cls: Type[C]) -> None: pass class D: def __new__(cls: D) -> D: # E: The erased type of self '__main__.D' is not a supertype of its class 'Type[__main__.D]' return cls def __init_subclass__(cls: D) -> None: # E: The erased type of self '__main__.D' is not a supertype of its class 'Type[__main__.D]' pass class E: def __new__(cls) -> E: reveal_type(cls) # E: Revealed type is 'def () -> __main__.E' return cls() def __init_subclass__(cls) -> None: reveal_type(cls) # E: Revealed type is 'def () -> __main__.E' [case testSelfTypePropertyUnion] from typing import Union class A: @property def f(self: A) -> int: pass class B: @property def f(self: B) -> int: pass x: Union[A, B] reveal_type(x.f) # E: Revealed type is 'builtins.int' [builtins fixtures/property.pyi] [case testSelfTypeProperSupertypeAttribute] from typing import Callable, TypeVar class K: pass T = TypeVar('T', bound=K) class A(K): @property def g(self: K) -> int: return 0 @property def gt(self: T) -> T: return self f: Callable[[object], int] ft: Callable[[T], T] class B(A): pass reveal_type(A().g) # E: Revealed type is 'builtins.int' reveal_type(A().gt) # E: Revealed type is '__main__.A*' reveal_type(A().f()) # E: Revealed type is 'builtins.int' reveal_type(A().ft()) # E: Revealed type is '__main__.A*' reveal_type(B().g) # E: Revealed type is 'builtins.int' reveal_type(B().gt) # E: Revealed type is '__main__.B*' reveal_type(B().f()) # E: Revealed type is 'builtins.int' reveal_type(B().ft()) # E: Revealed type is '__main__.B*' [builtins fixtures/property.pyi] [case testSelfTypeProperSupertypeAttributeTuple] from typing import Callable, TypeVar, Tuple T = TypeVar('T') class A(Tuple[int, int]): @property def g(self: object) -> int: return 0 @property def gt(self: T) -> T: return self f: Callable[[object], int] ft: Callable[[T], T] class B(A): pass reveal_type(A().g) # E: Revealed type is 'builtins.int' reveal_type(A().gt) # E: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.A]' reveal_type(A().f()) # E: Revealed type is 'builtins.int' reveal_type(A().ft()) # E: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.A]' reveal_type(B().g) # E: Revealed type is 'builtins.int' reveal_type(B().gt) # E: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.B]' reveal_type(B().f()) # E: Revealed type is 'builtins.int' reveal_type(B().ft()) # E: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.B]' [builtins fixtures/property.pyi] [case testSelfTypeProperSupertypeAttributeMeta] from typing import Callable, TypeVar, Type T = TypeVar('T') class A(type): @property def g(cls: object) -> int: return 0 @property def gt(cls: T) -> T: return cls f: Callable[[object], int] ft: Callable[[T], T] class B(A): pass class X(metaclass=B): def __init__(self, x: int) -> None: pass class Y(X): pass X1: Type[X] reveal_type(X.g) # E: Revealed type is 'builtins.int' reveal_type(X.gt) # E: Revealed type is 'def (x: builtins.int) -> __main__.X' reveal_type(X.f()) # E: Revealed type is 'builtins.int' reveal_type(X.ft()) # E: Revealed type is 'def (x: builtins.int) -> __main__.X' reveal_type(Y.g) # E: Revealed type is 'builtins.int' reveal_type(Y.gt) # E: Revealed type is 'def (x: builtins.int) -> __main__.Y' reveal_type(Y.f()) # E: Revealed type is 'builtins.int' reveal_type(Y.ft()) # E: Revealed type is 'def (x: builtins.int) -> __main__.Y' reveal_type(X1.g) # E: Revealed type is 'builtins.int' reveal_type(X1.gt) # E: Revealed type is 
'Type[__main__.X]' reveal_type(X1.f()) # E: Revealed type is 'builtins.int' reveal_type(X1.ft()) # E: Revealed type is 'Type[__main__.X]' [builtins fixtures/property.pyi] [case testSelfTypeProperSupertypeAttributeGeneric] from typing import Callable, TypeVar, Generic Q = TypeVar('Q', covariant=True) class K(Generic[Q]): q: Q T = TypeVar('T') class A(K[Q]): @property def g(self: K[object]) -> int: return 0 @property def gt(self: K[T]) -> T: return self.q f: Callable[[object], int] ft: Callable[[T], T] class B(A[Q]): pass a: A[int] b: B[str] reveal_type(a.g) # E: Revealed type is 'builtins.int' --reveal_type(a.gt) # E: Revealed type is 'builtins.int' reveal_type(a.f()) # E: Revealed type is 'builtins.int' reveal_type(a.ft()) # E: Revealed type is '__main__.A*[builtins.int]' reveal_type(b.g) # E: Revealed type is 'builtins.int' --reveal_type(b.gt) # E: Revealed type is '__main__.B*[builtins.str]' reveal_type(b.f()) # E: Revealed type is 'builtins.int' reveal_type(b.ft()) # E: Revealed type is '__main__.B*[builtins.str]' [builtins fixtures/property.pyi] [case testSelfTypeNotSelfType] # Friendlier error messages for common mistakes. See #2950 class A: def f(x: int) -> None: ... # def g(self: None) -> None: ... see in check-python2.test [out] main:3: error: Self argument missing for a non-static method (or an invalid type for self) [case testUnionPropertyField] from typing import Union class A: x: int class B: @property def x(self) -> int: return 1 class C: @property def x(self) -> int: return 1 ab: Union[A, B, C] reveal_type(ab.x) # E: Revealed type is 'builtins.int' [builtins fixtures/property.pyi] mypy-0.560/test-data/unit/check-semanal-error.test0000644€tŠÔÚ€2›s®0000000500413215007205026256 0ustar jukkaDROPBOX\Domain Users00000000000000-- Type checking after an error during semantic analysis -- ----------------------------------------------------- -- -- This tests both the semantic analyzer (that it does not generate -- corrupt state on error) and the type checker (that it can deal with -- whatever state the semantic analyzer sets up). 
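-- A typical case in this file has the following shape (sketch only, based on
-- testMissingModuleImport1 below): a failed import is reported once during
-- semantic analysis, later uses of the broken name are tolerated without a
-- crash, and unrelated type errors are still reported normally:
--
--     import m     # error: Cannot find module named 'm'
--     m.foo()      # no crash, no additional error here
--     1()          # error: "int" not callable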
-- TODO: -- - invalid type in annotation -- - invalid function comment type annotation -- - invalid multiple assignment type annotation -- - using a type variable as a value -- - using special names defined in typing as values [case testMissingModuleImport1] import m # E m.foo() m.x = m.y 1() # E [out] main:1: error: Cannot find module named 'm' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:4: error: "int" not callable [case testMissingModuleImport2] from m import x # E x.foo() x.a = x.b 1() # E [out] main:1: error: Cannot find module named 'm' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:4: error: "int" not callable [case testMissingModuleImport3] from m import * # E x # E 1() # E [out] main:1: error: Cannot find module named 'm' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: Name 'x' is not defined main:3: error: "int" not callable [case testInvalidBaseClass1] class A(X): # E: Name 'X' is not defined x = 1 A().foo(1) A().x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testInvalidBaseClass2] X = 1 class A(X): # E x = 1 A().foo(1) A().x = '' # E [out] main:2: error: Invalid type "__main__.X" main:2: error: Invalid base class main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testInvalidNumberOfTypeArgs] from typing import TypeVar T = TypeVar('T') class C: # Forgot to add type params here def __init__(self, t: T) -> None: pass c = C(t=3) # type: C[int] # E: "C" expects no type arguments, but 1 given [case testBreakOutsideLoop] break # E: 'break' outside loop [case testContinueOutsideLoop] continue # E: 'continue' outside loop [case testYieldOutsideFunction] yield # E: 'yield' outside function [case testYieldFromOutsideFunction] x = 1 yield from x # E: 'yield from' outside function [case testImportFuncDup] import m def m() -> None: ... # ok [file m.py] [out] [case testIgnoredImportDup] import m # type: ignore from m import f # type: ignore def m() -> None: ... # ok def f() -> None: ... # ok [out] mypy-0.560/test-data/unit/check-serialize.test0000644€tŠÔÚ€2›s®0000006460713215007205025514 0ustar jukkaDROPBOX\Domain Users00000000000000-- Serialization test cases (incremental type checking) -- -- These test that modules deserialized from cache files behave -- identically to modules that have undergone full type checking. -- -- These tests are written using the same syntax as test cases in -- check-incremental.test. Look at the comment at that the top of -- that file for the details of how these tests work. -- -- There is probably some overlap with check-incremental.test, but it -- is perhaps not worth trying to simplify these, since a few redundant -- test cases are cheap but accidentally losing test coverage is bad. -- -- These are intended to be straightforward, and do not test import -- cycles and other tricky business. Add test cases for complex things -- to check-incremental.test. -- -- Basic things -- [case testSerializeModuleAttribute] import a [file a.py] import b [file a.py.2] import b y = b.x # type: int [file b.py] x = '' -- We only do the following two sections once here to avoid repetition. -- Most other test cases are similar. 
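-- (Roughly: a file named "a.py.2" replaces "a.py" for the second run,
-- [rechecked]/[stale] name the modules expected to be re-checked or to have
-- their cached data invalidated by that change, and [out2] gives the expected
-- output of the second run; see check-incremental.test for the full syntax.)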
[rechecked a] [stale] [out2] tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int") -- -- Functions -- [case testSerializeAnnotatedFunction] import a [file a.py] import b [file a.py.2] import b b.f(1) x = b.f('') # type: str [file b.py] def f(x: str) -> int: pass [out2] tmp/a.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" tmp/a.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testSerializeUnannotatedFunction] import a [file a.py] import b [file a.py.2] import b b.f(x=1) b.f() [file b.py] def f(x): pass [out2] tmp/a.py:3: error: Too few arguments for "f" [case testSerializeGenericFunction] import a [file a.py] import b [file a.py.2] from b import f reveal_type(f(1)) reveal_type(f(x='')) [file b.py] from typing import TypeVar T = TypeVar('T') def f(x: T) -> T: return x [out2] tmp/a.py:2: error: Revealed type is 'builtins.int*' tmp/a.py:3: error: Revealed type is 'builtins.str*' [case testSerializeFunctionReturningGenericFunction] import a [file a.py] import b [file a.py.2] import b reveal_type(b.f) reveal_type(b.f()('')) [file b.py] from typing import TypeVar, Callable T = TypeVar('T') def f() -> Callable[[T], T]: pass [out2] tmp/a.py:2: error: Revealed type is 'def () -> def [T] (T`-1) -> T`-1' tmp/a.py:3: error: Revealed type is 'builtins.str*' [case testSerializeArgumentKinds] import a [file a.py] import b [file a.py.2] from b import f f(1, z=1) f(1, '', z=1) f(1, y='', z=1) f(1, '', 2, 3, z=1) f(1, '', zz=1, z=1) f(1, '', foo='', z=1) [file b.py] def f(x: int, y: str = '', *args: int, z: int, zz: int = 1, **kw: str) -> None: pass [builtins fixtures/dict.pyi] [out2] [case testSerializeCallableWithBoundTypeArguments] import a [file a.py] import b [file a.py.2] import b x = b.f [file b.py] from typing import TypeVar, Generic T = TypeVar('T') class C(Generic[T]): def f(self, x: T) -> None: pass c: C[int] f = c.f [out] [out2] [case testSerializePositionalOnlyArgument] import a [file a.py] import b [file a.py.2] import b b.f(1) b.f('') b.f(__x=1) [file b.py] def f(__x: int) -> None: pass [out2] tmp/a.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int" tmp/a.py:4: error: Unexpected keyword argument "__x" for "f" [case testSerializeArgumentKindsErrors] import a [file a.py] import b [file a.py.2] from b import f f('', z=1) # Line 2 f(1, 2, z=1) # 3 f(1, y=1, z=1) # 4 f(1, '', 2, '', z=1) # 5 f(1, '', z='') # 6 f(1, '', zz='', z=1) # 7 f(1, '', z=1, foo=1) # 8 [file b.py] def f(x: int, y: str = '', *args: int, z: int, zz: int = 1, **kw: str) -> None: pass [builtins fixtures/dict.pyi] [out2] tmp/a.py:2: error: Argument 1 to "f" has incompatible type "str"; expected "int" tmp/a.py:3: error: Argument 2 to "f" has incompatible type "int"; expected "str" tmp/a.py:4: error: Argument 2 to "f" has incompatible type "int"; expected "str" tmp/a.py:5: error: Argument 4 to "f" has incompatible type "str"; expected "int" tmp/a.py:6: error: Argument 3 to "f" has incompatible type "str"; expected "int" tmp/a.py:7: error: Argument 3 to "f" has incompatible type "str"; expected "int" tmp/a.py:8: error: Argument 4 to "f" has incompatible type "int"; expected "str" [case testSerializeOverloadedFunction] import a [file a.py] import b [file a.py.2] import b reveal_type(b.f(1)) reveal_type(b.f('')) [file b.pyi] from typing import overload @overload def f(x: int) -> int: pass @overload def f(x: str) -> str: pass [out2] tmp/a.py:2: error: Revealed type is 
'builtins.int' tmp/a.py:3: error: Revealed type is 'builtins.str' [case testSerializeDecoratedFunction] import a [file a.py] import b [file a.py.2] import b reveal_type(b.f('')) b.f(x=1) [file b.py] from typing import Callable def dec(f: Callable[[int], int]) -> Callable[[str], str]: pass @dec def f(x: int) -> int: pass [out2] tmp/a.py:2: error: Revealed type is 'builtins.str' tmp/a.py:3: error: Unexpected keyword argument "x" for "f" -- -- Classes -- [case testSerializeClassAttribute] import a [file a.py] import b [file a.py.2] import b b.A().x = '' [file b.py] class A: x = 1 [out2] tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testSerializeMethod] import a [file a.py] import b [file a.py.2] import b b.A().f('') [file b.py] class A: def f(self, x: int) -> None: pass [out2] tmp/a.py:2: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testSerialize__init__] import a [file a.py] import b [file a.py.2] from b import A A('') class B(A): def f(self) -> None: super().__init__('') [file b.py] class A: def __init__(self, x: int) -> None: pass [out2] tmp/a.py:2: error: Argument 1 to "A" has incompatible type "str"; expected "int" tmp/a.py:5: error: Argument 1 to "__init__" of "A" has incompatible type "str"; expected "int" [case testSerializeOverloaded__init__] import a [file a.py] import b [file a.py.2] from b import A A(object()) # E A(x='') A(0) class B(A): def f(self) -> None: super().__init__(object()) # E super().__init__('') super().__init__(0) [file b.pyi] from typing import overload class A: @overload def __init__(self, x: int) -> None: pass @overload def __init__(self, x: str) -> None: pass [out2] tmp/a.py:2: error: No overload variant of "A" matches argument types [builtins.object] tmp/a.py:7: error: No overload variant of "__init__" of "A" matches argument types [builtins.object] [case testSerialize__new__] import a [file a.py] import b [file a.py.2] from b import A A('') [file b.py] class A: def __new__(cls, x: int) -> 'A': pass [out2] tmp/a.py:2: error: Argument 1 to "A" has incompatible type "str"; expected "int" [case testSerializeClassVar] import a [file a.py] import b [file a.py.2] from b import A A.x = '' A().x = 1 [file b.py] from typing import ClassVar class A: x: ClassVar[int] [out2] tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int") tmp/a.py:3: error: Cannot assign to class variable "x" via instance [case testSerializeGenericClass] import a [file a.py] import b [file a.py.2] from b import A a1: A[int, str] = A(1) a2: A[int, str] = A('') reveal_type(a1.y) reveal_type(a1.f()) [file b.py] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): x: T y: S def __init__(self, x: T) -> None: self.x = x def f(self) -> T: return self.x [out2] tmp/a.py:3: error: Argument 1 to "A" has incompatible type "str"; expected "int" tmp/a.py:4: error: Revealed type is 'builtins.str*' tmp/a.py:5: error: Revealed type is 'builtins.int*' [case testSerializeAbstractClass] import a [file a.py] import b [file a.py.2] from b import A A() class B(A): def f(self) -> None: pass x: int B() a: A a.f() a.x = 1 [file b.py] from abc import ABCMeta, abstractmethod, abstractproperty class A(metaclass=ABCMeta): @abstractmethod def f(self) -> None: pass @abstractproperty def x(self) -> int: return 0 [out2] tmp/a.py:2: error: Cannot instantiate abstract class 'A' with abstract attributes 'f' and 'x' tmp/a.py:9: error: Property 
"x" defined in "A" is read-only [case testSerializeStaticMethod] import a [file a.py] import b [file a.py.2] from b import A A.f(1) A.f() A().f() [file b.py] class A: @staticmethod def f() -> None: pass [builtins fixtures/staticmethod.pyi] [out2] tmp/a.py:2: error: Too many arguments for "f" of "A" [case testSerializeClassMethod] import a [file a.py] import b [file a.py.2] from b import A A.f(1) A.f() A().f() [file b.py] class A: @classmethod def f(cls) -> None: pass [builtins fixtures/classmethod.pyi] [out2] tmp/a.py:2: error: Too many arguments for "f" of "A" [case testSerializeReadOnlyProperty] import a [file a.py] import b [file a.py.2] from b import A reveal_type(A().x) A().x = 0 [file b.py] class A: @property def x(self) -> int: return 0 [builtins fixtures/property.pyi] [out2] tmp/a.py:2: error: Revealed type is 'builtins.int' tmp/a.py:3: error: Property "x" defined in "A" is read-only [case testSerializeReadWriteProperty] import a [file a.py] import b [file a.py.2] from b import A reveal_type(A().x) A().x = '' A().x = 0 [file b.py] class A: @property def x(self) -> int: return 0 @x.setter def x(self, v: int) -> None: pass [builtins fixtures/property.pyi] [out2] tmp/a.py:2: error: Revealed type is 'builtins.int' tmp/a.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testSerializeSelfType] import a [file a.py] import b [file a.py.2] from b import A reveal_type(A().f()) class B(A): pass reveal_type(B().f()) [file b.py] from typing import TypeVar T = TypeVar('T', bound='A') class A: def f(self: T) -> T: return self [out2] tmp/a.py:2: error: Revealed type is 'b.A*' tmp/a.py:4: error: Revealed type is 'a.B*' [case testSerializeInheritance] import a [file a.py] import b [file a.py.2] from b import A, B, C C().f(1) # E C().g(1) # E reveal_type(C().h()) a: A = C() b: B = C() i: int = C() # E [file b.py] class A: def f(self) -> int: pass class B: def g(self) -> str: pass def h(self) -> object: pass class C(A, B): def h(self) -> int: pass [out2] tmp/a.py:2: error: Too many arguments for "f" of "A" tmp/a.py:3: error: Too many arguments for "g" of "B" tmp/a.py:4: error: Revealed type is 'builtins.int' tmp/a.py:7: error: Incompatible types in assignment (expression has type "C", variable has type "int") [case testSerializeGenericInheritance] import a [file a.py] import b [file a.py.2] from b import B b: B[int] reveal_type(b.f()) [file b.py] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def f(self) -> T: pass class B(A[A[T]]): pass [out2] tmp/a.py:3: error: Revealed type is 'b.A*[builtins.int*]' [case testSerializeFixedLengthTupleBaseClass] import a [file a.py] import b [file a.py.2] from b import A a: A a.f(1) reveal_type((a[0], a[1])) [file b.py] from typing import Tuple class A(Tuple[int, str]): def f(self) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:3: error: Too many arguments for "f" of "A" tmp/a.py:4: error: Revealed type is 'Tuple[builtins.int, builtins.str]' [case testSerializeVariableLengthTupleBaseClass] import a [file a.py] import b [file a.py.2] from b import A a: A a.f(1) reveal_type((a[0], a[1])) [file b.py] from typing import Tuple class A(Tuple[int, ...]): def f(self) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:3: error: Too many arguments for "f" of "A" tmp/a.py:4: error: Revealed type is 'Tuple[builtins.int*, builtins.int*]' [case testSerializePlainTupleBaseClass] import a [file a.py] import b [file a.py.2] from b import A a: A a.f(1) reveal_type((a[0], a[1])) 
[file b.py] from typing import Tuple class A(tuple): def f(self) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:3: error: Too many arguments for "f" of "A" tmp/a.py:4: error: Revealed type is 'Tuple[Any, Any]' [case testSerializeNamedTupleBaseClass] import a [file a.py] import b [file a.py.2] from b import A a: A a.f(1) reveal_type((a[0], a[1])) reveal_type((a.x, a.y)) [file b.py] from typing import NamedTuple class A(NamedTuple('N', [('x', int), ('y', str)])): def f(self) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:3: error: Too many arguments for "f" of "A" tmp/a.py:4: error: Revealed type is 'Tuple[builtins.int, builtins.str]' tmp/a.py:5: error: Revealed type is 'Tuple[builtins.int, builtins.str]' [case testSerializeAnyBaseClass] import a [file a.py] import b [file a.py.2] from b import B B().f(1) reveal_type(B().xyz) [file b.py] from typing import Any A: Any class B(A): def f(self) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:2: error: Too many arguments for "f" of "B" tmp/a.py:3: error: Revealed type is 'Any' [case testSerializeIndirectAnyBaseClass] import a [file a.py] import b [file a.py.2] from b import C C().f(1) C().g(1) reveal_type(C().xyz) [file b.py] from typing import Any A: Any class B(A): def f(self) -> None: pass class C(B): def g(self) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:2: error: Too many arguments for "f" of "B" tmp/a.py:3: error: Too many arguments for "g" of "C" tmp/a.py:4: error: Revealed type is 'Any' [case testSerializeNestedClass] import a [file a.py] import b [file a.py.2] import b b.A.B().f(1) b.A.B.C().g(1) b.b.f(1) b.c.g(1) [file b.py] class A: class B: def f(self) -> None: pass class C: def g(self) -> None: pass b: A.B c: A.B.C [builtins fixtures/tuple.pyi] [out2] tmp/a.py:2: error: Too many arguments for "f" of "B" tmp/a.py:3: error: Too many arguments for "g" of "C" tmp/a.py:4: error: Too many arguments for "f" of "B" tmp/a.py:5: error: Too many arguments for "g" of "C" [case testSerializeCallableVsTypeObjectDistinction] import a [file a.py] import b [file a.py.2] import b t: type t = b.A t = b.f # E [file b.py] class A: pass def f() -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:4: error: Incompatible types in assignment (expression has type "Callable[[], None]", variable has type "type") [case testSerializeOverloadedVsTypeObjectDistinction] import a [file a.py] import b [file a.py.2] import b t: type t = b.A t = b.f # E [file b.pyi] from typing import overload class A: @overload def __init__(self) -> None: pass @overload def __init__(self, x: int) -> None: pass @overload def f() -> None: pass @overload def f(x: int) -> None: pass [builtins fixtures/tuple.pyi] [out2] tmp/a.py:4: error: Incompatible types in assignment (expression has type overloaded function, variable has type "type") [case testSerializeNamedTupleInMethod4] from ntcrash import C reveal_type(C().a) reveal_type(C().b) reveal_type(C().c) [file ntcrash.py] from typing import NamedTuple class C: def __init__(self) -> None: A = NamedTuple('A', [('x', int)]) self.a = A(0) self.b = A(0) # type: A self.c = A [out1] main:2: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]' main:3: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]' main:4: error: Revealed type is 'def (x: builtins.int) -> Tuple[builtins.int, fallback=ntcrash.C.A@4]' [out2] main:2: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]' main:3: error: Revealed type is 'Tuple[builtins.int, 
fallback=ntcrash.C.A@4]' main:4: error: Revealed type is 'def (x: builtins.int) -> Tuple[builtins.int, fallback=ntcrash.C.A@4]' -- -- Strict optional -- [case testSerializeOptionalType] # flags: --strict-optional import a [file a.py] import b [file a.py.2] import b reveal_type(b.x) b.f(b.x) [file b.py] from typing import Optional x: Optional[int] def f(x: int) -> None: pass [out2] tmp/a.py:2: error: Revealed type is 'Union[builtins.int, builtins.None]' tmp/a.py:3: error: Argument 1 to "f" has incompatible type "Optional[int]"; expected "int" -- -- # type: ignore -- [case testSerializeIgnoredUndefinedType] import b reveal_type(b.x) [file b.py] x: NonExistent # type: ignore [out1] main:2: error: Revealed type is 'Any' [out2] main:2: error: Revealed type is 'Any' [case testSerializeIgnoredInvalidType] import b reveal_type(b.x) [file b.py] A = 0 x: A # type: ignore [out1] main:2: error: Revealed type is 'A?' [out2] main:2: error: Revealed type is 'A?' [case testSerializeIgnoredMissingBaseClass] import b reveal_type(b.B()) reveal_type(b.B().x) [file b.py] class B(A): pass # type: ignore [out1] main:2: error: Revealed type is 'b.B' main:3: error: Revealed type is 'Any' [out2] main:2: error: Revealed type is 'b.B' main:3: error: Revealed type is 'Any' [case testSerializeIgnoredInvalidBaseClass] import b reveal_type(b.B()) reveal_type(b.B().x) [file b.py] A = 0 class B(A): pass # type: ignore [out1] main:2: error: Revealed type is 'b.B' main:3: error: Revealed type is 'Any' [out2] main:2: error: Revealed type is 'b.B' main:3: error: Revealed type is 'Any' [case testSerializeIgnoredImport] import a [file a.py] import b [file a.py.2] import b reveal_type(b.m) reveal_type(b.x) [file b.py] import m # type: ignore from m import x # type: ignore [out2] tmp/a.py:2: error: Revealed type is 'Any' tmp/a.py:3: error: Revealed type is 'Any' -- -- TypeVar -- [case testSerializeSimpleTypeVar] import a [file a.py] import b [file a.py.2] import b def f(x: b.T) -> b.T: return x reveal_type(f) [file b.py] from typing import TypeVar T = TypeVar('T') [out2] tmp/a.py:3: error: Revealed type is 'def [b.T] (x: b.T`-1) -> b.T`-1' [case testSerializeBoundedTypeVar] import a [file a.py] import b [file a.py.2] import b def f(x: b.T) -> b.T: return x reveal_type(f) reveal_type(b.g) [file b.py] from typing import TypeVar T = TypeVar('T', bound=int) def g(x: T) -> T: return x [out2] tmp/a.py:3: error: Revealed type is 'def [b.T <: builtins.int] (x: b.T`-1) -> b.T`-1' tmp/a.py:4: error: Revealed type is 'def [T <: builtins.int] (x: T`-1) -> T`-1' [case testSerializeTypeVarWithValues] import a [file a.py] import b [file a.py.2] import b def f(x: b.T) -> b.T: return x reveal_type(f) reveal_type(b.g) [file b.py] from typing import TypeVar T = TypeVar('T', int, str) def g(x: T) -> T: return x [out2] tmp/a.py:3: error: Revealed type is 'def [b.T in (builtins.int, builtins.str)] (x: b.T`-1) -> b.T`-1' tmp/a.py:4: error: Revealed type is 'def [T in (builtins.int, builtins.str)] (x: T`-1) -> T`-1' [case testSerializeTypeVarInClassBody] import a [file a.py] import b [file a.py.2] from b import A def f(x: A.T) -> A.T: return x reveal_type(f) [file b.py] from typing import TypeVar class A: T = TypeVar('T', int, str) [out2] tmp/a.py:3: error: Revealed type is 'def [A.T in (builtins.int, builtins.str)] (x: A.T`-1) -> A.T`-1' -- -- NewType -- [case testSerializeNewType] import a [file a.py] import b [file a.py.2] import b y: b.N y = 1 i = y b.x = 1 b.x = y y = b.N(1) y = b.N('') [file b.py] from typing import NewType N = NewType('N', int) x: 
N [out2] tmp/a.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "N") tmp/a.py:5: error: Incompatible types in assignment (expression has type "int", variable has type "N") tmp/a.py:8: error: Argument 1 to "N" has incompatible type "str"; expected "int" -- -- Named tuples -- [case testSerializeNamedTuple] import a [file a.py] import b [file a.py.2] import b from typing import Tuple y: b.N t: Tuple[int] y = t b.x = t t = y b.x = t reveal_type(b.N(x=1)) reveal_type(y[0]) b.N(x='') [file b.py] from typing import NamedTuple N = NamedTuple('N', [('x', int)]) x: N [out2] tmp/a.py:5: error: Incompatible types in assignment (expression has type "Tuple[int]", variable has type "N") tmp/a.py:6: error: Incompatible types in assignment (expression has type "Tuple[int]", variable has type "N") tmp/a.py:9: error: Revealed type is 'Tuple[builtins.int, fallback=b.N]' tmp/a.py:10: error: Revealed type is 'builtins.int' tmp/a.py:11: error: Argument 1 to "N" has incompatible type "str"; expected "int" -- -- Types and type aliases -- [case testSerializeTypeAliases] import a [file a.py] import b [file a.py.2] import b d: b.D a: b.A u: b.U l: b.L t: b.T c: b.C ty: b.Ty reveal_type(d) reveal_type(a) reveal_type(u) reveal_type(l) reveal_type(t) reveal_type(c) reveal_type(ty) c2: b.C2 reveal_type(c2) ty2: b.Ty2 reveal_type(ty2) [file b.py] from typing import Any, Union, List, Tuple, Callable, Type class DD: pass D = DD A = Any U = Union[int, str] L = List[int] T = Tuple[int, str] C = Callable[[int], str] C2 = Callable[..., str] Ty = Type[int] Ty2 = type [builtins fixtures/list.pyi] [out2] tmp/a.py:9: error: Revealed type is 'b.DD' tmp/a.py:10: error: Revealed type is 'Any' tmp/a.py:11: error: Revealed type is 'Union[builtins.int, builtins.str]' tmp/a.py:12: error: Revealed type is 'builtins.list[builtins.int]' tmp/a.py:13: error: Revealed type is 'Tuple[builtins.int, builtins.str]' tmp/a.py:14: error: Revealed type is 'def (builtins.int) -> builtins.str' tmp/a.py:15: error: Revealed type is 'Type[builtins.int]' tmp/a.py:17: error: Revealed type is 'def (*Any, **Any) -> builtins.str' tmp/a.py:19: error: Revealed type is 'builtins.type' [case testSerializeGenericTypeAlias] import b from b import X # Work around https://github.com/python/mypy/issues/2887 t: b.Y[int] reveal_type(t) [file b.py] from typing import TypeVar, Tuple X = TypeVar('X') Y = Tuple[X, str] [builtins fixtures/tuple.pyi] [out1] main:4: error: Revealed type is 'Tuple[builtins.int, builtins.str]' [out2] main:4: error: Revealed type is 'Tuple[builtins.int, builtins.str]' [case testSerializeTuple] # Don't repreat types tested by testSerializeTypeAliases here. import a [file a.py] import b [file a.py.2] import b reveal_type(b.x) reveal_type(b.y) [file b.py] from typing import Tuple x: Tuple[int, ...] 
y: tuple [builtins fixtures/tuple.pyi] [out2] tmp/a.py:2: error: Revealed type is 'builtins.tuple[builtins.int]' tmp/a.py:3: error: Revealed type is 'builtins.tuple[Any]' [case testSerializeNone] import a [file a.py] import b [file a.py.2] import b reveal_type(b.x) [file b.py] x: None [out2] tmp/a.py:2: error: Revealed type is 'builtins.None' -- -- TypedDict -- [case testSerializeTypedDictInMethod] from ntcrash import C reveal_type(C().a) reveal_type(C().b) reveal_type(C().c) [file ntcrash.py] from mypy_extensions import TypedDict class C: def __init__(self) -> None: A = TypedDict('A', {'x': int}) self.a = A(x=0) self.b = A(x=0) # type: A self.c = A [builtins fixtures/dict.pyi] [out1] main:2: error: Revealed type is 'TypedDict('ntcrash.C.A@4', {'x': builtins.int})' main:3: error: Revealed type is 'TypedDict('ntcrash.C.A@4', {'x': builtins.int})' main:4: error: Revealed type is 'def () -> ntcrash.C.A@4' [out2] main:2: error: Revealed type is 'TypedDict('ntcrash.C.A@4', {'x': builtins.int})' main:3: error: Revealed type is 'TypedDict('ntcrash.C.A@4', {'x': builtins.int})' main:4: error: Revealed type is 'def () -> ntcrash.C.A@4' [case testSerializeNonTotalTypedDict] from m import d reveal_type(d) [file m.py] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}, total=False) d: D [builtins fixtures/dict.pyi] [out1] main:2: error: Revealed type is 'TypedDict('m.D', {'x'?: builtins.int, 'y'?: builtins.str})' [out2] main:2: error: Revealed type is 'TypedDict('m.D', {'x'?: builtins.int, 'y'?: builtins.str})' -- -- Modules -- [case testSerializeImport] import b b.c.f() b.c.g() [file b.py] import c [file c.py] def f() -> None: pass def g(x: int) -> None: pass [out1] main:3: error: Too few arguments for "g" [out2] main:3: error: Too few arguments for "g" [case testSerializeImportAs] import b b.d.f() b.d.g() [file b.py] import c as d [file c.py] def f() -> None: pass def g(x: int) -> None: pass [out1] main:3: error: Too few arguments for "g" [out2] main:3: error: Too few arguments for "g" [case testSerializeFromImportedClass] import b b.A(1) reveal_type(b.A()) [file b.py] from c import A [file c.py] class A: pass [out1] main:2: error: Too many arguments for "A" main:3: error: Revealed type is 'c.A' [out2] main:2: error: Too many arguments for "A" main:3: error: Revealed type is 'c.A' [case testSerializeFromImportedClassAs] import b b.B(1) reveal_type(b.B()) [file b.py] from c import A as B [file c.py] class A: pass [out1] main:2: error: Too many arguments for "A" main:3: error: Revealed type is 'c.A' [out2] main:2: error: Too many arguments for "A" main:3: error: Revealed type is 'c.A' [case testSerializeFromImportedModule] import b b.d.f() b.d.g() [file b.py] from c import d [file c/__init__.py] [file c/d.py] def f() -> None: pass def g(x: int) -> None: pass [out1] main:3: error: Too few arguments for "g" [out2] main:3: error: Too few arguments for "g" [case testSerializeQualifiedImport] import b b.c.d.f() b.c.d.g() [file b.py] import c.d [file c/__init__.py] [file c/d.py] def f() -> None: pass def g(x: int) -> None: pass [out1] main:3: error: Too few arguments for "g" [out2] main:3: error: Too few arguments for "g" [case testSerializeQualifiedImportAs] import b b.e.f() b.e.g() [file b.py] import c.d as e [file c/__init__.py] [file c/d.py] def f() -> None: pass def g(x: int) -> None: pass [out1] main:3: error: Too few arguments for "g" [out2] main:3: error: Too few arguments for "g" [case testSerialize__init__ModuleImport] import b b.c.f() b.c.g() a: b.c.d.A reveal_type(a) 
[file b.py] import c [file c/__init__.py] import d def f() -> None: pass def g(x: int) -> None: pass [file d.py] class A: pass [out1] main:3: error: Too few arguments for "g" main:5: error: Revealed type is 'd.A' [out2] main:3: error: Too few arguments for "g" main:5: error: Revealed type is 'd.A' [case testSerializeImportInClassBody] import b b.A.c.f() b.A.c.g() [file b.py] class A: import c [file c.py] def f() -> None: pass def g(x: int) -> None: pass [out1] main:3: error: Too few arguments for "g" [out2] main:3: error: Too few arguments for "g" [case testSerializeImportedTypeAlias] import b x: b.B reveal_type(x) [file b.py] from c import B [file c.py] from typing import Any class A: pass B = A [out1] main:3: error: Revealed type is 'c.A' [out2] main:3: error: Revealed type is 'c.A' [case testSerializeStarImport] import a [file a.py] import b [file a.py.2] import b b.f(1) x: b.A reveal_type(x) [file b.py] from c import * [file c.py] def f() -> None: pass class A: pass [out2] tmp/a.py:2: error: Too many arguments for "f" tmp/a.py:4: error: Revealed type is 'c.A' [case testSerializeRelativeImport] import b.c b.c.f(1) [file b/__init__.py] [file b/c.py] from .d import f [file b/d.py] def f() -> None: pass [out1] main:2: error: Too many arguments for "f" [out2] main:2: error: Too many arguments for "f" mypy-0.560/test-data/unit/check-statements.test0000644€tŠÔÚ€2›s®0000012001613215007205025677 0ustar jukkaDROPBOX\Domain Users00000000000000-- Return statement -- ---------------- [case testReturnValue] import typing def f() -> 'A': return A() def g() -> 'B': return A() class A: pass class B: pass [out] main:5: error: Incompatible return value type (got "A", expected "B") [case testReturnSubtype] import typing def f() -> 'B': return A() def g() -> 'A': return B() class A: pass class B(A): pass [out] main:3: error: Incompatible return value type (got "A", expected "B") [case testReturnWithoutAValue] import typing def f() -> 'A': return def g() -> None: return class A: pass [out] main:3: error: Return value expected [case testReturnNoneInFunctionReturningNone] import typing def f() -> None: return None def g() -> None: return f() [out] [case testReturnInGenerator] from typing import Generator def f() -> Generator[int, None, str]: yield 1 return "foo" [out] [case testEmptyReturnInGenerator] from typing import Generator def f() -> Generator[int, None, str]: yield 1 return # E: Return value expected [out] [case testNoReturnInGenerator] from typing import Generator def f() -> Generator[int, None, str]: # E: Missing return statement yield 1 [out] [case testEmptyReturnInNoneTypedGenerator] from typing import Generator def f() -> Generator[int, None, None]: yield 1 return [out] [case testNonEmptyReturnInNoneTypedGenerator] from typing import Generator def f() -> Generator[int, None, None]: yield 1 return 42 # E: No return value expected [out] [case testReturnInIterator] from typing import Iterator def f() -> Iterator[int]: yield 1 return "foo" [out] -- If statement -- ------------ [case testIfStatement] a = None # type: A a2 = None # type: A a3 = None # type: A b = None # type: bool if a: a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") elif a2: a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") elif a3: a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") else: a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A") if b: pass elif 
b: pass if b: pass class A: pass [builtins fixtures/bool.pyi] -- Loops -- ----- [case testWhileStatement] a = None # type: A b = None # type: bool while a: a = b # Fail else: a = b # Fail while b: b = b class A: pass [builtins fixtures/bool.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "bool", variable has type "A") main:7: error: Incompatible types in assignment (expression has type "bool", variable has type "A") [case testForStatement] a = None # type: A b = None # type: object for a in [A()]: a = b # Fail else: a = b # Fail class A: pass [builtins fixtures/list.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "object", variable has type "A") main:7: error: Incompatible types in assignment (expression has type "object", variable has type "A") [case testBreakStatement] import typing while None: break [builtins fixtures/bool.pyi] [out] [case testContinueStatement] import typing while None: continue [builtins fixtures/bool.pyi] [out] [case testForStatementTypeComments] from typing import List, Union x = [] # type: List[int] for y in x: # type: str # E: Incompatible types in assignment (expression has type "int", variable has type "str") pass for z in x: # type: int pass for w in x: # type: Union[int, str] reveal_type(w) # E: Revealed type is 'Union[builtins.int, builtins.str]' for v in x: # type: int, int # E: Invalid tuple literal type pass [builtins fixtures/list.pyi] [case testForStatementMultipleTypeComments] from typing import List, Tuple x = [] # type: List[Tuple[int, int]] for y in x: # type: int, int # E: Invalid tuple literal type pass for z in x: # type: Tuple[int, int] pass for w,v in x: # type: int, str # E: Incompatible types in assignment (expression has type "int", variable has type "str") pass for a, b in x: # type: int, int, int # E: Incompatible number of tuple items pass [builtins fixtures/list.pyi] -- Operator assignment -- ------------------- [case testPlusAssign] a, b, c = None, None, None # type: (A, B, C) a += b # Fail b += a # Fail c += a # Fail a += c class A: def __add__(self, x: 'C') -> 'A': pass class B: def __add__(self, x: A) -> 'C': pass class C: pass [out] main:3: error: Unsupported operand types for + ("A" and "B") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "B") main:5: error: Unsupported left operand type for + ("C") [case testMinusAssign] a, b, c = None, None, None # type: (A, B, C) a -= b # Fail b -= a # Fail c -= a # Fail a -= c class A: def __sub__(self, x: 'C') -> 'A': pass class B: def __sub__(self, x: A) -> 'C': pass class C: pass [out] main:3: error: Unsupported operand types for - ("A" and "B") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "B") main:5: error: Unsupported left operand type for - ("C") [case testMulAssign] a, c = None, None # type: (A, C) a *= a # Fail c *= a # Fail a *= c class A: def __mul__(self, x: 'C') -> 'A': pass class C: pass [out] main:3: error: Unsupported operand types for * ("A" and "A") main:4: error: Unsupported left operand type for * ("C") [case testMatMulAssign] a, c = None, None # type: (A, C) a @= a # E: Unsupported operand types for @ ("A" and "A") c @= a # E: Unsupported left operand type for @ ("C") a @= c class A: def __matmul__(self, x: 'C') -> 'A': pass class C: pass [case testDivAssign] a, c = None, None # type: (A, C) a /= a # Fail c /= a # Fail a /= c class A: def __truediv__(self, x: 'C') -> 'A': pass class C: pass [out] main:3: error: Unsupported 
operand types for / ("A" and "A") main:4: error: Unsupported left operand type for / ("C") [case testPowAssign] a, c = None, None # type: (A, C) a **= a # Fail c **= a # Fail a **= c class A: def __pow__(self, x: 'C') -> 'A': pass class C: pass [out] main:3: error: Unsupported operand types for ** ("A" and "A") main:4: error: Unsupported left operand type for ** ("C") [case testSubtypesInOperatorAssignment] a, b = None, None # type: (A, B) b += b b += a a += b class A: def __add__(self, x: 'A') -> 'B': pass class B(A): pass [out] [case testAdditionalOperatorsInOpAssign] a, c = None, None # type: (A, C) a &= a # Fail a >>= a # Fail a //= a # Fail a &= c a >>= c a //= c class A: def __and__(self, x: 'C') -> 'A': pass def __rshift__(self, x: 'C') -> 'A': pass def __floordiv__(self, x: 'C') -> 'A': pass class C: pass [out] main:3: error: Unsupported operand types for & ("A" and "A") main:4: error: Unsupported operand types for >> ("A" and "A") main:5: error: Unsupported operand types for // ("A" and "A") [case testInplaceOperatorMethods] import typing class A: def __iadd__(self, x: int) -> 'A': pass def __imul__(self, x: str) -> 'A': pass def __imatmul__(self, x: str) -> 'A': pass a = A() a += 1 a *= '' a @= '' a += '' # E: Argument 1 to "__iadd__" of "A" has incompatible type "str"; expected "int" a *= 1 # E: Argument 1 to "__imul__" of "A" has incompatible type "int"; expected "str" a @= 1 # E: Argument 1 to "__imatmul__" of "A" has incompatible type "int"; expected "str" [case testInplaceSetitem] class A(object): def __init__(self): self.a = 0 def __iadd__(self, a): # type: (int) -> A self.a += 1 return self a = A() b = [a] b[0] += 1 [builtins fixtures/list.pyi] [out] -- Assert statement -- ---------------- [case testAssert] import typing assert None + None # Fail assert None [out] main:2: error: Unsupported left operand type for + ("None") -- Exception handling -- ------------------ [case testRaiseStatement] e = None # type: BaseException f = None # type: MyError a = None # type: A raise a # Fail raise e raise f class A: pass class MyError(BaseException): pass [builtins fixtures/exception.pyi] [out] main:5: error: Exception must be derived from BaseException [case testRaiseClassobject] import typing class A: pass class MyError(BaseException): pass def f(): pass raise BaseException raise MyError raise A # E: Exception must be derived from BaseException raise object # E: Exception must be derived from BaseException raise f # E: Exception must be derived from BaseException [builtins fixtures/exception.pyi] [case testRaiseFromStatement] e = None # type: BaseException f = None # type: MyError a = None # type: A raise e from a # E: Exception must be derived from BaseException raise e from e raise e from f class A: pass class MyError(BaseException): pass [builtins fixtures/exception.pyi] [case testRaiseFromClassobject] import typing class A: pass class MyError(BaseException): pass def f(): pass raise BaseException from BaseException raise BaseException from MyError raise BaseException from A # E: Exception must be derived from BaseException raise BaseException from object # E: Exception must be derived from BaseException raise BaseException from f # E: Exception must be derived from BaseException [builtins fixtures/exception.pyi] [case testTryFinallyStatement] import typing try: b = object() # type: A # Fail finally: c = object() # type: A # Fail class A: pass [out] main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A") main:5: error: Incompatible 
types in assignment (expression has type "object", variable has type "A") [case testSimpleTryExcept] try: pass except BaseException as e: a, o = None, None # type: (BaseException, object) e = a e = o # Fail class A: pass class B: pass [builtins fixtures/exception.pyi] [out] main:7: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException") [case testTypeErrorInBlock] while object: x = None # type: A x = object() x = B() class A: pass class B: pass [out] main:4: error: Incompatible types in assignment (expression has type "object", variable has type "A") main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testTypeErrorInvolvingBaseException] x, a = None, None # type: (BaseException, A) a = BaseException() # Fail a = object() # Fail x = object() # Fail x = A() # Fail x = BaseException() class A: pass [builtins fixtures/exception.pyi] [out] main:3: error: Incompatible types in assignment (expression has type "BaseException", variable has type "A") main:4: error: Incompatible types in assignment (expression has type "object", variable has type "A") main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException") main:6: error: Incompatible types in assignment (expression has type "A", variable has type "BaseException") [case testSimpleTryExcept2] import typing try: pass except BaseException as e: e = object() # Fail e = BaseException() [builtins fixtures/exception.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException") [case testBaseClassAsExceptionTypeInExcept] import typing try: pass except Err as e: e = BaseException() # Fail e = Err() class Err(BaseException): pass [builtins fixtures/exception.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") [case testMultipleExceptHandlers] import typing try: pass except BaseException as e: pass except Err as f: f = BaseException() # Fail f = Err() class Err(BaseException): pass [builtins fixtures/exception.pyi] [out] main:7: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") [case testTryExceptStatement] import typing try: a = B() # type: A # Fail except BaseException as e: e = A() # Fail e = Err() except Err as f: f = BaseException() # Fail f = Err() class A: pass class B: pass class Err(BaseException): pass [builtins fixtures/exception.pyi] [out] main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:5: error: Incompatible types in assignment (expression has type "A", variable has type "BaseException") main:8: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") [case testTryExceptWithinFunction] import typing def f() -> None: try: pass except BaseException as e: e = object() # Fail e = BaseException() except Err as f: f = BaseException() # Fail f = Err() class Err(BaseException): pass [builtins fixtures/exception.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException") main:8: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") [case testTryExceptFlow] def f() -> None: x = 1 try: pass except: raise x + 'a' # E: Unsupported left operand type for + ("int") [builtins fixtures/exception.pyi] [out] [case 
testTryWithElse] import typing try: pass except BaseException: pass else: object(None) # E: Too many arguments for "object" [builtins fixtures/exception.pyi] [case testRedefinedFunctionInTryWithElse] def f() -> None: pass try: pass except BaseException: f2 = f else: def f2() -> str: pass try: pass except BaseException: f3 = f else: def f3() -> None: pass [builtins fixtures/exception.pyi] [out] main:7: error: Incompatible redefinition (redefinition with type "Callable[[], str]", original type "Callable[[], None]") [case testExceptWithoutType] import typing try: -None # E: Unsupported operand type for unary - ("None") except: ~None # E: Unsupported operand type for ~ ("None") [builtins fixtures/exception.pyi] [case testRaiseWithoutArgument] import typing try: None except: raise [builtins fixtures/exception.pyi] [case testExceptWithMultipleTypes] import typing class E1(BaseException): pass class E2(E1): pass try: pass except (E1, E2): pass except (E1, object): pass # E: Exception type must be derived from BaseException except (object, E2): pass # E: Exception type must be derived from BaseException except (E1, (E2,)): pass # E: Exception type must be derived from BaseException except (E1, E2): pass except ((E1, E2)): pass except (((E1, E2))): pass [builtins fixtures/exception.pyi] [case testExceptWithMultipleTypes2] import typing class E1(BaseException): pass class E2(E1): pass try: pass except (E1, E2) as e1: x = e1 # type: E1 y = e1 # type: E2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E2") except (E2, E1) as e2: a = e2 # type: E1 b = e2 # type: E2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E2") except (E1, E2, int) as e3: # E: Exception type must be derived from BaseException pass [builtins fixtures/exception.pyi] [case testExceptWithMultipleTypes3] import typing class E1(BaseException): pass class E1_1(E1): pass class E1_2(E1): pass try: pass except (E1, E1_1, E1_2) as e1: x = e1 # type: E1 y = e1 # type: E1_1 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_1") z = e1 # type: E1_2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_2") except (E1_1, E1_2) as e2: a = e2 # type: E1 b = e2 # type: E1_1 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_1") c = e2 # type: E1_2 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_2") [builtins fixtures/exception.pyi] [case testExceptWithMultipleTypes4] from typing import Tuple, Type, Union class E1(BaseException): pass class E2(BaseException): pass class E3(BaseException): pass def variadic(exc: Tuple[Type[E1], ...]) -> None: try: pass except exc as e: reveal_type(e) # E: Revealed type is '__main__.E1' def union(exc: Union[Type[E1], Type[E2]]) -> None: try: pass except exc as e: reveal_type(e) # E: Revealed type is 'Union[__main__.E1, __main__.E2]' def tuple_in_union(exc: Union[Type[E1], Tuple[Type[E2], Type[E3]]]) -> None: try: pass except exc as e: reveal_type(e) # E: Revealed type is 'Union[__main__.E1, __main__.E2, __main__.E3]' def variadic_in_union(exc: Union[Type[E1], Tuple[Type[E2], ...]]) -> None: try: pass except exc as e: reveal_type(e) # E: Revealed type is 'Union[__main__.E1, __main__.E2]' def nested_union(exc: Union[Type[E1], Union[Type[E2], Type[E3]]]) -> None: try: pass except exc as e: reveal_type(e) # E: Revealed type is 'Union[__main__.E1, __main__.E2, __main__.E3]' def 
error_in_union(exc: Union[Type[E1], int]) -> None: try: pass except exc as e: # E: Exception type must be derived from BaseException pass def error_in_variadic(exc: Tuple[int, ...]) -> None: try: pass except exc as e: # E: Exception type must be derived from BaseException pass [builtins fixtures/tuple.pyi] [case testExceptWithAnyTypes] from typing import Any E1 = None # type: Any class E2(BaseException): pass class NotBaseDerived: pass try: pass except BaseException as e1: reveal_type(e1) # E: Revealed type is 'builtins.BaseException' except (E1, BaseException) as e2: reveal_type(e2) # E: Revealed type is 'Union[Any, builtins.BaseException]' except (E1, E2) as e3: reveal_type(e3) # E: Revealed type is 'Union[Any, __main__.E2]' except (E1, E2, BaseException) as e4: reveal_type(e4) # E: Revealed type is 'Union[Any, builtins.BaseException]' try: pass except E1 as e1: reveal_type(e1) # E: Revealed type is 'Any' except E2 as e2: reveal_type(e2) # E: Revealed type is '__main__.E2' except NotBaseDerived as e3: # E: Exception type must be derived from BaseException pass except (NotBaseDerived, E1) as e4: # E: Exception type must be derived from BaseException pass except (NotBaseDerived, E2) as e5: # E: Exception type must be derived from BaseException pass except (NotBaseDerived, E1, E2) as e6: # E: Exception type must be derived from BaseException pass except (E1, E2, NotBaseDerived) as e6: # E: Exception type must be derived from BaseException pass [builtins fixtures/exception.pyi] [case testReuseTryExceptionVariable] import typing class E1(BaseException): pass class E2(BaseException): pass try: pass except E1 as e: pass try: pass except E1 as e: pass try: pass except E2 as e: pass e + 1 # E: Trying to read deleted variable 'e' e = E1() # E: Assignment to variable 'e' outside except: block [builtins fixtures/exception.pyi] [case testReuseDefinedTryExceptionVariable] import typing class E1(BaseException): pass class E2(BaseException): pass e = 1 e = 1 try: pass except E1 as e: pass e = 1 # E: Assignment to variable 'e' outside except: block e = E1() # E: Assignment to variable 'e' outside except: block [builtins fixtures/exception.pyi] [case testExceptionVariableReuseInDeferredNode1] def f(*a: BaseException) -> int: x try: pass except BaseException as err: pass try: pass except BaseException as err: f(err) return 0 x = f() [builtins fixtures/exception.pyi] [case testExceptionVariableReuseInDeferredNode2] def f(*a: BaseException) -> int: try: pass except BaseException as err: pass x try: pass except BaseException as err: f(err) return 0 x = f() [builtins fixtures/exception.pyi] [case testExceptionVariableReuseInDeferredNode3] def f(*a: BaseException) -> int: try: pass except BaseException as err: pass try: pass except BaseException as err: f(err) x return 0 x = f() [builtins fixtures/exception.pyi] [case testExceptionVariableReuseInDeferredNode4] class EA(BaseException): a = None # type: int class EB(BaseException): b = None # type: str def f(*arg: BaseException) -> int: x try: pass except EA as err: f(err) a = err.a reveal_type(a) try: pass except EB as err: f(err) b = err.b reveal_type(b) return 0 x = f() [builtins fixtures/exception.pyi] [out] main:11: error: Revealed type is 'builtins.int' main:16: error: Revealed type is 'builtins.str' [case testExceptionVariableReuseInDeferredNode5] class EA(BaseException): a = None # type: int class EB(BaseException): b = None # type: str def f(*arg: BaseException) -> int: try: pass except EA as err: f(err) a = err.a reveal_type(a) x try: pass except EB as 
err: f(err) b = err.b reveal_type(b) return 0 x = f() [builtins fixtures/exception.pyi] [out] main:10: error: Revealed type is 'builtins.int' main:16: error: Revealed type is 'builtins.str' [case testExceptionVariableReuseInDeferredNode6] class EA(BaseException): a = None # type: int class EB(BaseException): b = None # type: str def f(*arg: BaseException) -> int: try: pass except EA as err: f(err) a = err.a reveal_type(a) try: pass except EB as err: f(err) b = err.b reveal_type(b) x return 0 x = f() [builtins fixtures/exception.pyi] [out] main:10: error: Revealed type is 'builtins.int' main:15: error: Revealed type is 'builtins.str' [case testArbitraryExpressionAsExceptionType] import typing a = BaseException try: pass except a as b: b = BaseException() b = object() # E: Incompatible types in assignment (expression has type "object", variable has type "BaseException") [builtins fixtures/exception.pyi] [case testInvalidExceptionCallable] import typing def exc() -> BaseException: pass try: pass except exc as e: pass # E: Exception type must be derived from BaseException except BaseException() as b: pass # E: Exception type must be derived from BaseException [builtins fixtures/exception.pyi] [case testTupleValueAsExceptionType] import typing def exc() -> BaseException: pass class E1(BaseException): pass class E1_1(E1): pass class E1_2(E1): pass exs1 = (E1, E1_1, E1_2) try: pass except exs1 as e1: x = e1 # type: E1 y = e1 # type: E1_1 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_1") z = e1 # type: E1_2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_2") exs2 = (E1_1, E1_2) try: pass except exs2 as e2: a = e2 # type: E1 b = e2 # type: E1_1 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_1") c = e2 # type: E1_2 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_2") exs3 = (E1, (E1_1, (E1_2,))) try: pass except exs3 as e3: pass # E: Exception type must be derived from BaseException [builtins fixtures/exception.pyi] [case testInvalidTupleValueAsExceptionType] import typing def exc() -> BaseException: pass class E1(BaseException): pass class E2(E1): pass exs1 = (E1, E2, int) try: pass except exs1 as e: pass # E: Exception type must be derived from BaseException [builtins fixtures/exception.pyi] [case testOverloadedExceptionType] from foo import * [file foo.pyi] from typing import overload class E(BaseException): @overload def __init__(self) -> None: pass @overload def __init__(self, x) -> None: pass try: pass except E as e: e = E() e = BaseException() # E: Incompatible types in assignment (expression has type "BaseException", variable has type "E") [builtins fixtures/exception.pyi] [case testExceptionWithAnyBaseClass] from typing import Any E = None # type: Any class EE(E): pass raise EE() raise EE [builtins fixtures/exception.pyi] [case testExceptionIsType] from typing import Type class B(BaseException): pass def f(e: Type[B]): try: pass except e: pass def g(e: Type[BaseException]): try: pass except e as err: reveal_type(err) def h(e: Type[int]): try: pass except e: pass [builtins fixtures/exception.pyi] [out] main:9: error: Revealed type is 'builtins.BaseException' main:12: error: Exception type must be derived from BaseException -- Del statement -- ------------- [case testDelStmtWithIndex] a, b = None, None # type: (A, B) del b[a] del b[b] # E: Argument 1 to "__delitem__" of "B" has incompatible type "B"; 
expected "A" del a[a] # E: "A" has no attribute "__delitem__" del a[b] # E: "A" has no attribute "__delitem__" class B: def __delitem__(self, index: 'A'): pass class A: pass [case testDelStmtWithAttribute] class A: def f(self): pass x = 0 a = A() del a.f del a.x del a.z # E: "A" has no attribute "z" [case testDelStatementWithTuple] class A: x = 0 a = A() del a.x, a.y # E: "A" has no attribute "y" [case testDelStatementWithAssignmentSimple] a = 1 a + 1 del a a + 1 # E: Trying to read deleted variable 'a' [builtins fixtures/ops.pyi] [case testDelStatementWithAssignmentTuple] a = 1 b = 1 del (a, b) b + 1 # E: Trying to read deleted variable 'b' [builtins fixtures/ops.pyi] [case testDelStatementWithAssignmentClass] class C: a = 1 c = C() c.a = 1 c.a + 1 del c.a c.a + 1 [builtins fixtures/ops.pyi] [case testDelStatementWithConditions] x = 5 del x if x: ... # E: Trying to read deleted variable 'x' def f(x): return x if 0: ... elif f(x): ... # E: Trying to read deleted variable 'x' while x == 5: ... # E: Trying to read deleted variable 'x' -- Yield statement -- --------------- [case testSimpleYield] from typing import Iterator def f() -> Iterator[int]: yield 1 yield '' # E: Incompatible types in "yield" (actual type "str", expected type "int") [builtins fixtures/for.pyi] [out] [case testYieldInFunctionReturningGenerator] from typing import Generator def f() -> Generator[int, None, None]: yield 1 [builtins fixtures/for.pyi] [out] [case testYieldInFunctionReturningIterable] from typing import Iterable def f() -> Iterable[int]: yield 1 [builtins fixtures/for.pyi] [out] [case testYieldInFunctionReturningObject] def f() -> object: yield 1 [builtins fixtures/for.pyi] [out] [case testYieldInFunctionReturningAny] from typing import Any def f() -> Any: yield object() [out] [case testYieldInFunctionReturningFunction] from typing import Callable def f() -> Callable[[], None]: # E: The return type of a generator function should be "Generator" or one of its supertypes yield object() [out] [case testYieldInDynamicallyTypedFunction] import typing def f(): yield f [case testWithInvalidInstanceReturnType] import typing def f() -> int: # E: The return type of a generator function should be "Generator" or one of its supertypes yield 1 [builtins fixtures/for.pyi] [out] [case testTypeInferenceContextAndYield] from typing import List, Iterator def f() -> 'Iterator[List[int]]': yield [] yield [object()] # E: List item 0 has incompatible type "object"; expected "int" [builtins fixtures/for.pyi] [out] [case testYieldAndReturnWithoutValue] from typing import Iterator def f() -> Iterator[int]: yield 1 return [builtins fixtures/for.pyi] [case testYieldWithNoValue] from typing import Iterator def f() -> Iterator[None]: yield [builtins fixtures/for.pyi] [case testYieldWithNoValueWhenValueRequired] from typing import Iterator def f() -> Iterator[int]: yield # E: Yield value expected [builtins fixtures/for.pyi] [out] [case testYieldWithExplicitNone] from typing import Iterator def f() -> Iterator[None]: yield None [builtins fixtures/for.pyi] [out] -- Yield from statement -- -------------------- -- -- (It's not really a statement, but don't want to move the tests.) 
[case testSimpleYieldFromWithIterator] from typing import Iterator def g() -> Iterator[str]: yield '42' def h() -> Iterator[int]: yield 42 def f() -> Iterator[str]: yield from g() yield from h() # E: Incompatible types in "yield from" (actual type "int", expected type "str") [out] [case testYieldFromAppliedToAny] from typing import Any def g() -> Any: yield object() def f() -> Any: yield from g() [out] [case testYieldFromInFunctionReturningFunction] from typing import Iterator, Callable def g() -> Iterator[int]: yield 42 def f() -> Callable[[], None]: # E: The return type of a generator function should be "Generator" or one of its supertypes yield from g() [out] [case testYieldFromNotIterableReturnType] from typing import Iterator def g() -> Iterator[int]: yield 42 def f() -> int: # E: The return type of a generator function should be "Generator" or one of its supertypes yield from g() [out] [case testYieldFromNotAppliedIterator] from typing import Iterator def g() -> int: return 42 def f() -> Iterator[int]: yield from g() # E: "yield from" can't be applied to "int" [out] [case testYieldFromCheckIncompatibleTypesTwoIterables] from typing import List, Iterator def g() -> Iterator[List[int]]: yield [2, 3, 4] def f() -> Iterator[List[int]]: yield from g() yield from [1, 2, 3] # E: Incompatible types in "yield from" (actual type "int", expected type "List[int]") [builtins fixtures/for.pyi] [out] [case testYieldFromNotAppliedToNothing] def h(): yield from # E: invalid syntax [out] [case testYieldFromAndYieldTogether] from typing import Iterator def f() -> Iterator[str]: yield "g1 ham" yield from g() yield "g1 eggs" def g() -> Iterator[str]: yield "g2 spam" yield "g2 more spam" [out] [case testYieldFromAny] from typing import Iterator def f(a): b = yield from a return b [out] [case testYieldFromGenericCall] from typing import Generator, TypeVar T = TypeVar('T') def f(a: T) -> Generator[int, str, T]: pass def g() -> Generator[int, str, float]: r = yield from f('') reveal_type(r) # E: Revealed type is 'builtins.str*' return 3.14 -- With statement -- -------------- [case testSimpleWith] import typing class A: def __enter__(self) -> None: pass def __exit__(self, x, y, z) -> None: pass with A(): object(A) # E: Too many arguments for "object" [case testWithStmtAndInvalidExit] import typing class A: def __enter__(self) -> None: pass def __exit__(self, x, y) -> None: pass with A(): # E: Too many arguments for "__exit__" of "A" pass [case testWithStmtAndMissingExit] import typing class A: def __enter__(self) -> None: pass with A(): # E: "A" has no attribute "__exit__" pass [case testWithStmtAndInvalidEnter] import typing class A: def __enter__(self, x) -> None: pass def __exit__(self, x, y, z) -> None: pass with A(): # E: Too few arguments for "__enter__" of "A" pass [case testWithStmtAndMissingEnter] import typing class A: def __exit__(self, x, y, z) -> None: pass with A(): # E: "A" has no attribute "__enter__" pass [case testWithStmtAndMultipleExprs] import typing class A: def __enter__(self) -> None: pass def __exit__(self, x, y, z) -> None: pass class B: def __enter__(self) -> None: pass with A(), B(): # E: "B" has no attribute "__exit__" pass with B(), A(): # E: "B" has no attribute "__exit__" pass [case testWithStmtAndResult] import typing class B: pass class A: def __enter__(self) -> B: pass def __exit__(self, x, y, z): pass with A() as b: b = B() b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") [case testWithStmtAndMultipleResults] from typing import 
TypeVar, Generic t = TypeVar('t') class B: pass class C: pass class A(Generic[t]): def __enter__(self) -> t: pass def __exit__(self, x, y, z): pass a_b = A() # type: A[B] a_c = A() # type: A[C] with a_b as b, a_c as c: b = B() c = C() b = c # E: Incompatible types in assignment (expression has type "C", variable has type "B") c = b # E: Incompatible types in assignment (expression has type "B", variable has type "C") [case testWithStmtAndComplexTarget] from typing import Tuple class A: def __enter__(self) -> Tuple[int, str]: pass def __exit__(self, x, y, z): pass with A() as (a, b): a = 1 b = '' a = b # E: Incompatible types in assignment (expression has type "str", variable has type "int") [builtins fixtures/tuple.pyi] [case testWithStmtTypeComment] from typing import Union class A: def __enter__(self) -> int: pass def __exit__(self, x, y, z): pass with A(): # type: int # E: Invalid type comment pass with A() as a: # type: int pass with A() as b: # type: str # E: Incompatible types in assignment (expression has type "int", variable has type "str") pass with A() as c: # type: int, int # E: Invalid tuple literal type pass with A() as d: # type: Union[int, str] reveal_type(d) # E: Revealed type is 'Union[builtins.int, builtins.str]' [case testWithStmtTupleTypeComment] from typing import Tuple class A: def __enter__(self) -> Tuple[int, int]: pass def __exit__(self, x, y, z): pass with A(): pass with A() as a: # type: Tuple[int, int] pass with A() as b: # type: Tuple[int, str] # E: Incompatible types in assignment (expression has type "Tuple[int, int]", variable has type "Tuple[int, str]") pass with A() as (c, d): # type: int, int pass with A() as (e, f): # type: Tuple[int, int] pass with A() as (g, h): # type: int # E: Tuple type expected for multiple variables pass with A() as (i, j): # type: int, int, str # E: Incompatible number of tuple items pass with A() as (k, l): # type: int, str # E: Incompatible types in assignment (expression has type "int", variable has type "str") pass [builtins fixtures/tuple.pyi] [case testWithStmtComplexTypeComment] from typing import Tuple class A: def __enter__(self) -> Tuple[int, int]: pass def __exit__(self, x, y, z): pass class B: def __enter__(self) -> str: pass def __exit__(self, x, y, z): pass with A() as a, A() as (b, c), B() as d: # type: Tuple[int, int], (int, int), str pass with A() as e, A() as (f, g), B() as h: # type: Tuple[int, int], Tuple[int, int], str pass with A() as i, A() as (j, k), B() as l: # type: (int, int), (int, int), str # E: Invalid tuple literal type pass with A(), A(), B() as m, A() as n, B(), B() as o: # type: int, Tuple[int, int] # E: Incompatible number of types for `with` targets pass with A(), B(), B() as p, A(), A(): # type: str pass [builtins fixtures/tuple.pyi] -- Chained assignment -- ------------------ [case testChainedAssignment] import typing class A: pass class B: pass x = y = A() x = A() y = A() x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") y = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [case testChainedAssignment2] import typing def f() -> None: x = 1 y = 'x' x = y = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int") x = y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [builtins fixtures/primitives.pyi] [out] [case testChainedAssignmentWithType] x = y = None # type: int x = '' # E: Incompatible types in assignment (expression 
has type "str", variable has type "int") y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") x = 1 y = 1 -- Star assignment -- --------------- [case testAssignListToStarExpr] from typing import List bs, cs = None, None # type: List[A], List[B] *bs, b = bs *bs, c = cs # E: Incompatible types in assignment (expression has type "List[B]", variable has type "List[A]") *ns, c = cs nc = cs class A: pass class B: pass [builtins fixtures/list.pyi] -- Type aliases -- ------------ [case testSimpleTypeAlias] import typing foo = int def f(x: foo) -> None: pass f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [case testTypeAliasDefinedInAModule] import typing import m def f(x: m.foo) -> None: pass f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [file m.py] import typing foo = int [case testTypeAliasDefinedInAModule2] import typing from m import foo def f(x: foo) -> None: pass f(1) f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [file m.py] import typing foo = int -- nonlocal and global -- ------------------- [case testTypeOfGlobalUsed] import typing g = A() def f() -> None: global g g = B() class A(): pass class B(): pass [out] main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testTypeOfNonlocalUsed] import typing def f() -> None: a = A() def g() -> None: nonlocal a a = B() class A(): pass class B(): pass [out] main:6: error: Incompatible types in assignment (expression has type "B", variable has type "A") [case testTypeOfOuterMostNonlocalUsed] import typing def f() -> None: a = A() def g() -> None: a = B() def h() -> None: nonlocal a a = A() a = B() class A(): pass class B(): pass [out] main:8: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testAugmentedAssignmentIntFloat] weight0 = 65.5 reveal_type(weight0) # E: Revealed type is 'builtins.float' weight0 = 65 reveal_type(weight0) # E: Revealed type is 'builtins.int' weight0 *= 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "float") weight0 *= 0.5 reveal_type(weight0) # E: Revealed type is 'builtins.float' weight0 *= object() # E: Unsupported operand types for * ("float" and "object") reveal_type(weight0) # E: Revealed type is 'builtins.float' [builtins fixtures/float.pyi] [case testAugmentedAssignmentIntFloatMember] class A: def __init__(self) -> None: self.weight0 = 65.5 reveal_type(self.weight0) # E: Revealed type is 'builtins.float' self.weight0 = 65 reveal_type(self.weight0) # E: Revealed type is 'builtins.int' self.weight0 *= 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "float") self.weight0 *= 0.5 reveal_type(self.weight0) # E: Revealed type is 'builtins.float' self.weight0 *= object() # E: Unsupported operand types for * ("float" and "object") reveal_type(self.weight0) # E: Revealed type is 'builtins.float' [builtins fixtures/float.pyi] [case testAugmentedAssignmentIntFloatDict] from typing import Dict d = {'weight0': 65.5} reveal_type(d['weight0']) # E: Revealed type is 'builtins.float*' d['weight0'] = 65 reveal_type(d['weight0']) # E: Revealed type is 'builtins.float*' d['weight0'] *= 'a' # E: Unsupported operand types for * ("float" and "str") # E: Incompatible types in assignment (expression has type "str", target has type "float") d['weight0'] *= 0.5 reveal_type(d['weight0']) # E: Revealed type is 'builtins.float*' d['weight0'] 
*= object() # E: Unsupported operand types for * ("float" and "object")
reveal_type(d['weight0']) # E: Revealed type is 'builtins.float*'
[builtins fixtures/floatdict.pyi]

[case testForwardRefsInForStatementImplicit]
from typing import List, NamedTuple
lst: List[N]
for i in lst:
    reveal_type(i.x) # E: Revealed type is 'builtins.int'
    a: str = i[0] # E: Incompatible types in assignment (expression has type "int", variable has type "str")
N = NamedTuple('N', [('x', int)])
[builtins fixtures/list.pyi]
[out]

[case testForwardRefsInForStatement]
from typing import List, NamedTuple
lst: List[M]
for i in lst: # type: N
    reveal_type(i.x) # E: Revealed type is 'builtins.int'
    a: str = i[0] # E: Incompatible types in assignment (expression has type "int", variable has type "str")
N = NamedTuple('N', [('x', int)])
class M(N): pass
[builtins fixtures/list.pyi]
[out]

[case testForwardRefsInWithStatementImplicit]
from typing import ContextManager, Any
from mypy_extensions import TypedDict
cm: ContextManager[N]
with cm as g:
    a: int = g['x']
N = TypedDict('N', {'x': int})
[builtins fixtures/dict.pyi]
[typing fixtures/typing-full.pyi]
[out]

[case testForwardRefsInWithStatement]
from typing import ContextManager, Any
from mypy_extensions import TypedDict
cm: ContextManager[Any]
with cm as g: # type: N
    a: str = g['x'] # E: Incompatible types in assignment (expression has type "int", variable has type "str")
N = TypedDict('N', {'x': int})
[builtins fixtures/dict.pyi]
[typing fixtures/typing-full.pyi]
[out]

mypy-0.560/test-data/unit/check-super.test
-- Test cases for type checker related to super().

-- Supertype member reference
-- --------------------------

[case testAccessingSupertypeMethod]
class B:
    def f(self) -> 'B': pass
class A(B):
    def f(self) -> 'A':
        a, b = None, None # type: (A, B)
        a = super().f() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
        a = super().g() # E: "g" undefined in superclass
        b = super().f()
        return a
[out]

[case testAccessingSuperTypeMethodWithArgs]
from typing import Any
class B:
    def f(self, y: 'A') -> None: pass
class A(B):
    def f(self, y: Any) -> None:
        a, b = None, None # type: (A, B)
        super().f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A"
        super().f(a)
        self.f(b)
        self.f(a)
[out]

[case testAccessingSuperInit]
import typing
class B:
    def __init__(self, x: A) -> None: pass
class A(B):
    def __init__(self) -> None:
        super().__init__(B(None)) # E: Argument 1 to "__init__" of "B" has incompatible type "B"; expected "A"
        super().__init__() # E: Too few arguments for "__init__" of "B"
        super().__init__(A())
[out]

[case testAccessingSuperMemberWithDeepHierarchy]
import typing
class C:
    def f(self) -> None: pass
class B(C): pass
class A(B):
    def f(self) -> None:
        super().g() # E: "g" undefined in superclass
        super().f()
[out]

[case testAssignToBaseClassMethod]
import typing
class A:
    def f(self) -> None: pass
class B(A):
    def g(self) -> None:
        super().f = None
[out]
main:6: error: Invalid assignment target

[case testSuperWithMultipleInheritance]
import typing
class A:
    def f(self) -> None: pass
class B:
    def g(self, x: int) -> None: pass
class C(A, B):
    def f(self) -> None:
        super().f()
        super().g(1)
        super().f(1) # E: Too many arguments for "f" of "A"
        super().g() # E: Too few arguments for "g" of "B"
        super().not_there() # E: "not_there" undefined in superclass
[out]

[case testSuperWithNew]
class A:
    def __new__(cls, x: int) -> 'A':
        return object.__new__(cls)
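# Editorial note, not part of the original test: B (below) overrides __new__ with an
# extra defaulted parameter; the super().__new__(...) calls in it are checked against
# A.__new__'s (cls, x: int) signature, which is why the call passing an extra string
# argument is flagged as "Too many arguments", while B('') is rejected at the call
# site with an argument-type error.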
class B(A): def __new__(cls, x: int, y: str = '') -> 'A': super().__new__(cls, 1) return super().__new__(cls, 1, '') # E: Too many arguments for "__new__" of "A" B('') # E: Argument 1 to "B" has incompatible type "str"; expected "int" B(1) B(1, 'x') [builtins fixtures/__new__.pyi] reveal_type(C.a) # E: Revealed type is 'Any' [out] [case testSuperWithUnknownBase] from typing import Any B = None # type: Any class C(B): def __init__(self, arg=0): super(C, self).__init__(arg, arg=arg) [out] [case testSuperSilentInDynamicFunction] class A: pass class B(A): def foo(self): super(B, self).foo() # Not an error [out] [case testSuperWithAny] class B: def f(self) -> None: pass class C(B): def h(self, x) -> None: reveal_type(super(x, x).f) # E: Revealed type is 'def ()' reveal_type(super(C, x).f) # E: Revealed type is 'def ()' [case testSuperInUnannotatedMethod] class C: def h(self): super(C, self).xyz [case testSuperWithTypeObjects] from typing import Type class A: def f(self) -> object: pass class B(A): def f(self) -> int: pass @classmethod def g(cls, x) -> None: reveal_type(super(cls, x).f) # E: Revealed type is 'def () -> builtins.object' def h(self, t: Type[B]) -> None: reveal_type(super(t, self).f) # E: Revealed type is 'def () -> builtins.object' [builtins fixtures/classmethod.pyi] [case testSuperWithTypeTypeAsSecondArgument] class B: def f(self) -> None: pass class C(B): def __new__(cls) -> 'C': super(C, cls).f return C() [case testSuperWithGenericSelf] from typing import TypeVar T = TypeVar('T', bound='C') class B: def f(self) -> float: pass class C(B): def f(self) -> int: pass def g(self: T) -> T: reveal_type(super(C, self).f) # E: Revealed type is 'def () -> builtins.float' return self [case testSuperWithTypeVarValues1] from typing import TypeVar T = TypeVar('T', 'C', 'D') S = TypeVar('S', 'B', 'C') class B: def f(self) -> None: pass class C(B): def f(self) -> None: pass def g(self, x: T, y: S) -> None: super(C, x).f super(C, y).f # E: Argument 2 for "super" not an instance of argument 1 class D(C): pass [case testSuperWithTypeVarValues2] from typing import TypeVar, Generic T = TypeVar('T', 'C', 'D') S = TypeVar('S', 'B', 'C') class B: def f(self) -> None: pass class C(B, Generic[T, S]): def f(self) -> None: pass def g(self, x: T, y: S) -> None: super(C, x).f super(C, y).f # E: Argument 2 for "super" not an instance of argument 1 class D(C): pass -- Invalid uses of super() -- ----------------------- [case testSuperOutsideMethodNoCrash] class C: a = super().whatever # E: super() outside of a method is not supported [case testSuperWithSingleArgument] class B: def f(self) -> None: pass class C(B): def __init__(self) -> None: super(C).f() # E: "super" with a single argument not supported [case testSuperWithThreeArguments] class B: def f(self) -> None: pass class C(B): def h(self) -> None: super(C, self, 1).f() # E: Too many arguments for "super" [case testSuperWithNonPositionalArguments] class B: def f(self) -> None: pass class C(B): def h(self) -> None: super(C, x=self).f() # E: "super" only accepts positional arguments super(**{}).f() # E: "super" only accepts positional arguments [case testSuperWithVarArgs] class B: def f(self) -> None: pass class C(B): def h(self) -> None: super(*(C, self)).f() # E: Varargs not supported with "super" [case testInvalidSuperArg] class B: def f(self) -> None: pass class C(B): def h(self) -> None: super(x, y).f # E: Name 'x' is not defined # E: Name 'y' is not defined [case testTypeErrorInSuperArg] class B: def f(self) -> None: pass class C(B): def h(self) -> 
None:
        super(1(), self).f # E: "int" not callable
        super(C, ''()).f # E: "str" not callable

[case testFlippedSuperArgs]
class B:
    def f(self) -> None: pass
class C(B):
    def h(self) -> None:
        super(self, C).f # E: Argument 1 for "super" must be a type object; got a non-type instance

[case testInvalidFirstSuperArg]
class B:
    def f(self) -> None: pass
class C(B):
    def h(self) -> None:
        super(None, C).f # E: Argument 1 for "super" must be a type object; got "None"

[case testInvalidSecondArgumentToSuper]
class B:
    def f(self) -> None: pass
class C(B):
    def h(self) -> None:
        super(C, 1).f # E: Argument 2 for "super" not an instance of argument 1
        super(C, None).f # E: Unsupported argument 2 for "super"

[case testSuperInMethodWithNoArguments]
class A:
    def f(self) -> None: pass
class B(A):
    def g() -> None: # E: Method must have at least one argument
        super().f() # E: super() requires one or more positional arguments in enclosing function
    def h(self) -> None:
        def a() -> None:
            super().f() # E: super() requires one or more positional arguments in enclosing function

[case testSuperWithUnsupportedTypeObject]
from typing import Type
class A:
    def f(self) -> int: pass
class B(A):
    def h(self, t: Type[None]) -> None:
        super(t, self).f # E: Unsupported argument 1 for "super"

mypy-0.560/test-data/unit/check-tuples.test
-- Normal assignment and subtyping
-- -------------------------------

[case testTupleAssignmentWithTupleTypes]
from typing import Tuple
t1 = None # type: Tuple[A]
t2 = None # type: Tuple[B]
t3 = None # type: Tuple[A, A]
t4 = None # type: Tuple[A, B]
t5 = None # type: Tuple[B, A]
t1 = t2 # E: Incompatible types in assignment (expression has type "Tuple[B]", variable has type "Tuple[A]")
t1 = t3 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A]")
t3 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A]", variable has type "Tuple[A, A]")
t3 = t4 # E: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "Tuple[A, A]")
t3 = t5 # E: Incompatible types in assignment (expression has type "Tuple[B, A]", variable has type "Tuple[A, A]")
# Ok
t1 = t1
t2 = t2
t3 = t3
t4 = t4
t5 = t5
class A: pass
class B: pass
[builtins fixtures/tuple.pyi]

[case testTupleSubtyping]
from typing import Tuple
t1 = None # type: Tuple[A, A]
t2 = None # type: Tuple[A, B]
t3 = None # type: Tuple[B, A]
t2 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]")
t2 = t3 # E: Incompatible types in assignment (expression has type "Tuple[B, A]", variable has type "Tuple[A, B]")
t3 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[B, A]")
t3 = t2 # E: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "Tuple[B, A]")
t1 = t2
t1 = t3
class A: pass
class B(A): pass
[builtins fixtures/tuple.pyi]

[case testTupleCompatibilityWithOtherTypes]
from typing import Tuple
a, o = None, None # type: (A, object)
t = None # type: Tuple[A, A]
a = t # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "A")
t = o # E: Incompatible types in assignment (expression has type "object", variable has type "Tuple[A, A]")
t = a # E: Incompatible types in assignment (expression has type "A", variable has type "Tuple[A, A]")
# TODO: callable types + tuples
# Ok
o = t
t = None
class A: pass
[builtins
fixtures/tuple.pyi] [case testNestedTupleTypes] from typing import Tuple t1 = None # type: Tuple[A, Tuple[A, A]] t2 = None # type: Tuple[B, Tuple[B, B]] t2 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, Tuple[A, A]]", variable has type "Tuple[B, Tuple[B, B]]") t1 = t2 class A: pass class B(A): pass [builtins fixtures/tuple.pyi] [case testNestedTupleTypes2] from typing import Tuple t1 = None # type: Tuple[A, Tuple[A, A]] t2 = None # type: Tuple[B, Tuple[B, B]] t2 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, Tuple[A, A]]", variable has type "Tuple[B, Tuple[B, B]]") t1 = t2 class A: pass class B(A): pass [builtins fixtures/tuple.pyi] [case testSubtypingWithNamedTupleType] from typing import Tuple t1 = None # type: Tuple[A, A] t2 = None # type: tuple t1 = t2 # E: Incompatible types in assignment (expression has type "Tuple[Any, ...]", variable has type "Tuple[A, A]") t2 = t1 class A: pass [builtins fixtures/tuple.pyi] [case testTupleInitializationWithNone] from typing import Tuple t = None # type: Tuple[A, A] t = None class A: pass [builtins fixtures/tuple.pyi] -- Tuple expressions -- ----------------- [case testTupleExpressions] from typing import Tuple t1 = None # type: tuple t2 = None # type: Tuple[A] t3 = None # type: Tuple[A, B] a, b, c = None, None, None # type: (A, B, C) t2 = () # E: Incompatible types in assignment (expression has type "Tuple[]", variable has type "Tuple[A]") t2 = (a, a) # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A]") t3 = (a, a) # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]") t3 = (b, b) # E: Incompatible types in assignment (expression has type "Tuple[B, B]", variable has type "Tuple[A, B]") t3 = (a, b, a) # E: Incompatible types in assignment (expression has type "Tuple[A, B, A]", variable has type "Tuple[A, B]") t1 = () t1 = (a,) t2 = (a,) t3 = (a, b) t3 = (a, c) t3 = (None, None) class A: pass class B: pass class C(B): pass [builtins fixtures/tuple.pyi] [case testVoidValueInTuple] import typing (None, f()) # E: "f" does not return a value (f(), None) # E: "f" does not return a value def f() -> None: pass [builtins fixtures/tuple.pyi] -- Indexing -- -------- [case testIndexingTuples] from typing import Tuple t1 = None # type: Tuple[A, B] t2 = None # type: Tuple[A] t3 = None # type: Tuple[A, B, C, D, E] a, b = None, None # type: (A, B) x = None # type: Tuple[A, B, C] y = None # type: Tuple[A, C, E] n = 0 a = t1[1] # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = t1[0] # E: Incompatible types in assignment (expression has type "A", variable has type "B") t1[2] # E: Tuple index out of range t1[3] # E: Tuple index out of range t2[1] # E: Tuple index out of range reveal_type(t1[n]) # E: Revealed type is 'Union[__main__.A, __main__.B]' reveal_type(t3[n:]) # E: Revealed type is 'Union[__main__.A, __main__.B, __main__.C, __main__.D, __main__.E]' b = t1[(0)] # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = t1[0] b = t1[1] b = t1[-1] a = t1[(0)] x = t3[0:3] # type (A, B, C) y = t3[0:5:2] # type (A, C, E) x = t3[:-2] # type (A, B, C) class A: pass class B: pass class C: pass class D: pass class E: pass [builtins fixtures/tuple.pyi] [case testIndexingTuplesWithNegativeIntegers] from typing import Tuple t1 = None # type: Tuple[A, B] t2 = None # type: Tuple[A] a, b = None, None # type: A, B a = t1[-1] # E: Incompatible types in 
assignment (expression has type "B", variable has type "A") b = t1[-2] # E: Incompatible types in assignment (expression has type "A", variable has type "B") t1[-3] # E: Tuple index out of range t1[-4] # E: Tuple index out of range b = t2[(-1)] # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = t1[-2] b = t1[-1] a = t2[(-1)] class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testAssigningToTupleItems] from typing import Tuple t = None # type: Tuple[A, B] n = 0 t[0] = A() # E: Unsupported target for indexed assignment t[2] = A() # E: Unsupported target for indexed assignment t[n] = A() # E: Unsupported target for indexed assignment class A: pass class B: pass [builtins fixtures/tuple.pyi] -- Multiple assignment -- ------------------- [case testMultipleAssignmentWithTuples] from typing import Tuple t1 = None # type: Tuple[A, B] t2 = None # type: Tuple[A, B, A] a, b = None, None # type: (A, B) a, a = t1 # E: Incompatible types in assignment (expression has type "B", variable has type "A") b, b = t1 # E: Incompatible types in assignment (expression has type "A", variable has type "B") a, b, b = t2 # E: Incompatible types in assignment (expression has type "A", variable has type "B") a, b = t1 a, b, a = t2 class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testMultipleAssignmentWithInvalidNumberOfValues] from typing import Tuple t1 = None # type: Tuple[A, A, A] a = None # type: A a, a = t1 # E: Too many values to unpack (2 expected, 3 provided) a, a, a, a = t1 # E: Need more than 3 values to unpack (4 expected) a, a, a = t1 class A: pass [builtins fixtures/tuple.pyi] [case testMultipleAssignmentWithTupleExpressionRvalue] a, b = None, None # type: (A, B) a, b = a, a # Fail a, b = b, a # Fail a, b = a, b a, a = a, a class A: pass class B: pass [builtins fixtures/tuple.pyi] [out] main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A") main:5: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testSubtypingInMultipleAssignment] a, b = None, None # type: (A, B) b, b = a, b # E: Incompatible types in assignment (expression has type "A", variable has type "B") b, b = b, a # E: Incompatible types in assignment (expression has type "A", variable has type "B") a, b = b, b b, a = b, b class A: pass class B(A): pass [builtins fixtures/tuple.pyi] [case testInitializationWithMultipleValues] a, b = None, None # type: (A, B) a1, b1 = a, a # type: (A, B) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a2, b2 = b, b # type: (A, B) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a3, b3 = a # type: (A, B) # E: '__main__.A' object is not iterable a4, b4 = None # type: (A, B) # E: 'builtins.None' object is not iterable a5, b5 = a, b, a # type: (A, B) # E: Too many values to unpack (2 expected, 3 provided) ax, bx = a, b # type: (A, B) class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testMultipleAssignmentWithNonTupleRvalue] a, b = None, None # type: (A, B) def f(): pass a, b = None # E: 'builtins.None' object is not iterable a, b = a # E: '__main__.A' object is not iterable a, b = f # E: 'def () -> Any' object is not iterable class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testMultipleAssignmentWithIndexedLvalues] a, b = None, None # type: (A, B) aa, bb = None, None 
# type: (AA, BB) a[a], b[b] = a, bb # E: Incompatible types in assignment (expression has type "A", target has type "AA") a[a], b[b] = aa, b # E: Incompatible types in assignment (expression has type "B", target has type "BB") a[aa], b[b] = aa, bb # E: Invalid index type "AA" for "A"; expected type "A" a[a], b[bb] = aa, bb # E: Invalid index type "BB" for "B"; expected type "B" a[a], b[b] = aa, bb class A: def __setitem__(self, x: 'A', y: 'AA') -> None: pass class B: def __setitem__(self, x: 'B', y: 'BB') -> None: pass class AA: pass class BB: pass [builtins fixtures/tuple.pyi] [case testMultipleDeclarationWithParentheses] (a, b) = (None, None) # type: int, str a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") a = 1 b = '' [case testMultipleAssignmentWithExtraParentheses] a, b = None, None # type: (A, B) (a, b) = (a, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") (a, b) = (b, b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") ((a), (b)) = ((a), (a)) # E: Incompatible types in assignment (expression has type "A", variable has type "B") ((a), (b)) = ((b), (b)) # E: Incompatible types in assignment (expression has type "B", variable has type "A") [a, b] = a, a # E: Incompatible types in assignment (expression has type "A", variable has type "B") [a, b] = b, b # E: Incompatible types in assignment (expression has type "B", variable has type "A") (a, b) = (a, b) ((a), (b)) = ((a), (b)) [a, b] = a, b class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testMultipleAssignmentUsingSingleTupleType] from typing import Tuple a, b = None, None # type: Tuple[int, str] a = 1 b = '' a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testMultipleAssignmentWithMixedVariables] a = b, c = 1, 1 x, y = p, q = 1, 1 u, v, w = r, s = 1, 1 # E: Need more than 2 values to unpack (3 expected) d, e = f, g, h = 1, 1 # E: Need more than 2 values to unpack (3 expected) -- Assignment to starred expressions -- --------------------------------- [case testAssignmentToStarMissingAnnotation] from typing import List t = 1, 2 a, b, *c = 1, 2 # E: Need type annotation for variable aa, bb, *cc = t # E: Need type annotation for variable [builtins fixtures/list.pyi] [case testAssignmentToStarAnnotation] from typing import List li, lo = None, None # type: List[int], List[object] a, b, *c = 1, 2 # type: int, int, List[int] c = lo # E: Incompatible types in assignment (expression has type "List[object]", variable has type "List[int]") c = li [builtins fixtures/list.pyi] [case testAssignmentToStarCount1] from typing import List ca = None # type: List[int] c = [1] a, b, *c = 1, # E: Need more than 1 value to unpack (2 expected) a, b, *c = 1, 2 a, b, *c = 1, 2, 3 a, b, *c = 1, 2, 3, 4 [builtins fixtures/list.pyi] [case testAssignmentToStarCount2] from typing import List ca = None # type: List[int] t1 = 1, t2 = 1, 2 t3 = 1, 2, 3 t4 = 1, 2, 3, 4 c = [1] a, b, *c = t1 # E: Need more than 1 value to unpack (2 expected) a, b, *c = t2 a, b, *c = t3 a, b, *c = t4 [builtins fixtures/list.pyi] [case testAssignmentToStarFromAny] from typing import Any, cast a, c = cast(Any, 1), C() p, *q = a c = a c = q class C: pass [case testAssignmentToComplexStar] from typing 
import List li = None # type: List[int] a, *(li) = 1, a, *(b, c) = 1, 2 # E: Need more than 1 value to unpack (2 expected) a, *(b, c) = 1, 2, 3 a, *(b, c) = 1, 2, 3, 4 # E: Too many values to unpack (2 expected, 3 provided) [builtins fixtures/list.pyi] [case testAssignmentToStarFromTupleType] from typing import List, Tuple li = None # type: List[int] la = None # type: List[A] ta = None # type: Tuple[A, A, A] a, *la = ta a, *li = ta # E a, *na = ta na = la na = a # E class A: pass [builtins fixtures/list.pyi] [out] main:6: error: List item 0 has incompatible type "A"; expected "int" main:6: error: List item 1 has incompatible type "A"; expected "int" main:9: error: Incompatible types in assignment (expression has type "A", variable has type "List[A]") [case testAssignmentToStarFromTupleInference] from typing import List li = None # type: List[int] la = None # type: List[A] a, *l = A(), A() l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") l = la class A: pass [builtins fixtures/list.pyi] [out] [case testAssignmentToStarFromListInference] from typing import List li = None # type: List[int] la = None # type: List[A] a, *l = [A(), A()] l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") l = la class A: pass [builtins fixtures/list.pyi] [out] [case testAssignmentToStarFromTupleTypeInference] from typing import List, Tuple li = None # type: List[int] la = None # type: List[A] ta = None # type: Tuple[A, A, A] a, *l = ta l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") l = la class A: pass [builtins fixtures/list.pyi] [out] [case testAssignmentToStarFromListTypeInference] from typing import List li = None # type: List[int] la = None # type: List[A] a, *l = la l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") l = la class A: pass [builtins fixtures/list.pyi] [out] -- Nested tuple assignment -- ---------------------------- [case testNestedTupleAssignment1] a1, b1, c1 = None, None, None # type: (A, B, C) a2, b2, c2 = None, None, None # type: (A, B, C) a1, (b1, c1) = a2, (b2, c2) a1, (a1, (b1, c1)) = a2, (a2, (b2, c2)) a1, (a1, (a1, b1)) = a1, (a1, (a1, c1)) # Fail class A: pass class B: pass class C: pass [out] main:7: error: Incompatible types in assignment (expression has type "C", variable has type "B") [case testNestedTupleAssignment2] a1, b1, c1 = None, None, None # type: (A, B, C) a2, b2, c2 = None, None, None # type: (A, B, C) t = a1, b1 a2, b2 = t (a2, b2), c2 = t, c1 (a2, c2), c2 = t, c1 # Fail t, c2 = (a2, b2), c2 t, c2 = (a2, a2), c2 # Fail t = a1, a1, a1 # Fail t = a1 # Fail a2, a2, a2 = t # Fail a2, = t # Fail a2 = t # Fail class A: pass class B: pass class C: pass [out] main:8: error: Incompatible types in assignment (expression has type "B", variable has type "C") main:10: error: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]") main:11: error: Incompatible types in assignment (expression has type "Tuple[A, A, A]", variable has type "Tuple[A, B]") main:12: error: Incompatible types in assignment (expression has type "A", variable has type "Tuple[A, B]") main:13: error: Need more than 2 values to unpack (3 expected) main:14: error: Too many values to unpack (1 expected, 2 provided) main:15: error: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "A") -- Error messages -- 
-------------- [case testTupleErrorMessages] a = None # type: A (a, a) + a # E: Unsupported left operand type for + ("Tuple[A, A]") a + (a, a) # E: Unsupported operand types for + ("A" and "Tuple[A, A]") f((a, a)) # E: Argument 1 to "f" has incompatible type "Tuple[A, A]"; expected "A" (a, a).foo # E: "Tuple[A, A]" has no attribute "foo" def f(x: 'A') -> None: pass class A: def __add__(self, x: 'A') -> 'A': pass [builtins fixtures/tuple.pyi] [case testLargeTuplesInErrorMessages] a = None # type: LongTypeName a + (a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a) # Fail class LongTypeName: def __add__(self, x: 'LongTypeName') -> 'LongTypeName': pass [builtins fixtures/tuple.pyi] [out] main:3: error: Unsupported operand types for + ("LongTypeName" and ) -- Tuple methods -- ------------- [case testTupleMethods] from typing import Tuple t = None # type: Tuple[int, str] i = 0 s = '' b = bool() s = t.__len__() # E: Incompatible types in assignment (expression has type "int", variable has type "str") i = t.__str__() # E: Incompatible types in assignment (expression has type "str", variable has type "int") i = s in t # E: Incompatible types in assignment (expression has type "bool", variable has type "int") t.foo # E: "Tuple[int, str]" has no attribute "foo" i = t.__len__() s = t.__str__() b = s in t [file builtins.py] from typing import TypeVar, Generic _T = TypeVar('_T') class object: def __init__(self) -> None: pass class tuple(Generic[_T]): def __len__(self) -> int: pass def __str__(self) -> str: pass def __contains__(self, o: object) -> bool: pass class int: pass class str: pass class bool: pass class type: pass class function: pass -- For loop over tuple -- ------------------- [case testForLoopOverTuple] import typing t = 1, 2 for x in t: x = 1 x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [builtins fixtures/for.pyi] [case testForLoopOverEmptyTuple] import typing t = () for x in t: pass # E: Need type annotation for variable [builtins fixtures/for.pyi] [case testForLoopOverNoneValuedTuple] import typing t = () for x in None, None: pass [builtins fixtures/for.pyi] [case testForLoopOverTupleAndSubtyping] import typing class A: pass class B(A): pass for x in B(), A(): x = A() x = B() x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A") [builtins fixtures/for.pyi] [case testTupleIterable] y = 'a' x = sum((1,2)) y = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") [builtins fixtures/tuple.pyi] -- Tuple as a base type -- -------------------- [case testTupleBaseClass] import m [file m.pyi] from typing import Tuple class A(Tuple[int, str]): def f(self, x: int) -> None: a, b = 1, '' a, b = self b, a = self # Error self.f('') # Error [builtins fixtures/tuple.pyi] [out] tmp/m.pyi:6: error: Incompatible types in assignment (expression has type "int", variable has type "str") tmp/m.pyi:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") tmp/m.pyi:7: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" [case testValidTupleBaseClass2] from typing import Tuple class A(Tuple[int, str]): pass x, y = A() reveal_type(x) # E: Revealed type is 'builtins.int' reveal_type(y) # E: Revealed type is 'builtins.str' x1 = A()[0] # type: int x2 = A()[1] # type: int # E: Incompatible types in assignment (expression has 
type "str", variable has type "int") A()[2] # E: Tuple index out of range class B(Tuple[int, ...]): pass z1 = B()[0] # type: int z2 = B()[1] # type: str # E: Incompatible types in assignment (expression has type "int", variable has type "str") B()[100] [builtins fixtures/tuple.pyi] [out] [case testValidTupleBaseClass] from typing import Tuple class A(tuple): pass [out] [case testTupleBaseClass2] import m [file m.pyi] from typing import Tuple a = None # type: A class A(Tuple[int, str]): pass x, y = a x() # E: "int" not callable y() # E: "str" not callable [builtins fixtures/tuple.pyi] [out] [case testGenericClassWithTupleBaseClass] from typing import TypeVar, Generic, Tuple T = TypeVar('T') class Test(Generic[T], Tuple[T]): pass x = Test() # type: Test[int] [builtins fixtures/tuple.pyi] [out] main:4: error: Generic tuple types not supported -- Variable-length tuples (Tuple[t, ...] with literal '...') -- --------------------------------------------------------- [case testIndexingVariableLengthTuple] from typing import Tuple x = () # type: Tuple[str, ...] n = 5 x[n]() # E: "str" not callable x[3]() # E: "str" not callable [builtins fixtures/tuple.pyi] [case testSubtypingVariableLengthTuple] from typing import Tuple class A: pass class B(A): pass def fa(t: Tuple[A, ...]) -> None: pass def fb(t: Tuple[B, ...]) -> None: pass ta = () # type: Tuple[A, ...] tb = () # type: Tuple[B, ...] fa(ta) fa(tb) fb(tb) fb(ta) # E: Argument 1 to "fb" has incompatible type "Tuple[A, ...]"; expected "Tuple[B, ...]" [builtins fixtures/tuple.pyi] [case testSubtypingFixedAndVariableLengthTuples] from typing import Tuple class A: pass class B(A): pass def fa(t: Tuple[A, ...]) -> None: pass def fb(t: Tuple[B, ...]) -> None: pass aa = (A(), A()) ab = (A(), B()) bb = (B(), B()) fa(aa) fa(ab) fa(bb) fb(bb) fb(ab) # E: Argument 1 to "fb" has incompatible type "Tuple[A, B]"; expected "Tuple[B, ...]" fb(aa) # E: Argument 1 to "fb" has incompatible type "Tuple[A, A]"; expected "Tuple[B, ...]" [builtins fixtures/tuple.pyi] [case testSubtypingTupleIsContainer] from typing import Container a = None # type: Container[str] a = () [case testSubtypingTupleIsSized] from typing import Sized a = None # type: Sized a = () [case testTupleWithStarExpr1] a = (1, 2) b = (*a, '') reveal_type(b) # E: Revealed type is 'Tuple[builtins.int, builtins.int, builtins.str]' [case testTupleWithStarExpr2] a = [1] b = (0, *a) reveal_type(b) # E: Revealed type is 'builtins.tuple[builtins.int*]' [builtins fixtures/tuple.pyi] [case testTupleWithStarExpr3] a = [''] b = (0, *a) reveal_type(b) # E: Revealed type is 'builtins.tuple[builtins.object*]' c = (*a, '') reveal_type(c) # E: Revealed type is 'builtins.tuple[builtins.str*]' [builtins fixtures/tuple.pyi] [case testTupleWithStarExpr4] a = (1, 1, 'x', 'x') b = (1, 'x') a = (0, *b, '') [builtins fixtures/tuple.pyi] [case testTupleMeetTupleAny] from typing import Union, Tuple class A: pass class B: pass def f(x: Union[B, Tuple[A, A]]) -> None: if isinstance(x, tuple): reveal_type(x) # E: Revealed type is 'Tuple[__main__.A, __main__.A]' else: reveal_type(x) # E: Revealed type is '__main__.B' def g(x: Union[str, Tuple[str, str]]) -> None: if isinstance(x, tuple): reveal_type(x) # E: Revealed type is 'Tuple[builtins.str, builtins.str]' else: reveal_type(x) # E: Revealed type is 'builtins.str' [builtins fixtures/tuple.pyi] [out] [case testTupleMeetTupleAnyComplex] from typing import Tuple, Union Pair = Tuple[int, int] Variant = Union[int, Pair] def tuplify(v: Variant) -> None: reveal_type(v) # E: Revealed type 
is 'Union[builtins.int, Tuple[builtins.int, builtins.int]]' if not isinstance(v, tuple): reveal_type(v) # E: Revealed type is 'builtins.int' v = (v, v) reveal_type(v) # E: Revealed type is 'Tuple[builtins.int, builtins.int]' reveal_type(v) # E: Revealed type is 'Tuple[builtins.int, builtins.int]' reveal_type(v[0]) # E: Revealed type is 'builtins.int' Pair2 = Tuple[int, str] Variant2 = Union[int, Pair2] def tuplify2(v: Variant2) -> None: if isinstance(v, tuple): reveal_type(v) # E: Revealed type is 'Tuple[builtins.int, builtins.str]' else: reveal_type(v) # E: Revealed type is 'builtins.int' [builtins fixtures/tuple.pyi] [out] [case testTupleMeetTupleAnyAfter] from typing import Tuple, Union def good(blah: Union[Tuple[int, int], int]) -> None: reveal_type(blah) # E: Revealed type is 'Union[Tuple[builtins.int, builtins.int], builtins.int]' if isinstance(blah, tuple): reveal_type(blah) # E: Revealed type is 'Tuple[builtins.int, builtins.int]' reveal_type(blah) # E: Revealed type is 'Union[Tuple[builtins.int, builtins.int], builtins.int]' [builtins fixtures/tuple.pyi] [out] [case testTupleMeetTupleVariable] from typing import Tuple, TypeVar, Generic, Union T = TypeVar('T') class A: pass class B1(A): pass class B2(A): pass class C: pass x = None # type: Tuple[A, ...] y = None # type: Tuple[Union[B1, C], Union[B2, C]] def g(x: T) -> Tuple[T, T]: return (x, x) z = 1 x, y = g(z) # E: Argument 1 to "g" has incompatible type "int"; expected "Tuple[B1, B2]" [builtins fixtures/tuple.pyi] [out] [case testTupleWithUndersizedContext] a = ([1], 'x') a = ([], 'x', 1) # E: Incompatible types in assignment (expression has type "Tuple[List[int], str, int]", variable has type "Tuple[List[int], str]") [builtins fixtures/tuple.pyi] [case testTupleWithOversizedContext] a = (1, [1], 'x') a = (1, []) # E: Incompatible types in assignment (expression has type "Tuple[int, List[int]]", variable has type "Tuple[int, List[int], str]") [builtins fixtures/tuple.pyi] [case testTupleWithoutContext] a = (1, []) # E: Need type annotation for variable [builtins fixtures/tuple.pyi] [case testTupleWithUnionContext] from typing import List, Union, Tuple def f() -> Union[int, Tuple[List[str]]]: return ([],) [builtins fixtures/tuple.pyi] [case testTupleWithVariableSizedTupleContext] from typing import List, Tuple def f() -> Tuple[List[str], ...]: return ([],) [builtins fixtures/tuple.pyi] [case testTupleWithoutArgs] from typing import Tuple def f(a: Tuple) -> None: pass f(()) f((1,)) f(('', '')) f(0) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[Any, ...]" [builtins fixtures/tuple.pyi] [case testTupleSingleton] from typing import Tuple def f(a: Tuple[()]) -> None: pass f(()) f((1,)) # E: Argument 1 to "f" has incompatible type "Tuple[int]"; expected "Tuple[]" f(('', '')) # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[]" f(0) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[]" [builtins fixtures/tuple.pyi] [case testNonliteralTupleIndex] t = (0, "") x = 0 y = "" reveal_type(t[x]) # E: Revealed type is 'Union[builtins.int, builtins.str]' t[y] # E: Invalid tuple index type (actual type "str", expected type "Union[int, slice]") [builtins fixtures/tuple.pyi] [case testNonliteralTupleSlice] t = (0, "") x = 0 y = "" reveal_type(t[x:]) # E: Revealed type is 'Union[builtins.int, builtins.str]' t[y:] # E: Slice index must be an integer or None [builtins fixtures/tuple.pyi] [case testInferTupleTypeFallbackAgainstInstance] from typing import TypeVar, Generic, Tuple T = 
TypeVar('T') class Base(Generic[T]): pass def f(x: Base[T]) -> T: pass class DT(Tuple[str, str], Base[int]): pass reveal_type(f(DT())) # E: Revealed type is 'builtins.int*' [builtins fixtures/tuple.pyi] [out] [case testTypeTupleClassmethod] from typing import Tuple, Type class C(Tuple[int, str]): @classmethod def f(cls) -> None: pass t: Type[C] t.g() # E: "Type[C]" has no attribute "g" t.f() [builtins fixtures/classmethod.pyi] mypy-0.560/test-data/unit/check-type-aliases.test0000644€tŠÔÚ€2›s®0000001660613215007205026121 0ustar jukkaDROPBOX\Domain Users00000000000000[case testSimpleTypeAlias] import typing i = int def f(x: i) -> None: pass f(1) f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" [case testUnionTypeAlias] from typing import Union U = Union[int, str] def f(x: U) -> None: pass f(1) f('') f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]" [case testTupleTypeAlias] from typing import Tuple T = Tuple[int, str] def f(x: T) -> None: pass f((1, 'x')) f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[int, str]" [case testCallableTypeAlias] from typing import Callable A = Callable[[int], None] f = None # type: A f(1) f('') # E: Argument 1 has incompatible type "str"; expected "int" [case testListTypeAlias] from typing import List A = List[int] def f(x: A) -> None: pass f([1]) f(['x']) # E: List item 0 has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] [out] [case testAnyTypeAlias] from typing import Any A = Any def f(x: A) -> None: x.foo() f(1) f('x') [case testImportUnionAlias] import typing from _m import U def f(x: U) -> None: pass f(1) f('x') f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]" [file _m.py] from typing import Union U = Union[int, str] [builtins fixtures/tuple.pyi] [case testProhibitReassigningAliases] A = float A = int # E: Cannot assign to a type \ # E: Cannot assign multiple types to name "A" without an explicit "Type[...]" annotation [out] [case testProhibitReassigningSubscriptedAliases] from typing import Callable A = Callable[[], float] A = Callable[[], int] # E: Cannot assign multiple types to name "A" without an explicit "Type[...]" annotation \ # E: Value of type "int" is not indexable # the second error is because of `Callable = 0` in lib-stub/typing.pyi [builtins fixtures/list.pyi] [out] [case testProhibitReassigningGenericAliases] from typing import TypeVar, Union, Tuple T = TypeVar('T') A = Tuple[T, T] A = Union[T, int] # E: Cannot assign multiple types to name "A" without an explicit "Type[...]" annotation \ # E: Value of type "int" is not indexable # the second error is because of `Union = 0` in lib-stub/typing.pyi [out] [case testProhibitUsingVariablesAsTypesAndAllowAliasesAsTypes] from typing import TypeVar, Sequence, Type T = TypeVar('T') A: Type[float] = int A = float # OK x: A # E: Invalid type "__main__.A" def bad(tp: A) -> None: # E: Invalid type "__main__.A" pass Alias = int GenAlias = Sequence[T] def fun(x: Alias) -> GenAlias[int]: pass [out] [case testCorrectQualifiedAliasesAlsoInFunctions] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class X(Generic[T]): A = X[S] def f(self) -> X[T]: pass a: X[T] b: A = a c: A[T] = a d: A[int] = a # E: Incompatible types in assignment (expression has type "X[T]", variable has type "X[int]") def g(self) -> None: a: X[T] b: X.A = a c: X.A[T] = a d: X.A[int] = a # E: Incompatible types in assignment (expression has type "X[T]", variable has type 
"X[int]") def g(arg: X[int]) -> None: p: X[int] = arg.f() q: X.A = arg.f() r: X.A[str] = arg.f() # E: Incompatible types in assignment (expression has type "X[int]", variable has type "X[str]") [out] [case testProhibitBoundTypeVariableReuseForAliases] from typing import TypeVar, Generic, List T = TypeVar('T') class C(Generic[T]): A = List[T] # E: Can't use bound type variable "T" to define generic alias x: C.A reveal_type(x) # E: Revealed type is 'builtins.list[Any]' def f(x: T) -> T: A = List[T] # E: Can't use bound type variable "T" to define generic alias return x [builtins fixtures/list.pyi] [out] [case testTypeAliasInBuiltins] def f(x: bytes): pass bytes f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str" [builtins fixtures/alias.pyi] [case testEmptyTupleTypeAlias] from typing import Tuple, Callable EmptyTuple = Tuple[()] x = None # type: EmptyTuple reveal_type(x) # E: Revealed type is 'Tuple[]' EmptyTupleCallable = Callable[[Tuple[()]], None] f = None # type: EmptyTupleCallable reveal_type(f) # E: Revealed type is 'def (Tuple[])' [builtins fixtures/list.pyi] [case testForwardTypeAlias] def f(p: 'Alias') -> None: pass reveal_type(f) # E: Revealed type is 'def (p: builtins.int)' Alias = int [out] [case testForwardTypeAliasGeneric] from typing import TypeVar, Tuple def f(p: 'Alias[str]') -> None: pass reveal_type(f) # E: Revealed type is 'def (p: Tuple[builtins.int, builtins.str])' T = TypeVar('T') Alias = Tuple[int, T] [out] [case testRecursiveAliasesErrors1] from typing import Type, Callable, Union A = Union[A, int] B = Callable[[B], int] C = Type[C] [out] main:3: error: Recursive types not fully supported yet, nested types replaced with "Any" main:4: error: Recursive types not fully supported yet, nested types replaced with "Any" main:5: error: Recursive types not fully supported yet, nested types replaced with "Any" [case testRecursiveAliasesErrors2] from typing import Type, Callable, Union A = Union[B, int] B = Callable[[C], int] C = Type[A] [out] main:3: error: Recursive types not fully supported yet, nested types replaced with "Any" main:4: error: Recursive types not fully supported yet, nested types replaced with "Any" main:5: error: Recursive types not fully supported yet, nested types replaced with "Any" [case testDoubleForwardAlias] from typing import List x: A A = List[B] B = List[int] reveal_type(x) # E: Revealed type is 'builtins.list[builtins.list[builtins.int]]' [builtins fixtures/list.pyi] [out] [case testDoubleForwardAliasWithNamedTuple] from typing import List, NamedTuple x: A A = List[B] class B(NamedTuple): x: str reveal_type(x[0].x) # E: Revealed type is 'builtins.str' [builtins fixtures/list.pyi] [out] [case testJSONAliasApproximation] from typing import List, Union, Dict x: JSON JSON = Union[int, str, List[JSON], Dict[str, JSON]] # type: ignore reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any], builtins.dict[builtins.str, Any]]' if isinstance(x, list): reveal_type(x) # E: Revealed type is 'builtins.list[Any]' [builtins fixtures/isinstancelist.pyi] [out] [case testProhibitedForwardRefToTypeVar] from typing import TypeVar, List a: List[T] T = TypeVar('T') [builtins fixtures/list.pyi] [out] main:3: error: Invalid type "__main__.T" main:3: note: Forward references to type variables are prohibited [case testUnsupportedForwardRef] from typing import List, TypeVar T = TypeVar('T') def f(x: T) -> None: y: A[T] # E: Unsupported forward reference to "A" A = List[T] [builtins fixtures/list.pyi] [out] [case 
testUnsupportedForwardRef2] from typing import List, TypeVar def f() -> None: X = List[int] x: A[X] # E: Unsupported forward reference to "A" T = TypeVar('T') A = List[T] [builtins fixtures/list.pyi] [out] [case testNoneAlias] from typing import Union void = type(None) x: void reveal_type(x) # E: Revealed type is 'builtins.None' y: Union[int, void] reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]' [builtins fixtures/bool.pyi] [case testNoneAliasStrict] # flags: --strict-optional from typing import Optional, Union void = type(None) x: int y: Union[int, void] z: Optional[int] x = y # E: Incompatible types in assignment (expression has type "Optional[int]", variable has type "int") y = z [builtins fixtures/bool.pyi] mypy-0.560/test-data/unit/check-type-checks.test0000644€tŠÔÚ€2›s®0000001003213215007205025723 0ustar jukkaDROPBOX\Domain Users00000000000000-- Conditional type checks. [case testSimpleIsinstance] x = None # type: object n = None # type: int s = None # type: str n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") if isinstance(x, int): n = x s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") [builtins fixtures/isinstance.pyi] [case testSimpleIsinstance2] import typing def f(x: object, n: int, s: str) -> None: n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") if isinstance(x, int): n = x s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") [builtins fixtures/isinstance.pyi] [out] [case testSimpleIsinstance3] class A: x = None # type: object n = None # type: int s = None # type: str n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") if isinstance(x, int): n = x s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") else: n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") [builtins fixtures/isinstance.pyi] [out] [case testMultipleIsinstanceTests] import typing class A: pass class B(A): pass def f(x: object, a: A, b: B, c: int) -> None: if isinstance(x, A): if isinstance(x, B): b = x x = a a = x c = x # E: Incompatible types in assignment (expression has type "A", variable has type "int") [builtins fixtures/isinstance.pyi] [out] [case testMultipleIsinstanceTests2] import typing class A: pass class B(A): pass def f(x: object, y: object, n: int, s: str) -> None: if isinstance(x, int): if isinstance(y, str): n = x s = y s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") n = y # E: Incompatible types in assignment (expression has type "str", variable has type "int") s = y # E: Incompatible types in assignment (expression has type "object", variable has type "str") n = y # E: Incompatible types in assignment (expression has type "object", variable has type "int") n = x [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceAndElif] import typing def f(x: object, n: int, s: str) -> None: n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") if isinstance(x, int): n = x s = x # E: Incompatible types in assignment (expression has type "int", variable has 
type "str") elif isinstance(x, str): s = x n = x # E: Incompatible types in assignment (expression has type "str", variable has type "int") else: n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") s = x # E: Incompatible types in assignment (expression has type "object", variable has type "str") n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceAndAnyType] from typing import Any def f(x: Any, n: int, s: str) -> None: s = x if isinstance(x, int): n = x s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") s = x [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceAndGenericType] from typing import TypeVar, Generic T = TypeVar('T') class C(Generic[T]): def f(self, x: T) -> None: pass def f(x: object) -> None: if isinstance(x, C): x.f(1) x.f('') x.g() # E: "C[Any]" has no attribute "g" x.g() # E: "object" has no attribute "g" [builtins fixtures/isinstance.pyi] [out] mypy-0.560/test-data/unit/check-type-promotion.test0000644€tŠÔÚ€2›s®0000000161613215007205026521 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for type promotion (e.g. int -> float). [case testPromoteIntToFloat] def f(x: float) -> None: pass f(1) [builtins fixtures/primitives.pyi] [case testCantPromoteFloatToInt] def f(x: int) -> None: pass f(1.1) # E: Argument 1 to "f" has incompatible type "float"; expected "int" [builtins fixtures/primitives.pyi] [case testPromoteFloatToComplex] def f(x: complex) -> None: pass f(1) [builtins fixtures/primitives.pyi] [case testPromoteIntToComplex] def f(x: complex) -> None: pass f(1) [builtins fixtures/primitives.pyi] [case testPromoteBytearrayToByte] def f(x: bytes) -> None: pass f(bytearray()) [builtins fixtures/primitives.pyi] [case testNarrowingDownFromPromoteTargetType] y = 0.0 y = 1 y() # E: "int" not callable [builtins fixtures/primitives.pyi] [case testNarrowingDownFromPromoteTargetType2] y = 0.0 y = 1 y.x # E: "int" has no attribute "x" [builtins fixtures/primitives.pyi] mypy-0.560/test-data/unit/check-typeddict.test0000644€tŠÔÚ€2›s®0000013712013215007205025505 0ustar jukkaDROPBOX\Domain Users00000000000000-- Create Instance [case testCanCreateTypedDictInstanceWithKeywordArguments] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(x=42, y=1337) reveal_type(p) # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})' # Use values() to check fallback value type. reveal_type(p.values()) # E: Revealed type is 'typing.Iterable[builtins.int*]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testCanCreateTypedDictInstanceWithDictCall] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(dict(x=42, y=1337)) reveal_type(p) # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})' # Use values() to check fallback value type. reveal_type(p.values()) # E: Revealed type is 'typing.Iterable[builtins.int*]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testCanCreateTypedDictInstanceWithDictLiteral] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point({'x': 42, 'y': 1337}) reveal_type(p) # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})' # Use values() to check fallback value type. 
reveal_type(p.values()) # E: Revealed type is 'typing.Iterable[builtins.int*]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testCanCreateTypedDictInstanceWithNoArguments] from typing import TypeVar, Union from mypy_extensions import TypedDict EmptyDict = TypedDict('EmptyDict', {}) p = EmptyDict() reveal_type(p) # E: Revealed type is 'TypedDict('__main__.EmptyDict', {})' reveal_type(p.values()) # E: Revealed type is 'typing.Iterable[<nothing>]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] -- Create Instance (Errors) [case testCannotCreateTypedDictInstanceWithUnknownArgumentPattern] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(42, 1337) # E: Expected keyword arguments, {...}, or dict(...) in TypedDict constructor [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictInstanceNonLiteralItemName] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) x = 'x' p = Point({x: 42, 'y': 1337}) # E: Expected TypedDict key to be string literal [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictInstanceWithExtraItems] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(x=42, y=1337, z=666) # E: Extra key 'z' for TypedDict "Point" [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictInstanceWithMissingItems] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(x=42) # E: Key 'y' missing for TypedDict "Point" [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictInstanceWithIncompatibleItemType] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p = Point(x='meaning_of_life', y=1337) # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") [builtins fixtures/dict.pyi] -- Define TypedDict (Class syntax) [case testCanCreateTypedDictWithClass] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point(TypedDict): x: int y: int p = Point(x=42, y=1337) reveal_type(p) # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})' [builtins fixtures/dict.pyi] [case testCanCreateTypedDictWithSubclass] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point1D(TypedDict): x: int class Point2D(Point1D): y: int r: Point1D p: Point2D reveal_type(r) # E: Revealed type is 'TypedDict('__main__.Point1D', {'x': builtins.int})' reveal_type(p) # E: Revealed type is 'TypedDict('__main__.Point2D', {'x': builtins.int, 'y': builtins.int})' [builtins fixtures/dict.pyi] [case testCanCreateTypedDictWithSubclass2] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point1D(TypedDict): x: int class Point2D(TypedDict, Point1D): # We also allow TypedDict in bases; it is simply ignored at runtime y: int p: Point2D reveal_type(p) # E: Revealed type is 'TypedDict('__main__.Point2D', {'x': builtins.int, 'y': builtins.int})' [builtins fixtures/dict.pyi] [case testCanCreateTypedDictClassEmpty] # flags: --python-version 3.6 from mypy_extensions import TypedDict class EmptyDict(TypedDict): pass p = EmptyDict() reveal_type(p) # E: Revealed type is 'TypedDict('__main__.EmptyDict', {})' [builtins fixtures/dict.pyi] -- Define TypedDict (Class syntax errors) [case testCanCreateTypedDictWithClassOldVersion] # flags: --python-version 3.5 from mypy_extensions import TypedDict class Point(TypedDict): # E: TypedDict class syntax is only supported in
Python 3.6 pass [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictWithClassOtherBases] # flags: --python-version 3.6 from mypy_extensions import TypedDict class A: pass class Point1D(TypedDict, A): # E: All bases of a new TypedDict must be TypedDict types x: int class Point2D(Point1D, A): # E: All bases of a new TypedDict must be TypedDict types y: int p: Point2D reveal_type(p) # E: Revealed type is 'TypedDict('__main__.Point2D', {'x': builtins.int, 'y': builtins.int})' [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictWithClassWithOtherStuff] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point(TypedDict): x: int y: int = 1 # E: Right hand side values are not supported in TypedDict def f(): pass # E: Invalid statement in TypedDict definition; expected "field_name: field_type" z = int # E: Invalid statement in TypedDict definition; expected "field_name: field_type" p = Point(x=42, y=1337, z='whatever') reveal_type(p) # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int, 'z': Any})' [builtins fixtures/dict.pyi] [case testCanCreateTypedDictTypeWithUnderscoreItemName] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int, '_fallback': object}) [builtins fixtures/dict.pyi] [case testCanCreateTypedDictWithClassUnderscores] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point(TypedDict): x: int _y: int p: Point reveal_type(p) # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, '_y': builtins.int})' [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictWithClassOverwriting] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Bad(TypedDict): x: int x: str # E: Duplicate TypedDict field "x" b: Bad reveal_type(b) # E: Revealed type is 'TypedDict('__main__.Bad', {'x': builtins.int})' [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictWithClassOverwriting2] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point1(TypedDict): x: int class Point2(TypedDict): x: float class Bad(Point1, Point2): # E: Cannot overwrite TypedDict field "x" while merging pass b: Bad reveal_type(b) # E: Revealed type is 'TypedDict('__main__.Bad', {'x': builtins.int})' [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictWithClassOverwriting2] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point1(TypedDict): x: int class Point2(Point1): x: float # E: Cannot overwrite TypedDict field "x" while extending p2: Point2 reveal_type(p2) # E: Revealed type is 'TypedDict('__main__.Point2', {'x': builtins.int})' [builtins fixtures/dict.pyi] -- Subtyping [case testCanConvertTypedDictToItself] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) def identity(p: Point) -> Point: return p [builtins fixtures/dict.pyi] [case testCanConvertTypedDictToEquivalentTypedDict] from mypy_extensions import TypedDict PointA = TypedDict('PointA', {'x': int, 'y': int}) PointB = TypedDict('PointB', {'x': int, 'y': int}) def identity(p: PointA) -> PointB: return p [builtins fixtures/dict.pyi] [case testCannotConvertTypedDictToSimilarTypedDictWithNarrowerItemTypes] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) ObjectPoint = TypedDict('ObjectPoint', {'x': object, 'y': object}) def convert(op: ObjectPoint) -> Point: return op # E: Incompatible return value type (got "ObjectPoint", expected "Point") [builtins fixtures/dict.pyi] [case 
testCannotConvertTypedDictToSimilarTypedDictWithWiderItemTypes] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) ObjectPoint = TypedDict('ObjectPoint', {'x': object, 'y': object}) def convert(p: Point) -> ObjectPoint: return p # E: Incompatible return value type (got "Point", expected "ObjectPoint") [builtins fixtures/dict.pyi] [case testCannotConvertTypedDictToSimilarTypedDictWithIncompatibleItemTypes] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) Chameleon = TypedDict('Chameleon', {'x': str, 'y': str}) def convert(p: Point) -> Chameleon: return p # E: Incompatible return value type (got "Point", expected "Chameleon") [builtins fixtures/dict.pyi] [case testCanConvertTypedDictToNarrowerTypedDict] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) Point1D = TypedDict('Point1D', {'x': int}) def narrow(p: Point) -> Point1D: return p [builtins fixtures/dict.pyi] [case testCannotConvertTypedDictToWiderTypedDict] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) Point3D = TypedDict('Point3D', {'x': int, 'y': int, 'z': int}) def widen(p: Point) -> Point3D: return p # E: Incompatible return value type (got "Point", expected "Point3D") [builtins fixtures/dict.pyi] [case testCanConvertTypedDictToCompatibleMapping] from mypy_extensions import TypedDict from typing import Mapping Point = TypedDict('Point', {'x': int, 'y': int}) def as_mapping(p: Point) -> Mapping[str, int]: return p [builtins fixtures/dict.pyi] [case testCannotConvertTypedDictToCompatibleMapping] from mypy_extensions import TypedDict from typing import Mapping Point = TypedDict('Point', {'x': int, 'y': int}) def as_mapping(p: Point) -> Mapping[str, str]: return p # E: Incompatible return value type (got "Point", expected "Mapping[str, str]") [builtins fixtures/dict.pyi] [case testTypedDictAcceptsIntForFloatDuckTypes] from mypy_extensions import TypedDict from typing import Any, Mapping Point = TypedDict('Point', {'x': float, 'y': float}) def create_point() -> Point: return Point(x=1, y=2) reveal_type(Point(x=1, y=2)) # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.float, 'y': builtins.float})' [builtins fixtures/dict.pyi] [case testTypedDictDoesNotAcceptsFloatForInt] from mypy_extensions import TypedDict from typing import Any, Mapping Point = TypedDict('Point', {'x': int, 'y': int}) def create_point() -> Point: return Point(x=1.2, y=2.5) [out] main:5: error: Incompatible types (expression has type "float", TypedDict item "x" has type "int") main:5: error: Incompatible types (expression has type "float", TypedDict item "y" has type "int") [builtins fixtures/dict.pyi] [case testTypedDictAcceptsAnyType] from mypy_extensions import TypedDict from typing import Any, Mapping Point = TypedDict('Point', {'x': float, 'y': float}) def create_point(something: Any) -> Point: return Point({ 'x': something.x, 'y': something.y }) [builtins fixtures/dict.pyi] [case testTypedDictValueTypeContext] from mypy_extensions import TypedDict from typing import List D = TypedDict('D', {'x': List[int]}) reveal_type(D(x=[])) # E: Revealed type is 'TypedDict('__main__.D', {'x': builtins.list[builtins.int]})' [builtins fixtures/dict.pyi] [case testCannotConvertTypedDictToDictOrMutableMapping] from mypy_extensions import TypedDict from typing import Dict, MutableMapping Point = TypedDict('Point', {'x': int, 'y': int}) def as_dict(p: Point) -> Dict[str, int]: return p # E: Incompatible return value 
type (got "Point", expected "Dict[str, int]") def as_mutable_mapping(p: Point) -> MutableMapping[str, int]: return p # E: Incompatible return value type (got "Point", expected "MutableMapping[str, int]") \ # N: 'Point' is missing following 'MutableMapping' protocol member: \ # N: __setitem__ [builtins fixtures/dict.pyi] [case testCanConvertTypedDictToAny] from mypy_extensions import TypedDict from typing import Any Point = TypedDict('Point', {'x': int, 'y': int}) def unprotect(p: Point) -> Any: return p [builtins fixtures/dict.pyi] [case testAnonymousTypedDictInErrorMessages] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': str}) B = TypedDict('B', {'x': int, 'z': str, 'a': int}) C = TypedDict('C', {'x': int, 'z': str, 'a': str}) a: A b: B c: C def f(a: A) -> None: pass l = [a, b] # Join generates an anonymous TypedDict f(l) # E: Argument 1 to "f" has incompatible type "List[TypedDict({'x': int})]"; expected "A" ll = [b, c] f(ll) # E: Argument 1 to "f" has incompatible type "List[TypedDict({'x': int, 'z': str})]"; expected "A" [builtins fixtures/dict.pyi] [case testTypedDictWithSimpleProtocol] from typing_extensions import Protocol from mypy_extensions import TypedDict class StrIntMap(Protocol): def __getitem__(self, key: str) -> int: ... A = TypedDict('A', {'x': int, 'y': int}) B = TypedDict('B', {'x': int, 'y': str}) def fun(arg: StrIntMap) -> None: ... a: A b: B fun(a) fun(b) # Error [builtins fixtures/dict.pyi] [out] main:14: error: Argument 1 to "fun" has incompatible type "B"; expected "StrIntMap" main:14: note: Following member(s) of "B" have conflicts: main:14: note: Expected: main:14: note: def __getitem__(self, str) -> int main:14: note: Got: main:14: note: def __getitem__(self, str) -> object [case testTypedDictWithSimpleProtocolInference] from typing_extensions import Protocol from mypy_extensions import TypedDict from typing import TypeVar T_co = TypeVar('T_co', covariant=True) T = TypeVar('T') class StrMap(Protocol[T_co]): def __getitem__(self, key: str) -> T_co: ... 
A = TypedDict('A', {'x': int, 'y': int}) B = TypedDict('B', {'x': int, 'y': str}) def fun(arg: StrMap[T]) -> T: return arg['whatever'] a: A b: B reveal_type(fun(a)) # E: Revealed type is 'builtins.int*' reveal_type(fun(b)) # E: Revealed type is 'builtins.object*' [builtins fixtures/dict.pyi] [out] -- Join [case testJoinOfTypedDictHasOnlyCommonKeysAndNewFallback] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) Point3D = TypedDict('Point3D', {'x': int, 'y': int, 'z': int}) p1 = TaggedPoint(type='2d', x=0, y=0) p2 = Point3D(x=1, y=1, z=1) joined_points = [p1, p2][0] reveal_type(p1.values()) # E: Revealed type is 'typing.Iterable[builtins.object*]' reveal_type(p2.values()) # E: Revealed type is 'typing.Iterable[builtins.int*]' reveal_type(joined_points) # E: Revealed type is 'TypedDict({'x': builtins.int, 'y': builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testJoinOfTypedDictRemovesNonequivalentKeys] from mypy_extensions import TypedDict CellWithInt = TypedDict('CellWithInt', {'value': object, 'meta': int}) CellWithObject = TypedDict('CellWithObject', {'value': object, 'meta': object}) c1 = CellWithInt(value=1, meta=42) c2 = CellWithObject(value=2, meta='turtle doves') joined_cells = [c1, c2] reveal_type(c1) # E: Revealed type is 'TypedDict('__main__.CellWithInt', {'value': builtins.object, 'meta': builtins.int})' reveal_type(c2) # E: Revealed type is 'TypedDict('__main__.CellWithObject', {'value': builtins.object, 'meta': builtins.object})' reveal_type(joined_cells) # E: Revealed type is 'builtins.list[TypedDict({'value': builtins.object}, fallback=typing.Mapping[builtins.str, builtins.object])]' [builtins fixtures/dict.pyi] [case testJoinOfDisjointTypedDictsIsEmptyTypedDict] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) Cell = TypedDict('Cell', {'value': object}) d1 = Point(x=0, y=0) d2 = Cell(value='pear tree') joined_dicts = [d1, d2] reveal_type(d1) # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})' reveal_type(d2) # E: Revealed type is 'TypedDict('__main__.Cell', {'value': builtins.object})' reveal_type(joined_dicts) # E: Revealed type is 'builtins.list[TypedDict({}, fallback=typing.Mapping[builtins.str, <nothing>])]' [builtins fixtures/dict.pyi] [case testJoinOfTypedDictWithCompatibleMappingIsMapping] from mypy_extensions import TypedDict from typing import Mapping Cell = TypedDict('Cell', {'value': int}) left = Cell(value=42) right = {'score': 999} # type: Mapping[str, int] joined1 = [left, right] joined2 = [right, left] reveal_type(joined1) # E: Revealed type is 'builtins.list[typing.Mapping*[builtins.str, builtins.int]]' reveal_type(joined2) # E: Revealed type is 'builtins.list[typing.Mapping*[builtins.str, builtins.int]]' [builtins fixtures/dict.pyi] [case testJoinOfTypedDictWithCompatibleMappingSupertypeIsSupertype] from mypy_extensions import TypedDict from typing import Sized Cell = TypedDict('Cell', {'value': int}) left = Cell(value=42) right = {'score': 999} # type: Sized joined1 = [left, right] joined2 = [right, left] reveal_type(joined1) # E: Revealed type is 'builtins.list[typing.Sized*]' reveal_type(joined2) # E: Revealed type is 'builtins.list[typing.Sized*]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testJoinOfTypedDictWithIncompatibleMappingIsObject] from mypy_extensions import TypedDict from typing import Mapping Cell =
TypedDict('Cell', {'value': int}) left = Cell(value=42) right = {'score': 'zero'} # type: Mapping[str, str] joined1 = [left, right] joined2 = [right, left] reveal_type(joined1) # E: Revealed type is 'builtins.list[builtins.object*]' reveal_type(joined2) # E: Revealed type is 'builtins.list[builtins.object*]' [builtins fixtures/dict.pyi] [case testJoinOfTypedDictWithIncompatibleTypeIsObject] from mypy_extensions import TypedDict from typing import Mapping Cell = TypedDict('Cell', {'value': int}) left = Cell(value=42) right = 42 joined1 = [left, right] joined2 = [right, left] reveal_type(joined1) # E: Revealed type is 'builtins.list[builtins.object*]' reveal_type(joined2) # E: Revealed type is 'builtins.list[builtins.object*]' [builtins fixtures/dict.pyi] -- Meet [case testMeetOfTypedDictsWithCompatibleCommonKeysHasAllKeysAndNewFallback] from mypy_extensions import TypedDict from typing import TypeVar, Callable XY = TypedDict('XY', {'x': int, 'y': int}) YZ = TypedDict('YZ', {'y': int, 'z': int}) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: XY, y: YZ) -> None: pass reveal_type(f(g)) # E: Revealed type is 'TypedDict({'x': builtins.int, 'y': builtins.int, 'z': builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])' [builtins fixtures/dict.pyi] [case testMeetOfTypedDictsWithIncompatibleCommonKeysIsUninhabited] # flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable XYa = TypedDict('XYa', {'x': int, 'y': int}) YbZ = TypedDict('YbZ', {'y': object, 'z': int}) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: XYa, y: YbZ) -> None: pass reveal_type(f(g)) # E: Revealed type is '<nothing>' [builtins fixtures/dict.pyi] [case testMeetOfTypedDictsWithNoCommonKeysHasAllKeysAndNewFallback] from mypy_extensions import TypedDict from typing import TypeVar, Callable X = TypedDict('X', {'x': int}) Z = TypedDict('Z', {'z': int}) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: X, y: Z) -> None: pass reveal_type(f(g)) # E: Revealed type is 'TypedDict({'x': builtins.int, 'z': builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])' [builtins fixtures/dict.pyi] # TODO: It would be more accurate for the meet to be TypedDict instead. [case testMeetOfTypedDictWithCompatibleMappingIsUninhabitedForNow] # flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable, Mapping X = TypedDict('X', {'x': int}) M = Mapping[str, int] T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: X, y: M) -> None: pass reveal_type(f(g)) # E: Revealed type is '<nothing>' [builtins fixtures/dict.pyi] [case testMeetOfTypedDictWithIncompatibleMappingIsUninhabited] # flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable, Mapping X = TypedDict('X', {'x': int}) M = Mapping[str, str] T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: X, y: M) -> None: pass reveal_type(f(g)) # E: Revealed type is '<nothing>' [builtins fixtures/dict.pyi] # TODO: It would be more accurate for the meet to be TypedDict instead.
[case testMeetOfTypedDictWithCompatibleMappingSuperclassIsUninhabitedForNow] # flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable, Iterable X = TypedDict('X', {'x': int}) I = Iterable[str] T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: X, y: I) -> None: pass reveal_type(f(g)) # E: Revealed type is '<nothing>' [builtins fixtures/dict.pyi] [case testMeetOfTypedDictsWithNonTotal] from mypy_extensions import TypedDict from typing import TypeVar, Callable XY = TypedDict('XY', {'x': int, 'y': int}, total=False) YZ = TypedDict('YZ', {'y': int, 'z': int}, total=False) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: XY, y: YZ) -> None: pass reveal_type(f(g)) # E: Revealed type is 'TypedDict({'x'?: builtins.int, 'y'?: builtins.int, 'z'?: builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])' [builtins fixtures/dict.pyi] [case testMeetOfTypedDictsWithNonTotalAndTotal] from mypy_extensions import TypedDict from typing import TypeVar, Callable XY = TypedDict('XY', {'x': int}, total=False) YZ = TypedDict('YZ', {'y': int, 'z': int}) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: XY, y: YZ) -> None: pass reveal_type(f(g)) # E: Revealed type is 'TypedDict({'x'?: builtins.int, 'y': builtins.int, 'z': builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])' [builtins fixtures/dict.pyi] [case testMeetOfTypedDictsWithIncompatibleNonTotalAndTotal] # flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable XY = TypedDict('XY', {'x': int, 'y': int}, total=False) YZ = TypedDict('YZ', {'y': int, 'z': int}) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: XY, y: YZ) -> None: pass reveal_type(f(g)) # E: Revealed type is '<nothing>' [builtins fixtures/dict.pyi] -- Constraint Solver [case testTypedDictConstraintsAgainstIterable] from typing import TypeVar, Iterable from mypy_extensions import TypedDict T = TypeVar('T') def f(x: Iterable[T]) -> T: pass A = TypedDict('A', {'x': int}) a: A reveal_type(f(a)) # E: Revealed type is 'builtins.str*' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] -- TODO: Figure out some way to trigger the ConstraintBuilderVisitor.visit_typeddict_type() path.
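--
-- A user-level sketch of the TypedDict meet behaviour exercised by the "-- Meet" cases
-- above (hypothetical names, illustration only, not an additional test case): a value
-- that is accepted both where an XY and where a YZ is expected must provide all three
-- keys, which is why the solved meet above contains 'x', 'y' and 'z':
--
--   from mypy_extensions import TypedDict
--   XY = TypedDict('XY', {'x': int, 'y': int})
--   YZ = TypedDict('YZ', {'y': int, 'z': int})
--   XYZ = TypedDict('XYZ', {'x': int, 'y': int, 'z': int})
--   def wants_xy(d: XY) -> None: ...
--   def wants_yz(d: YZ) -> None: ...
--   p = XYZ(x=1, y=2, z=3)
--   wants_xy(p)  # accepted: XYZ is a structural subtype of XY
--   wants_yz(p)  # accepted: XYZ is a structural subtype of YZ
--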
-- Special Method: __getitem__ [case testCanGetItemOfTypedDictWithValidStringLiteralKey] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) reveal_type(p['type']) # E: Revealed type is 'builtins.str' reveal_type(p['x']) # E: Revealed type is 'builtins.int' reveal_type(p['y']) # E: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] [case testCanGetItemOfTypedDictWithValidBytesOrUnicodeLiteralKey] # flags: --python-version 2.7 from mypy_extensions import TypedDict Cell = TypedDict('Cell', {'value': int}) c = Cell(value=42) reveal_type(c['value']) # E: Revealed type is 'builtins.int' reveal_type(c[u'value']) # E: Revealed type is 'builtins.int' [builtins_py2 fixtures/dict.pyi] [case testCannotGetItemOfTypedDictWithInvalidStringLiteralKey] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p: TaggedPoint p['z'] # E: TypedDict "TaggedPoint" has no key 'z' [builtins fixtures/dict.pyi] [case testCannotGetItemOfAnonymousTypedDictWithInvalidStringLiteralKey] from typing import TypeVar from mypy_extensions import TypedDict A = TypedDict('A', {'x': str, 'y': int, 'z': str}) B = TypedDict('B', {'x': str, 'z': int}) C = TypedDict('C', {'x': str, 'y': int, 'z': int}) T = TypeVar('T') def join(x: T, y: T) -> T: return x ab = join(A(x='', y=1, z=''), B(x='', z=1)) ac = join(A(x='', y=1, z=''), C(x='', y=0, z=1)) ab['y'] # E: 'y' is not a valid TypedDict key; expected one of ('x') ac['a'] # E: 'a' is not a valid TypedDict key; expected one of ('x', 'y') [builtins fixtures/dict.pyi] [case testCannotGetItemOfTypedDictWithNonLiteralKey] from mypy_extensions import TypedDict from typing import Union TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) def get_coordinate(p: TaggedPoint, key: str) -> Union[str, int]: return p[key] # E: TypedDict key must be a string literal; expected one of ('type', 'x', 'y') [builtins fixtures/dict.pyi] -- Special Method: __setitem__ [case testCanSetItemOfTypedDictWithValidStringLiteralKeyAndCompatibleValueType] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) p['type'] = 'two_d' p['x'] = 1 [builtins fixtures/dict.pyi] [case testCannotSetItemOfTypedDictWithIncompatibleValueType] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) p['x'] = 'y' # E: Argument 2 has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [case testCannotSetItemOfTypedDictWithInvalidStringLiteralKey] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) p['z'] = 1 # E: TypedDict "TaggedPoint" has no key 'z' [builtins fixtures/dict.pyi] [case testCannotSetItemOfTypedDictWithNonLiteralKey] from mypy_extensions import TypedDict from typing import Union TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) def set_coordinate(p: TaggedPoint, key: str, value: int) -> None: p[key] = value # E: TypedDict key must be a string literal; expected one of ('type', 'x', 'y') [builtins fixtures/dict.pyi] -- isinstance [case testTypedDictAndInstance] from mypy_extensions import TypedDict D = TypedDict('D', {'x': 
int}) d: object if isinstance(d, D): # E: Cannot use isinstance() with a TypedDict type reveal_type(d) # E: Revealed type is '__main__.D' [builtins fixtures/isinstancelist.pyi] -- Scoping [case testTypedDictInClassNamespace] # https://github.com/python/mypy/pull/2553#issuecomment-266474341 from mypy_extensions import TypedDict class C: def f(self): A = TypedDict('A', {'x': int}) def g(self): A = TypedDict('A', {'y': int}) C.A # E: "Type[C]" has no attribute "A" [builtins fixtures/dict.pyi] [case testTypedDictInFunction] from mypy_extensions import TypedDict def f() -> None: A = TypedDict('A', {'x': int}) A # E: Name 'A' is not defined [builtins fixtures/dict.pyi] -- Union simplification / proper subtype checks [case testTypedDictUnionSimplification] from typing import TypeVar, Union, Any, cast from mypy_extensions import TypedDict T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass C = TypedDict('C', {'a': int}) D = TypedDict('D', {'a': int, 'b': int}) E = TypedDict('E', {'a': str}) F = TypedDict('F', {'x': int}) G = TypedDict('G', {'a': Any}) c = C(a=1) d = D(a=1, b=1) e = E(a='') f = F(x=1) g = G(a=cast(Any, 1)) # Work around #2610 reveal_type(u(d, d)) # E: Revealed type is 'TypedDict('__main__.D', {'a': builtins.int, 'b': builtins.int})' reveal_type(u(c, d)) # E: Revealed type is 'TypedDict('__main__.C', {'a': builtins.int})' reveal_type(u(d, c)) # E: Revealed type is 'TypedDict('__main__.C', {'a': builtins.int})' reveal_type(u(c, e)) # E: Revealed type is 'Union[TypedDict('__main__.E', {'a': builtins.str}), TypedDict('__main__.C', {'a': builtins.int})]' reveal_type(u(e, c)) # E: Revealed type is 'Union[TypedDict('__main__.C', {'a': builtins.int}), TypedDict('__main__.E', {'a': builtins.str})]' reveal_type(u(c, f)) # E: Revealed type is 'Union[TypedDict('__main__.F', {'x': builtins.int}), TypedDict('__main__.C', {'a': builtins.int})]' reveal_type(u(f, c)) # E: Revealed type is 'Union[TypedDict('__main__.C', {'a': builtins.int}), TypedDict('__main__.F', {'x': builtins.int})]' reveal_type(u(c, g)) # E: Revealed type is 'Union[TypedDict('__main__.G', {'a': Any}), TypedDict('__main__.C', {'a': builtins.int})]' reveal_type(u(g, c)) # E: Revealed type is 'Union[TypedDict('__main__.C', {'a': builtins.int}), TypedDict('__main__.G', {'a': Any})]' [builtins fixtures/dict.pyi] [case testTypedDictUnionSimplification2] from typing import TypeVar, Union, Mapping, Any from mypy_extensions import TypedDict T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass C = TypedDict('C', {'a': int, 'b': int}) c = C(a=1, b=1) m_s_i: Mapping[str, int] m_s_s: Mapping[str, str] m_i_i: Mapping[int, int] m_s_a: Mapping[str, Any] reveal_type(u(c, m_s_i)) # E: Revealed type is 'typing.Mapping*[builtins.str, builtins.int]' reveal_type(u(m_s_i, c)) # E: Revealed type is 'typing.Mapping*[builtins.str, builtins.int]' reveal_type(u(c, m_s_s)) # E: Revealed type is 'Union[typing.Mapping*[builtins.str, builtins.str], TypedDict('__main__.C', {'a': builtins.int, 'b': builtins.int})]' reveal_type(u(c, m_i_i)) # E: Revealed type is 'Union[typing.Mapping*[builtins.int, builtins.int], TypedDict('__main__.C', {'a': builtins.int, 'b': builtins.int})]' reveal_type(u(c, m_s_a)) # E: Revealed type is 'Union[typing.Mapping*[builtins.str, Any], TypedDict('__main__.C', {'a': builtins.int, 'b': builtins.int})]' [builtins fixtures/dict.pyi] -- Use dict literals [case testTypedDictDictLiterals] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) def f(p: Point) -> None: 
p = {'x': 2, 'y': 3} p = {'x': 2} # E: Key 'y' missing for TypedDict "Point" p = dict(x=2, y=3) f({'x': 1, 'y': 3}) f({'x': 1, 'y': 'z'}) # E: Incompatible types (expression has type "str", TypedDict item "y" has type "int") f(dict(x=1, y=3)) f(dict(x=1, y=3, z=4)) # E: Extra key 'z' for TypedDict "Point" f(dict(x=1, y=3, z=4, a=5)) # E: Extra keys ('z', 'a') for TypedDict "Point" [builtins fixtures/dict.pyi] [case testTypedDictExplicitTypes] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) p1a: Point = {'x': 'hi'} # E: Key 'y' missing for TypedDict "Point" p1b: Point = {} # E: Keys ('x', 'y') missing for TypedDict "Point" p2: Point p2 = dict(x='bye') # E: Key 'y' missing for TypedDict "Point" p3 = Point(x=1, y=2) p3 = {'x': 'hi'} # E: Key 'y' missing for TypedDict "Point" p4: Point = {'x': 1, 'y': 2} [builtins fixtures/dict.pyi] [case testCannotCreateAnonymousTypedDictInstanceUsingDictLiteralWithExtraItems] from mypy_extensions import TypedDict from typing import TypeVar A = TypedDict('A', {'x': int, 'y': int}) B = TypedDict('B', {'x': int, 'y': str}) T = TypeVar('T') def join(x: T, y: T) -> T: return x ab = join(A(x=1, y=1), B(x=1, y='')) ab = {'x': 1, 'z': 1} # E: Expected TypedDict key 'x' but found keys ('x', 'z') [builtins fixtures/dict.pyi] [case testCannotCreateAnonymousTypedDictInstanceUsingDictLiteralWithMissingItems] from mypy_extensions import TypedDict from typing import TypeVar A = TypedDict('A', {'x': int, 'y': int, 'z': int}) B = TypedDict('B', {'x': int, 'y': int, 'z': str}) T = TypeVar('T') def join(x: T, y: T) -> T: return x ab = join(A(x=1, y=1, z=1), B(x=1, y=1, z='')) ab = {} # E: Expected TypedDict keys ('x', 'y') but found no keys [builtins fixtures/dict.pyi] -- Other TypedDict methods [case testTypedDictGetMethod] # flags: --strict-optional from mypy_extensions import TypedDict class A: pass D = TypedDict('D', {'x': int, 'y': str}) d: D reveal_type(d.get('x')) # E: Revealed type is 'Union[builtins.int, builtins.None]' reveal_type(d.get('y')) # E: Revealed type is 'Union[builtins.str, builtins.None]' reveal_type(d.get('x', A())) # E: Revealed type is 'Union[builtins.int, __main__.A]' reveal_type(d.get('x', 1)) # E: Revealed type is 'builtins.int' reveal_type(d.get('y', None)) # E: Revealed type is 'Union[builtins.str, builtins.None]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictGetMethodTypeContext] # flags: --strict-optional from typing import List from mypy_extensions import TypedDict class A: pass D = TypedDict('D', {'x': List[int], 'y': int}) d: D reveal_type(d.get('x', [])) # E: Revealed type is 'builtins.list[builtins.int]' d.get('x', ['x']) # E: List item 0 has incompatible type "str"; expected "int" a = [''] reveal_type(d.get('x', a)) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str*]]' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictGetMethodInvalidArgs] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}) d: D d.get() # E: No overload variant of "get" of "Mapping" matches argument types [] d.get('x', 1, 2) # E: No overload variant of "get" of "Mapping" matches argument types [builtins.str, builtins.int, builtins.int] x = d.get('z') # E: TypedDict "D" has no key 'z' reveal_type(x) # E: Revealed type is 'Any' s = '' y = d.get(s) reveal_type(y) # E: Revealed type is 'builtins.object*' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictMissingMethod] from 
mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}) d: D d.bad(1) # E: "D" has no attribute "bad" [builtins fixtures/dict.pyi] [case testTypedDictChainedGetMethodWithDictFallback] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}) E = TypedDict('E', {'d': D}) p = E(d=D(x=0, y='')) reveal_type(p.get('d', {'x': 1, 'y': ''})) # E: Revealed type is 'TypedDict('__main__.D', {'x': builtins.int, 'y': builtins.str})' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictGetDefaultParameterStillTypeChecked] from mypy_extensions import TypedDict TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) p.get('x', 1 + 'y') # E: Unsupported operand types for + ("int" and "str") [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictChainedGetWithEmptyDictDefault] # flags: --strict-optional from mypy_extensions import TypedDict C = TypedDict('C', {'a': int}) D = TypedDict('D', {'x': C, 'y': str}) d: D reveal_type(d.get('x', {})) \ # E: Revealed type is 'TypedDict('__main__.C', {'a'?: builtins.int})' reveal_type(d.get('x', None)) \ # E: Revealed type is 'Union[TypedDict('__main__.C', {'a': builtins.int}), builtins.None]' reveal_type(d.get('x', {}).get('a')) # E: Revealed type is 'Union[builtins.int, builtins.None]' reveal_type(d.get('x', {})['a']) # E: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] -- Totality (the "total" keyword argument) [case testTypedDictWithTotalTrue] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}, total=True) d: D reveal_type(d) \ # E: Revealed type is 'TypedDict('__main__.D', {'x': builtins.int, 'y': builtins.str})' [builtins fixtures/dict.pyi] [case testTypedDictWithInvalidTotalArgument] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}, total=0) # E: TypedDict() "total" argument must be True or False B = TypedDict('B', {'x': int}, total=bool) # E: TypedDict() "total" argument must be True or False C = TypedDict('C', {'x': int}, x=False) # E: Unexpected keyword argument "x" for "TypedDict" D = TypedDict('D', {'x': int}, False) # E: Unexpected arguments to TypedDict() [builtins fixtures/dict.pyi] [case testTypedDictWithTotalFalse] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}, total=False) def f(d: D) -> None: reveal_type(d) # E: Revealed type is 'TypedDict('__main__.D', {'x'?: builtins.int, 'y'?: builtins.str})' f({}) f({'x': 1}) f({'y': ''}) f({'x': 1, 'y': ''}) f({'x': 1, 'z': ''}) # E: Extra key 'z' for TypedDict "D" f({'x': ''}) # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") [builtins fixtures/dict.pyi] [case testTypedDictConstructorWithTotalFalse] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}, total=False) def f(d: D) -> None: pass reveal_type(D()) # E: Revealed type is 'TypedDict('__main__.D', {'x'?: builtins.int, 'y'?: builtins.str})' reveal_type(D(x=1)) # E: Revealed type is 'TypedDict('__main__.D', {'x'?: builtins.int, 'y'?: builtins.str})' f(D(y='')) f(D(x=1, y='')) f(D(x=1, z='')) # E: Extra key 'z' for TypedDict "D" f(D(x='')) # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") [builtins fixtures/dict.pyi] [case testTypedDictIndexingWithNonRequiredKey] from mypy_extensions import TypedDict D = TypedDict('D', {'x': int, 'y': str}, total=False) d: D reveal_type(d['x']) # E: 
Revealed type is 'builtins.int' reveal_type(d['y']) # E: Revealed type is 'builtins.str' reveal_type(d.get('x')) # E: Revealed type is 'builtins.int' reveal_type(d.get('y')) # E: Revealed type is 'builtins.str' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictSubtypingWithTotalFalse] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) B = TypedDict('B', {'x': int}, total=False) C = TypedDict('C', {'x': int, 'y': str}, total=False) def fa(a: A) -> None: pass def fb(b: B) -> None: pass def fc(c: C) -> None: pass a: A b: B c: C fb(b) fc(c) fb(c) fb(a) # E: Argument 1 to "fb" has incompatible type "A"; expected "B" fa(b) # E: Argument 1 to "fa" has incompatible type "B"; expected "A" fc(b) # E: Argument 1 to "fc" has incompatible type "B"; expected "C" [builtins fixtures/dict.pyi] [case testTypedDictJoinWithTotalFalse] from typing import TypeVar from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) B = TypedDict('B', {'x': int}, total=False) C = TypedDict('C', {'x': int, 'y': str}, total=False) T = TypeVar('T') def j(x: T, y: T) -> T: return x a: A b: B c: C reveal_type(j(a, b)) \ # E: Revealed type is 'TypedDict({}, fallback=typing.Mapping[builtins.str, ])' reveal_type(j(b, b)) \ # E: Revealed type is 'TypedDict({'x'?: builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])' reveal_type(j(c, c)) \ # E: Revealed type is 'TypedDict({'x'?: builtins.int, 'y'?: builtins.str}, fallback=typing.Mapping[builtins.str, builtins.object])' reveal_type(j(b, c)) \ # E: Revealed type is 'TypedDict({'x'?: builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])' reveal_type(j(c, b)) \ # E: Revealed type is 'TypedDict({'x'?: builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])' [builtins fixtures/dict.pyi] [case testTypedDictClassWithTotalArgument] from mypy_extensions import TypedDict class D(TypedDict, total=False): x: int y: str d: D reveal_type(d) # E: Revealed type is 'TypedDict('__main__.D', {'x'?: builtins.int, 'y'?: builtins.str})' [builtins fixtures/dict.pyi] [case testTypedDictClassWithInvalidTotalArgument] from mypy_extensions import TypedDict class D(TypedDict, total=1): # E: Value of "total" must be True or False x: int class E(TypedDict, total=bool): # E: Value of "total" must be True or False x: int class F(TypedDict, total=xyz): # E: Value of "total" must be True or False \ # E: Name 'xyz' is not defined x: int [builtins fixtures/dict.pyi] [case testTypedDictClassInheritanceWithTotalArgument] from mypy_extensions import TypedDict class A(TypedDict): x: int class B(TypedDict, A, total=False): y: int class C(TypedDict, B, total=True): z: str c: C reveal_type(c) # E: Revealed type is 'TypedDict('__main__.C', {'x': builtins.int, 'y'?: builtins.int, 'z': builtins.str})' [builtins fixtures/dict.pyi] [case testNonTotalTypedDictInErrorMessages] from mypy_extensions import TypedDict A = TypedDict('A', {'x': int, 'y': str}, total=False) B = TypedDict('B', {'x': int, 'z': str, 'a': int}, total=False) C = TypedDict('C', {'x': int, 'z': str, 'a': str}, total=False) a: A b: B c: C def f(a: A) -> None: pass l = [a, b] # Join generates an anonymous TypedDict f(l) # E: Argument 1 to "f" has incompatible type "List[TypedDict({'x'?: int})]"; expected "A" ll = [b, c] f(ll) # E: Argument 1 to "f" has incompatible type "List[TypedDict({'x'?: int, 'z'?: str})]"; expected "A" [builtins fixtures/dict.pyi] -- Create Type (Errors) [case testCannotCreateTypedDictTypeWithTooFewArguments] from mypy_extensions import 
TypedDict Point = TypedDict('Point') # E: Too few arguments for TypedDict() [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictTypeWithTooManyArguments] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}, dict) # E: Unexpected arguments to TypedDict() [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictTypeWithInvalidName] from mypy_extensions import TypedDict Point = TypedDict(dict, {'x': int, 'y': int}) # E: TypedDict() expects a string literal as the first argument [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictTypeWithInvalidItems] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x'}) # E: TypedDict() expects a dictionary literal as the second argument [builtins fixtures/dict.pyi] -- NOTE: The following code works at runtime but is not yet supported by mypy. -- Keyword arguments may potentially be supported in the future. [case testCannotCreateTypedDictTypeWithNonpositionalArgs] from mypy_extensions import TypedDict Point = TypedDict(typename='Point', fields={'x': int, 'y': int}) # E: Unexpected arguments to TypedDict() [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictTypeWithInvalidItemName] from mypy_extensions import TypedDict Point = TypedDict('Point', {int: int, int: int}) # E: Invalid TypedDict() field name [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictTypeWithInvalidItemType] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': 1, 'y': 1}) # E: Invalid field type [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictTypeWithInvalidName] from mypy_extensions import TypedDict X = TypedDict('Y', {'x': int}) # E: First argument 'Y' to TypedDict() does not match variable name 'X' [builtins fixtures/dict.pyi] -- Overloading [case testTypedDictOverloading] from typing import overload, Iterable from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) @overload def f(x: Iterable[str]) -> str: ... @overload def f(x: int) -> int: ... def f(x): pass a: A reveal_type(f(a)) # E: Revealed type is 'builtins.str' reveal_type(f(1)) # E: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverloading2] from typing import overload, Iterable from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) @overload def f(x: Iterable[int]) -> None: ... @overload def f(x: int) -> None: ... def f(x): pass a: A f(a) [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [out] main:13: error: Argument 1 to "f" has incompatible type "A"; expected "Iterable[int]" main:13: note: Following member(s) of "A" have conflicts: main:13: note: Expected: main:13: note: def __iter__(self) -> Iterator[int] main:13: note: Got: main:13: note: def __iter__(self) -> Iterator[str] [case testTypedDictOverloading3] from typing import overload from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) @overload def f(x: str) -> None: ... @overload def f(x: int) -> None: ... def f(x): pass a: A f(a) # E: No overload variant of "f" matches argument types [TypedDict('__main__.A', {'x': builtins.int})] [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverloading4] from typing import overload from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) B = TypedDict('B', {'x': str}) @overload def f(x: A) -> int: ... @overload def f(x: int) -> str: ... 
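# Overload resolution in this case is structural: an A value (x: int) selects the first
# variant and a plain int selects the second, while B (x: str) is compatible with neither,
# so the call below is reported against the first variant's expected type "A".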
def f(x): pass a: A b: B reveal_type(f(a)) # E: Revealed type is 'builtins.int' reveal_type(f(1)) # E: Revealed type is 'builtins.str' f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A" [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverloading5] from typing import overload from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) B = TypedDict('B', {'y': str}) C = TypedDict('C', {'y': int}) @overload def f(x: A) -> None: ... @overload def f(x: B) -> None: ... def f(x): pass a: A b: B c: C f(a) f(b) f(c) # E: Argument 1 to "f" has incompatible type "C"; expected "A" [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] [case testTypedDictOverloading6] from typing import overload from mypy_extensions import TypedDict A = TypedDict('A', {'x': int}) B = TypedDict('B', {'y': str}) @overload def f(x: A) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def f(x: B) -> str: ... def f(x): pass a: A b: B reveal_type(f(a)) # E: Revealed type is 'Any' reveal_type(f(b)) # E: Revealed type is 'Any' [builtins fixtures/dict.pyi] [typing fixtures/typing-full.pyi] -- Special cases [case testForwardReferenceInTypedDict] from typing import Mapping from mypy_extensions import TypedDict X = TypedDict('X', {'b': 'B', 'c': 'C'}) class B: pass class C(B): pass x: X reveal_type(x) # E: Revealed type is 'TypedDict('__main__.X', {'b': __main__.B, 'c': __main__.C})' m1: Mapping[str, B] = x m2: Mapping[str, C] = x # E: Incompatible types in assignment (expression has type "X", variable has type "Mapping[str, C]") [builtins fixtures/dict.pyi] [case testForwardReferenceInClassTypedDict] from typing import Mapping from mypy_extensions import TypedDict class X(TypedDict): b: 'B' c: 'C' class B: pass class C(B): pass x: X reveal_type(x) # E: Revealed type is 'TypedDict('__main__.X', {'b': __main__.B, 'c': __main__.C})' m1: Mapping[str, B] = x m2: Mapping[str, C] = x # E: Incompatible types in assignment (expression has type "X", variable has type "Mapping[str, C]") [builtins fixtures/dict.pyi] [case testForwardReferenceToTypedDictInTypedDict] from typing import Mapping from mypy_extensions import TypedDict X = TypedDict('X', {'a': 'A'}) A = TypedDict('A', {'b': int}) x: X reveal_type(x) # E: Revealed type is 'TypedDict('__main__.X', {'a': TypedDict('__main__.A', {'b': builtins.int})})' reveal_type(x['a']['b']) # E: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] [case testSelfRecursiveTypedDictInheriting] from mypy_extensions import TypedDict class MovieBase(TypedDict): name: str year: int class Movie(MovieBase): # type: ignore # warning about recursive not fully supported director: 'Movie' m: Movie reveal_type(m['director']['name']) # E: Revealed type is 'builtins.str' [builtins fixtures/dict.pyi] [out] [case testTypedDictForwardAsUpperBound] from typing import TypeVar, Generic from mypy_extensions import TypedDict T = TypeVar('T', bound='M') class G(Generic[T]): x: T yb: G[int] # E: Type argument "builtins.int" of "G" must be a subtype of "TypedDict({'x': builtins.int}, fallback=typing.Mapping[builtins.str, builtins.object])" yg: G[M] z: int = G[M]().x['x'] class M(TypedDict): x: int [builtins fixtures/dict.pyi] [out] [case testTypedDictWithImportCycleForward] import a [file a.py] from mypy_extensions import TypedDict from b import f N = TypedDict('N', {'a': str}) [file b.py] import a def f(x: a.N) -> None: reveal_type(x) reveal_type(x['a']) [builtins fixtures/dict.pyi] [out] 
tmp/b.py:4: error: Revealed type is 'TypedDict('a.N', {'a': builtins.str})' tmp/b.py:5: error: Revealed type is 'builtins.str' mypy-0.560/test-data/unit/check-typevar-values.test0000644€tŠÔÚ€2›s®0000004050013215007205026476 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for type variables with values restriction. [case testCallGenericFunctionWithTypeVarValueRestriction] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> None: pass f(1) f('x') f(object()) # E: Value of type variable "T" of "f" cannot be "object" [case testCallGenericFunctionWithTypeVarValueRestrictionUsingContext] from typing import TypeVar, List T = TypeVar('T', int, str) def f(x: T) -> List[T]: pass i = [1] s = ['x'] o = [object()] i = f(1) s = f('') o = f(1) # E: Value of type variable "T" of "f" cannot be "object" [builtins fixtures/list.pyi] [case testCallGenericFunctionWithTypeVarValueRestrictionAndAnyArgs] from typing import TypeVar, Any, cast T = TypeVar('T', int, str) def f(x: T) -> None: pass f(cast(Any, object())) [out] [case testCallGenericFunctionWithTypeVarValueRestrictionInDynamicFunc] from typing import TypeVar, Any T = TypeVar('T', int, str) def f(x: T) -> None: pass def g(): f(object()) [out] [case testCallGenericFunctionWithTypeVarValueRestrictionUsingSubtype] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> None: pass class S(str): pass f(S()) [out] [case testCheckGenericFunctionBodyWithTypeVarValues] from typing import TypeVar class A: def f(self, x: int) -> A: return self class B: def f(self, x: int) -> B: return self AB = TypeVar('AB', A, B) def f(x: AB) -> AB: x = x.f(1) return x.f(1) [case testCheckGenericFunctionBodyWithTypeVarValues2] from typing import TypeVar class A: def f(self) -> A: return A() def g(self) -> B: return B() class B: def f(self) -> A: return A() def g(self) -> B: return B() AB = TypeVar('AB', A, B) def f(x: AB) -> AB: return x.f() # Error def g(x: AB) -> AB: return x.g() # Error [out] main:10: error: Incompatible return value type (got "A", expected "B") main:12: error: Incompatible return value type (got "B", expected "A") [case testTypeInferenceAndTypeVarValues] from typing import TypeVar class A: def f(self) -> A: return self def g(self) -> B: return B() class B: def f(self) -> B: return self def g(self) -> B: return B() AB = TypeVar('AB', A, B) def f(x: AB) -> AB: y = x if y: return y.f() else: return y.g() # E: Incompatible return value type (got "B", expected "A") [out] [case testTypeDeclaredBasedOnTypeVarWithValues] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: a = None # type: T b = None # type: T a = x b = x a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") return x [out] [case testIsinstanceAndTypeVarValues] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: if isinstance(x, int): return 2 return x def g(x: T) -> T: if isinstance(x, str): return '' return x def h(x: T) -> T: if isinstance(x, int): return '' # E: Incompatible return value type (got "str", expected "int") return x [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceAndTypeVarValues2] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: if isinstance(x, int): return 2 else: return '' def g(x: T) -> T: if isinstance(x, int): return '' # E: Incompatible return value type (got "str", expected "int") else: return 2 # E: Incompatible return 
value type (got "int", expected "str") return x [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceAndTypeVarValues3] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: if isinstance(x, int): y = 1 else: y = '' return y [builtins fixtures/isinstance.pyi] [case testIsinstanceAndTypeVarValues4] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: if isinstance(x, int): y = 1 else: y = object() return y # E: Incompatible return value type (got "object", expected "str") [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceAndTypeVarValues5] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: if isinstance(x, int): y = object() else: y = '' return y # E: Incompatible return value type (got "object", expected "int") [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceWithUserDefinedTypeAndTypeVarValues] from typing import TypeVar class A: pass class B: pass T = TypeVar('T', A, B) def f(x: T) -> None: y = x if isinstance(x, A): # This is only checked when x is A, since A and B are not considered overlapping. x = y x = A() else: x = B() x = y x.foo() # E: "B" has no attribute "foo" S = TypeVar('S', int, str) def g(x: S) -> None: y = x if isinstance(x, int): x = y [builtins fixtures/isinstance.pyi] [out] [case testIsinstanceWithUserDefinedTypeAndTypeVarValues2] from typing import TypeVar class S(str): pass T = TypeVar('T', S, int) def f(x: T) -> None: y = x if isinstance(x, S): # This is checked only when type of x is str. x = y x = S() x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "S") else: x = y x = 1 x = S() # E: Incompatible types in assignment (expression has type "S", variable has type "int") [builtins fixtures/isinstance.pyi] [out] [case testTypeVarValuesAndNestedCalls] from typing import TypeVar T = TypeVar('T', int, str) def f(m: T) -> int: pass def h(x: int) -> int: pass def g(a: T) -> None: h(f(a)) [out] [case testGenericTypeWithTypevarValues] from typing import TypeVar, Generic, Any X = TypeVar('X', int, str) class A(Generic[X]): pass a = None # type: A[int] b = None # type: A[str] d = None # type: A[object] # E: Value of type variable "X" of "A" cannot be "object" c = None # type: A[Any] [case testConstructGenericTypeWithTypevarValuesAndTypeInference] from typing import TypeVar, Generic, Any, cast X = TypeVar('X', int, str) class A(Generic[X]): def __init__(self, x: X) -> None: pass A(1) A('x') A(cast(Any, object())) A(object()) # E: Value of type variable "X" of "A" cannot be "object" [case testGenericTypeWithTypevarValuesAndTypevarArgument] from typing import TypeVar, Generic class C: pass X = TypeVar('X', int, str) Y = TypeVar('Y', int, C) Z = TypeVar('Z') class D(Generic[X]): def __init__(self, x: X) -> None: pass def f(x: X) -> None: a = None # type: D[X] def g(x: Y) -> None: a = None # type: D[Y] def h(x: Z) -> None: a = None # type: D[Z] [out] main:11: error: Invalid type argument value for "D" main:13: error: Type variable "Z" not valid as type argument value for "D" [case testGenericTypeWithTypevarValuesAndSubtypePromotion] from typing import TypeVar, Generic X = TypeVar('X', int, str) class S(str): pass class C(Generic[X]): def __init__(self, x: X) -> None: pass x = None # type: C[str] y = C(S()) x = y y = x c_int = C(1) # type: C[int] y = c_int # E: Incompatible types in assignment (expression has type "C[int]", variable has type "C[str]") [case testGenericTypeBodyWithTypevarValues] from typing import TypeVar, Generic class A: def f(self, x: 
int) -> None: pass def g(self, x: int) -> None: pass def h(self, x: str) -> None: pass class B: def f(self, x: int) -> None: pass def g(self, x: str) -> None: pass def h(self, x: int) -> None: pass X = TypeVar('X', A, B) class C(Generic[X]): def f(self, x: X) -> None: x.f(1) x.g(1) # E: Argument 1 to "g" of "B" has incompatible type "int"; expected "str" x.h(1) # E: Argument 1 to "h" of "A" has incompatible type "int"; expected "str" [out] [case testAttributeInGenericTypeWithTypevarValues1] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): x = None # type: X def f(self, x: X) -> None: self.x = x self.x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [out] [case testAttributeInGenericTypeWithTypevarValues2] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): x = None # type: X cn = C() # type: C[int] cn.x = 1 cn.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") cs = C() # type: C[str] cs.x = '' cs.x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAttributeInGenericTypeWithTypevarValues3] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): def f(self, x: X) -> None: self.x = x # type: X ci: C[int] cs: C[str] reveal_type(ci.x) # E: Revealed type is 'builtins.int*' reveal_type(cs.x) # E: Revealed type is 'builtins.str*' [case testAttributeInGenericTypeWithTypevarValuesUsingInference1] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): def f(self, x: X) -> None: self.x = x # E: Need type annotation for variable ci: C[int] cs: C[str] reveal_type(ci.x) # E: Revealed type is 'Any' reveal_type(cs.x) # E: Revealed type is 'Any' [case testAttributeInGenericTypeWithTypevarValuesUsingInference2] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): def f(self, x: X) -> None: self.x = 1 reveal_type(self.x) # E: Revealed type is 'builtins.int' ci: C[int] cs: C[str] reveal_type(ci.x) # E: Revealed type is 'builtins.int' reveal_type(cs.x) # E: Revealed type is 'builtins.int' [case testAttributeInGenericTypeWithTypevarValuesUsingInference3] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): x: X def f(self) -> None: self.y = self.x # E: Need type annotation for variable ci: C[int] cs: C[str] reveal_type(ci.y) # E: Revealed type is 'Any' reveal_type(cs.y) # E: Revealed type is 'Any' [case testInferredAttributeInGenericClassBodyWithTypevarValues] from typing import TypeVar, Generic X = TypeVar('X', int, str) class C(Generic[X]): x = 1 C.x = 1 C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testMultipleClassTypevarsWithValues1] from typing import TypeVar, Generic class A: def f(self, x: int) -> None: pass class B: def f(self, x: str) -> None: pass X = TypeVar('X', A, B) Y = TypeVar('Y', int, str) class C(Generic[X, Y]): def f(self, x: X, y: Y) -> None: x.f(y) [out] main:10: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" main:10: error: Argument 1 to "f" of "B" has incompatible type "int"; expected "str" [case testMultipleClassTypevarsWithValues2] from typing import TypeVar, Generic class A: pass class B: pass X = TypeVar('X', A, B) Y = TypeVar('Y', int, str) class C(Generic[X, Y]): pass a = None # type: C[A, int] b = None # type: C[B, str] c = None # type: C[int, int] # E: Value of 
type variable "X" of "C" cannot be "int" d = None # type: C[A, A] # E: Value of type variable "Y" of "C" cannot be "A" [case testCallGenericFunctionUsingMultipleTypevarsWithValues] from typing import TypeVar class A: pass class B: pass X = TypeVar('X', A, B) Y = TypeVar('Y', int, str) def f(x: X, y: Y) -> None: pass f(A(), '') f(B(), 1) f(A(), A()) # E: Value of type variable "Y" of "f" cannot be "A" f(1, 1) # E: Value of type variable "X" of "f" cannot be "int" [case testGenericFunctionWithNormalAndRestrictedTypevar] from typing import TypeVar, Generic X = TypeVar('X') Y = TypeVar('Y', int, str) class C(Generic[Y]): def __init__(self, y: Y) -> None: pass def f(x: X, y: Y, z: int) -> None: C(y) C(x) # Error z = x # Error z = y # Error y.foo # Error [out] main:8: error: Value of type variable "Y" of "C" cannot be "X" main:9: error: Incompatible types in assignment (expression has type "X", variable has type "int") main:10: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:11: error: "int" has no attribute "foo" main:11: error: "str" has no attribute "foo" [case testTypeVarWithValueInferredFromObjectReturnTypeContext] from typing import TypeVar T = TypeVar('T', int, str) def c1(x: object) -> None: pass def c2(x: int) -> None: pass def c3(x: str) -> None: pass def g(x: T) -> T: pass c1(g('')) c2(g(1)) c3(g('')) c2(g('')) # E: Argument 1 to "c2" has incompatible type "str"; expected "int" c3(g(1)) # E: Argument 1 to "c3" has incompatible type "int"; expected "str" [case testTypeVarWithValueInferredFromObjectReturnTypeContext2] from typing import TypeVar T = TypeVar('T', int, str) class ss(str): pass def c(x: ss) -> None: pass def g(x: T) -> T: pass c(g('')) c(g(1)) [out] main:6: error: Argument 1 to "c" has incompatible type "str"; expected "ss" main:7: error: Argument 1 to "c" has incompatible type "int"; expected "ss" [case testDefineAttributeInGenericMethodUsingTypeVarWithValues] from typing import TypeVar T = TypeVar('T', int, str) class A: def f(self, x: T) -> None: self.x = x # E: Need type annotation for variable self.y = [x] # E: Need type annotation for variable self.z = 1 reveal_type(A().x) # E: Revealed type is 'Any' reveal_type(A().y) # E: Revealed type is 'Any' reveal_type(A().z) # E: Revealed type is 'builtins.int' [builtins fixtures/list.pyi] -- Special cases -- ------------- [case testTypevarValuesSpecialCase1] from typing import TypeVar, Generic from abc import abstractmethod T = TypeVar('T', int, str) class A(Generic[T]): @abstractmethod def f(self) -> 'A[T]': pass class B(A[str]): @abstractmethod def f(self) -> 'B': pass class C(A[str]): @abstractmethod def f(self) -> int: # E: Return type of "f" incompatible with supertype "A" pass [out] [case testDefaultArgumentValueInGenericClassWithTypevarValues] from typing import TypeVar, Generic T = TypeVar('T', int, str) class C(Generic[T]): def f(self, x: int = None) -> None: pass [case testTypevarValuesWithOverloadedFunctionSpecialCase] from foo import * [file foo.pyi] from typing import TypeVar, overload, Callable T = TypeVar('T', int, str) def f(x: T) -> None: y = m(g, x) x = y y = object() A = TypeVar('A') R = TypeVar('R') def m(f: Callable[[A], R], it: A) -> A: pass @overload def g(x: int) -> int: return x @overload def g(x: str) -> str: return x [out] tmp/foo.pyi:7: error: Incompatible types in assignment (expression has type "object", variable has type "int") tmp/foo.pyi:7: error: Incompatible types in assignment (expression has type "object", variable has type "str") [case 
testGenericFunctionSubtypingWithTypevarValues] from typing import TypeVar class A: pass T = TypeVar('T', int, str) U = TypeVar('U', str, A, int) def f(x: T) -> T: pass def g(x: U) -> U: pass a = f a = f a = g b = g b = g b = f # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[U], U]") [case testInnerFunctionWithTypevarValues] from typing import TypeVar T = TypeVar('T', int, str) U = TypeVar('U', int, str) def outer(x: T) -> T: def inner(y: T) -> T: return x def inner2(y: U) -> U: return y inner(x) inner(3) # E: Argument 1 to "inner" has incompatible type "int"; expected "str" inner2(x) inner2(3) outer(3) return x [out] [case testInnerFunctionMutualRecursionWithTypevarValues] from typing import TypeVar T = TypeVar('T', int, str) def outer(x: T) -> T: def inner1(y: T) -> T: return inner2(y) def inner2(y: T) -> T: return inner1('a') # E: Argument 1 to "inner1" has incompatible type "str"; expected "int" return inner1(x) [out] [case testClassMemberTypeVarInFunctionBody] from typing import TypeVar, List S = TypeVar('S') class C: T = TypeVar('T', bound=int) def f(self, x: T) -> T: L = List[S] y: L[C.T] = [x] C.T # E: Type variable "C.T" cannot be used as an expression A = C.T # E: Type variable "C.T" cannot be used as an expression return y[0] [builtins fixtures/list.pyi] [case testParameterLessGenericAsRestriction] from typing import Sequence, Iterable, TypeVar S = TypeVar('S', Sequence, Iterable) def my_len(s: S) -> None: pass def crash() -> None: my_len((0,)) [case testReferenceToDecoratedFunctionAndTypeVarValues] from typing import TypeVar, Callable T = TypeVar('T') S = TypeVar('S', int, str) def dec(f: Callable[..., T]) -> Callable[..., T]: ... @dec def g(s: S) -> Callable[[S], None]: ... def f(x: S) -> None: h = g(x) h(x) mypy-0.560/test-data/unit/check-underscores.test0000644€tŠÔÚ€2›s®0000000075013215007205026046 0ustar jukkaDROPBOX\Domain Users00000000000000[case testUnderscoresRequire36] # flags: --python-version 3.5 x = 1000_000 # E: Underscores in numeric literals are only supported in Python 3.6 and greater [out] [case testUnderscoresSyntaxError] # flags: --python-version 3.6 x = 1000_000_ # E: invalid token [out] [case testUnderscoresBasics] # flags: --python-version 3.6 x: int x = 1000_000 x = 0x_FF_FF_FF_FF y: str = 1000_000.000_001 # E: Incompatible types in assignment (expression has type "float", variable has type "str") mypy-0.560/test-data/unit/check-unions.test0000644€tŠÔÚ€2›s®0000007334513215007205025037 0ustar jukkaDROPBOX\Domain Users00000000000000-- Type checking of union types [case testUnion1] from typing import Union def f(x: Union[int, str]) -> None: if isinstance(x, int): y = 1 y = x elif isinstance(x, str): z = 'a' z = x [builtins fixtures/isinstance.pyi] [case testUnion2] from typing import Union def f(x: Union[int, str]) -> None: if isinstance(x, int): y = 1 y = x else: z = 'a' z = x [builtins fixtures/isinstance.pyi] [case testUnion3] from typing import Union def f(x: Union[int, str]) -> None: if isinstance(x, int): y = 1 y = x else: z = 2 z = x # E: Incompatible types in assignment (expression has type "str", variable has type "int") [builtins fixtures/isinstance.pyi] [out] [case testUnionAnyIsInstance] from typing import Any, Union def func(v: Union[int, Any]) -> None: if isinstance(v, int): reveal_type(v) # E: Revealed type is 'builtins.int' else: reveal_type(v) # E: Revealed type is 'Any' [builtins fixtures/isinstance.pyi] [out] [case testUnionAttributeAccess] from typing import Union class A: y = 1 
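# A and B both define an int-valued attribute y; C and D define no y at all, so the
# attribute accesses further down report errors only for the union items lacking y.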
class B: y = 2 class C: pass class D: pass u = None # type: Union[A, C, D] v = None # type: Union[C, D] w = None # type: Union[A, B] x = None # type: Union[A, C] y = None # type: int z = None # type: str y = w.y v.y # E: Item "C" of "Union[C, D]" has no attribute "y" \ # E: Item "D" of "Union[C, D]" has no attribute "y" u.y # E: Item "C" of "Union[A, C, D]" has no attribute "y" \ # E: Item "D" of "Union[A, C, D]" has no attribute "y" z = w.y # E: Incompatible types in assignment (expression has type "int", variable has type "str") w.y = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int") y = x.y # E: Item "C" of "Union[A, C]" has no attribute "y" zz = x.y # E: Item "C" of "Union[A, C]" has no attribute "y" z = zz # E: Incompatible types in assignment (expression has type "Union[int, Any]", variable has type "str") [builtins fixtures/isinstance.pyi] [case testUnionMethodCalls] from typing import Union class A: def foo(self) -> int: pass class B: def foo(self) -> int: pass class C: def foo(self) -> str: pass x = None # type: Union[A, B] y = None # type: Union[A, C] i = None # type: int x.foo() y.foo() i = x.foo() i = y.foo() # E: Incompatible types in assignment (expression has type "Union[int, str]", variable has type "int") [builtins fixtures/isinstance.pyi] [case testUnionIndexing] from typing import Union, List x = None # type: Union[List[int], str] x[2] x[2] + 1 # E: Unsupported operand types for + (likely involving Union) [builtins fixtures/isinstancelist.pyi] [case testUnionAsOverloadArg] from foo import * x = 0 x = f(1) x = f('') s = '' s = f(int) s = f(1) # E: Incompatible types in assignment (expression has type "int", variable has type "str") x = f(int) # E: Incompatible types in assignment (expression has type "str", variable has type "int") [file foo.pyi] from typing import Union, overload @overload def f(x: Union[int, str]) -> int: pass @overload def f(x: type) -> str: pass [case testUnionWithNoneItem] from typing import Union def f() -> Union[int, None]: pass x = 1 x = f() [case testOptional] from typing import Optional def f(x: Optional[int]) -> None: pass f(1) f(None) f('') # E: Argument 1 to "f" has incompatible type "str"; expected "Optional[int]" [case testUnionSimplificationGenericFunction] from typing import TypeVar, Union, List T = TypeVar('T') def f(x: List[T]) -> Union[T, int]: pass def g(y: str) -> None: pass a = f([1]) g(a) # E: Argument 1 to "g" has incompatible type "int"; expected "str" [builtins fixtures/list.pyi] [case testUnionSimplificationGenericClass] from typing import TypeVar, Union, Generic T = TypeVar('T') U = TypeVar('U') class C(Generic[T, U]): def f(self, x: str) -> Union[T, U]: pass a = C() # type: C[int, int] b = a.f('a') a.f(b) # E: Argument 1 to "f" of "C" has incompatible type "int"; expected "str" [case testUnionOrderEquivalence] from typing import Union def foo(): pass S = str T = int if foo(): def f(x: Union[int, str]) -> None: pass elif foo(): def f(x: Union[str, int]) -> None: pass elif foo(): def f(x: Union[int, str, int, int, str]) -> None: pass elif foo(): def f(x: Union[int, str, float]) -> None: pass # E: All conditional function variants must have identical signatures elif foo(): def f(x: Union[S, T]) -> None: pass elif foo(): def f(x: Union[str]) -> None: pass # E: All conditional function variants must have identical signatures else: def f(x: Union[Union[int, T], Union[S, T], str]) -> None: pass # Checks bidirectionality of testing. 
The first definition of g is consistent with # the second, but not vice-versa. if foo(): def g(x: Union[int, str, bytes]) -> None: pass else: def g(x: Union[int, str]) -> None: pass # E: All conditional function variants must have identical signatures [case testUnionSimplificationSpecialCases] from typing import Any, TypeVar, Union class C(Any): pass T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass a = None # type: Any reveal_type(u(C(), None)) # E: Revealed type is '__main__.C*' reveal_type(u(None, C())) # E: Revealed type is '__main__.C*' reveal_type(u(C(), a)) # E: Revealed type is 'Union[Any, __main__.C*]' reveal_type(u(a, C())) # E: Revealed type is 'Union[__main__.C*, Any]' reveal_type(u(C(), C())) # E: Revealed type is '__main__.C*' reveal_type(u(a, a)) # E: Revealed type is 'Any' [case testUnionSimplificationSpecialCase2] from typing import Any, TypeVar, Union class C(Any): pass T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass def f(x: T) -> None: reveal_type(u(C(), x)) # E: Revealed type is 'Union[T`-1, __main__.C*]' reveal_type(u(x, C())) # E: Revealed type is 'Union[__main__.C*, T`-1]' [case testUnionSimplificationSpecialCase3] from typing import Any, TypeVar, Generic, Union class C(Any): pass V = TypeVar('V') T = TypeVar('T') class M(Generic[V]): def get(self, default: T) -> Union[V, T]: ... def f(x: M[C]) -> None: y = x.get(None) reveal_type(y) # E: Revealed type is '__main__.C' [case testUnionSimplificationSpecialCases] from typing import Any, TypeVar, Union class C(Any): pass T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass a = None # type: Any # Base-class-Any and None, simplify reveal_type(u(C(), None)) # E: Revealed type is '__main__.C*' reveal_type(u(None, C())) # E: Revealed type is '__main__.C*' # Normal instance type and None, simplify reveal_type(u(1, None)) # E: Revealed type is 'builtins.int*' reveal_type(u(None, 1)) # E: Revealed type is 'builtins.int*' # Normal instance type and base-class-Any, no simplification reveal_type(u(C(), 1)) # E: Revealed type is 'Union[builtins.int*, __main__.C*]' reveal_type(u(1, C())) # E: Revealed type is 'Union[__main__.C*, builtins.int*]' # Normal instance type and Any, no simplification reveal_type(u(1, a)) # E: Revealed type is 'Union[Any, builtins.int*]' reveal_type(u(a, 1)) # E: Revealed type is 'Union[builtins.int*, Any]' # Any and base-class-Any, no simplificaiton reveal_type(u(C(), a)) # E: Revealed type is 'Union[Any, __main__.C*]' reveal_type(u(a, C())) # E: Revealed type is 'Union[__main__.C*, Any]' # Two normal instance types, simplify reveal_type(u(1, object())) # E: Revealed type is 'builtins.object*' reveal_type(u(object(), 1)) # E: Revealed type is 'builtins.object*' # Two normal instance types, no simplification reveal_type(u(1, '')) # E: Revealed type is 'Union[builtins.str*, builtins.int*]' reveal_type(u('', 1)) # E: Revealed type is 'Union[builtins.int*, builtins.str*]' [case testUnionSimplificationWithDuplicateItems] from typing import Any, TypeVar, Union class C(Any): pass T = TypeVar('T') S = TypeVar('S') R = TypeVar('R') def u(x: T, y: S, z: R) -> Union[R, S, T]: pass a = None # type: Any reveal_type(u(1, 1, 1)) # E: Revealed type is 'builtins.int*' reveal_type(u(C(), C(), None)) # E: Revealed type is '__main__.C*' reveal_type(u(a, a, 1)) # E: Revealed type is 'Union[builtins.int*, Any]' reveal_type(u(a, C(), a)) # E: Revealed type is 'Union[Any, __main__.C*]' reveal_type(u('', 1, 1)) # E: Revealed type is 'Union[builtins.int*, 
builtins.str*]' [case testUnionAndBinaryOperation] from typing import Union class A: pass def f(x: Union[int, str, A]): x + object() # E: Unsupported left operand type for + (some union) \ # E: Unsupported operand types for + (likely involving Union) [case testNarrowingDownNamedTupleUnion] from typing import NamedTuple, Union A = NamedTuple('A', [('y', int)]) B = NamedTuple('B', [('x', int)]) C = NamedTuple('C', [('x', int)]) def foo(a: Union[A, B, C]): if isinstance(a, (B, C)): reveal_type(a) # E: Revealed type is 'Union[Tuple[builtins.int, fallback=__main__.B], Tuple[builtins.int, fallback=__main__.C]]' a.x a.y # E: Item "B" of "Union[B, C]" has no attribute "y" \ # E: Item "C" of "Union[B, C]" has no attribute "y" b = a # type: Union[B, C] [builtins fixtures/isinstance.pyi] [case testSimplifyingUnionAndTypePromotions] from typing import TypeVar, Union T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass reveal_type(u(1, 2.3)) # E: Revealed type is 'builtins.float*' reveal_type(u(2.3, 1)) # E: Revealed type is 'builtins.float*' reveal_type(u(False, 2.2)) # E: Revealed type is 'builtins.float*' reveal_type(u(2.2, False)) # E: Revealed type is 'builtins.float*' [builtins fixtures/primitives.pyi] [case testSimplifyingUnionWithTypeTypes1] from typing import TypeVar, Union, Type, Any T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass t_o = None # type: Type[object] t_s = None # type: Type[str] t_a = None # type: Type[Any] # Two identical items reveal_type(u(t_o, t_o)) # E: Revealed type is 'Type[builtins.object]' reveal_type(u(t_s, t_s)) # E: Revealed type is 'Type[builtins.str]' reveal_type(u(t_a, t_a)) # E: Revealed type is 'Type[Any]' reveal_type(u(type, type)) # E: Revealed type is 'def (x: builtins.object) -> builtins.type' # One type, other non-type reveal_type(u(t_s, 1)) # E: Revealed type is 'Union[builtins.int*, Type[builtins.str]]' reveal_type(u(1, t_s)) # E: Revealed type is 'Union[Type[builtins.str], builtins.int*]' reveal_type(u(type, 1)) # E: Revealed type is 'Union[builtins.int*, def (x: builtins.object) -> builtins.type]' reveal_type(u(1, type)) # E: Revealed type is 'Union[def (x: builtins.object) -> builtins.type, builtins.int*]' reveal_type(u(t_a, 1)) # E: Revealed type is 'Union[builtins.int*, Type[Any]]' reveal_type(u(1, t_a)) # E: Revealed type is 'Union[Type[Any], builtins.int*]' reveal_type(u(t_o, 1)) # E: Revealed type is 'Union[builtins.int*, Type[builtins.object]]' reveal_type(u(1, t_o)) # E: Revealed type is 'Union[Type[builtins.object], builtins.int*]' [case testSimplifyingUnionWithTypeTypes2] from typing import TypeVar, Union, Type, Any T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass t_o = None # type: Type[object] t_s = None # type: Type[str] t_a = None # type: Type[Any] t = None # type: type # Union with object reveal_type(u(t_o, object())) # E: Revealed type is 'builtins.object*' reveal_type(u(object(), t_o)) # E: Revealed type is 'builtins.object*' reveal_type(u(t_s, object())) # E: Revealed type is 'builtins.object*' reveal_type(u(object(), t_s)) # E: Revealed type is 'builtins.object*' reveal_type(u(t_a, object())) # E: Revealed type is 'builtins.object*' reveal_type(u(object(), t_a)) # E: Revealed type is 'builtins.object*' # Union between type objects reveal_type(u(t_o, t_a)) # E: Revealed type is 'Union[Type[Any], Type[builtins.object]]' reveal_type(u(t_a, t_o)) # E: Revealed type is 'Union[Type[builtins.object], Type[Any]]' reveal_type(u(t_s, t_o)) # E: Revealed type is 
'Type[builtins.object]' reveal_type(u(t_o, t_s)) # E: Revealed type is 'Type[builtins.object]' reveal_type(u(t_o, type)) # E: Revealed type is 'Type[builtins.object]' reveal_type(u(type, t_o)) # E: Revealed type is 'Type[builtins.object]' reveal_type(u(t_a, t)) # E: Revealed type is 'builtins.type*' reveal_type(u(t, t_a)) # E: Revealed type is 'builtins.type*' # The following should arguably not be simplified, but it's unclear how to fix then # without causing regressions elsewhere. reveal_type(u(t_o, t)) # E: Revealed type is 'builtins.type*' reveal_type(u(t, t_o)) # E: Revealed type is 'builtins.type*' [case testNotSimplifyingUnionWithMetaclass] from typing import TypeVar, Union, Type, Any class M(type): pass class M2(M): pass class A(metaclass=M): pass T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass a: Any t_a: Type[A] reveal_type(u(M(*a), t_a)) # E: Revealed type is 'Union[Type[__main__.A], __main__.M*]' reveal_type(u(t_a, M(*a))) # E: Revealed type is 'Union[__main__.M*, Type[__main__.A]]' reveal_type(u(M2(*a), t_a)) # E: Revealed type is 'Union[Type[__main__.A], __main__.M2*]' reveal_type(u(t_a, M2(*a))) # E: Revealed type is 'Union[__main__.M2*, Type[__main__.A]]' [case testSimplifyUnionWithCallable] from typing import TypeVar, Union, Any, Callable T = TypeVar('T') S = TypeVar('S') def u(x: T, y: S) -> Union[S, T]: pass class C: pass class D(C): pass D_C: Callable[[D], C] A_C: Callable[[Any], C] D_A: Callable[[D], Any] C_C: Callable[[C], C] D_D: Callable[[D], D] i_C: Callable[[int], C] # TODO: Test argument names and kinds once we have flexible callable types. reveal_type(u(D_C, D_C)) # E: Revealed type is 'def (__main__.D) -> __main__.C' reveal_type(u(A_C, D_C)) # E: Revealed type is 'Union[def (__main__.D) -> __main__.C, def (Any) -> __main__.C]' reveal_type(u(D_C, A_C)) # E: Revealed type is 'Union[def (Any) -> __main__.C, def (__main__.D) -> __main__.C]' reveal_type(u(D_A, D_C)) # E: Revealed type is 'Union[def (__main__.D) -> __main__.C, def (__main__.D) -> Any]' reveal_type(u(D_C, D_A)) # E: Revealed type is 'Union[def (__main__.D) -> Any, def (__main__.D) -> __main__.C]' reveal_type(u(D_C, C_C)) # E: Revealed type is 'def (__main__.D) -> __main__.C' reveal_type(u(C_C, D_C)) # E: Revealed type is 'def (__main__.D) -> __main__.C' reveal_type(u(D_C, D_D)) # E: Revealed type is 'def (__main__.D) -> __main__.C' reveal_type(u(D_D, D_C)) # E: Revealed type is 'def (__main__.D) -> __main__.C' reveal_type(u(D_C, i_C)) # E: Revealed type is 'Union[def (builtins.int) -> __main__.C, def (__main__.D) -> __main__.C]' [case testUnionOperatorMethodSpecialCase] from typing import Union class C: def __le__(self, x: 'C') -> int: ... class D: def __le__(self, other) -> int: ... class E: def __ge__(self, other: Union[C, D]) -> int: ... 
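-- A brief recap of the simplification rule exercised by the cases above, restated as a
-- minimal sketch (u is the same two-argument helper the cases themselves define):
--
--     from typing import TypeVar, Union
--     T = TypeVar('T')
--     S = TypeVar('S')
--     def u(x: T, y: S) -> Union[S, T]: ...
--
--     u(1, object())   # one item subsumes the other: revealed as 'builtins.object*'
--     u(1, '')         # unrelated items are kept: 'Union[builtins.str*, builtins.int*]'
--     u(False, 2.2)    # promotions collapse too; see the bool/int/float cases below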
[case testUnionSimplificationWithBoolIntAndFloat] from typing import List, Union l = reveal_type([]) # type: List[Union[bool, int, float]] \ # E: Revealed type is 'builtins.list[builtins.float]' reveal_type(l) \ # E: Revealed type is 'builtins.list[Union[builtins.bool, builtins.int, builtins.float]]' [builtins fixtures/list.pyi] [case testUnionSimplificationWithBoolIntAndFloat2] from typing import List, Union l = reveal_type([]) # type: List[Union[bool, int, float, str]] \ # E: Revealed type is 'builtins.list[Union[builtins.float, builtins.str]]' reveal_type(l) \ # E: Revealed type is 'builtins.list[Union[builtins.bool, builtins.int, builtins.float, builtins.str]]' [builtins fixtures/list.pyi] [case testNestedUnionsProcessedCorrectly] from typing import Union class A: pass class B: pass class C: pass def foo(bar: Union[Union[A, B], C]) -> None: if isinstance(bar, A): reveal_type(bar) # E: Revealed type is '__main__.A' else: reveal_type(bar) # E: Revealed type is 'Union[__main__.B, __main__.C]' [builtins fixtures/isinstance.pyi] [out] [case testAssignAnyToUnion] from typing import Union, Any x: Union[int, str] a: Any if bool(): x = a # TODO: Maybe we should infer Any as the type instead. reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/bool.pyi] [case testAssignAnyToUnionWithAny] from typing import Union, Any x: Union[int, Any] a: Any if bool(): x = a reveal_type(x) # E: Revealed type is 'Any' reveal_type(x) # E: Revealed type is 'Union[builtins.int, Any]' [builtins fixtures/bool.pyi] [case testUnionMultiassignSingle] from typing import Union, Tuple, Any a: Union[Tuple[int], Tuple[float]] (a1,) = a reveal_type(a1) # E: Revealed type is 'builtins.float' b: Union[Tuple[int], Tuple[str]] (b1,) = b reveal_type(b1) # E: Revealed type is 'Union[builtins.int, builtins.str]' [case testUnionMultiassignDouble] from typing import Union, Tuple c: Union[Tuple[int, int], Tuple[int, float]] (c1, c2) = c reveal_type(c1) # E: Revealed type is 'builtins.int' reveal_type(c2) # E: Revealed type is 'builtins.float' [case testUnionMultiassignGeneric] from typing import Union, Tuple, TypeVar T = TypeVar('T') S = TypeVar('S') def pack_two(x: T, y: S) -> Union[Tuple[T, T], Tuple[S, S]]: pass (x, y) = pack_two(1, 'a') reveal_type(x) # E: Revealed type is 'Union[builtins.int*, builtins.str*]' reveal_type(y) # E: Revealed type is 'Union[builtins.int*, builtins.str*]' [case testUnionMultiassignAny] from typing import Union, Tuple, Any d: Union[Any, Tuple[float, float]] (d1, d2) = d reveal_type(d1) # E: Revealed type is 'Union[Any, builtins.float]' reveal_type(d2) # E: Revealed type is 'Union[Any, builtins.float]' e: Union[Any, Tuple[float, float], int] (e1, e2) = e # E: 'builtins.int' object is not iterable [case testUnionMultiassignNotJoin] from typing import Union, List class A: pass class B(A): pass class C(A): pass a: Union[List[B], List[C]] x, y = a reveal_type(x) # E: Revealed type is 'Union[__main__.B*, __main__.C*]' [builtins fixtures/list.pyi] [case testUnionMultiassignRebind] from typing import Union, List class A: pass class B(A): pass class C(A): pass obj: object a: Union[List[B], List[C]] obj, new = a reveal_type(obj) # E: Revealed type is 'Union[__main__.B*, __main__.C*]' reveal_type(new) # E: Revealed type is 'Union[__main__.B*, __main__.C*]' obj = 1 reveal_type(obj) # E: Revealed type is 'builtins.int' [builtins fixtures/list.pyi] [case testUnionMultiassignAlreadyDeclared] from typing 
import Union, Tuple a: Union[Tuple[int, int], Tuple[int, float]] a1: object a2: int (a1, a2) = a # E: Incompatible types in assignment (expression has type "float", variable has type "int") b: Union[Tuple[float, int], Tuple[int, int]] b1: object b2: int (b1, b2) = b reveal_type(b1) # E: Revealed type is 'builtins.float' reveal_type(b2) # E: Revealed type is 'builtins.int' c: Union[Tuple[int, int], Tuple[int, int]] c1: object c2: int (c1, c2) = c reveal_type(c1) # E: Revealed type is 'builtins.int' reveal_type(c2) # E: Revealed type is 'builtins.int' d: Union[Tuple[int, int], Tuple[int, float]] d1: object (d1, d2) = d reveal_type(d1) # E: Revealed type is 'builtins.int' reveal_type(d2) # E: Revealed type is 'builtins.float' [case testUnionMultiassignIndexed] from typing import Union, Tuple, List class B: x: object x: List[int] b: B a: Union[Tuple[int, int], Tuple[int, object]] (x[0], b.x) = a reveal_type(x[0]) # E: Revealed type is 'builtins.int*' reveal_type(b.x) # E: Revealed type is 'builtins.object' [builtins fixtures/list.pyi] [case testUnionMultiassignIndexedWithError] from typing import Union, Tuple, List class A: pass class B: x: int x: List[A] b: B a: Union[Tuple[int, int], Tuple[int, object]] (x[0], b.x) = a # E: Incompatible types in assignment (expression has type "int", target has type "A") \ # E: Incompatible types in assignment (expression has type "object", variable has type "int") reveal_type(x[0]) # E: Revealed type is '__main__.A*' reveal_type(b.x) # E: Revealed type is 'builtins.int' [builtins fixtures/list.pyi] [case testUnionMultiassignPacked] from typing import Union, Tuple, List a: Union[Tuple[int, int, int], Tuple[int, int, str]] a1: int a2: object (a1, *xs, a2) = a reveal_type(a1) # E: Revealed type is 'builtins.int' reveal_type(xs) # E: Revealed type is 'builtins.list[builtins.int*]' reveal_type(a2) # E: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/list.pyi] [case testUnpackingUnionOfListsInFunction] from typing import Union, List def f(x: bool) -> Union[List[int], List[str]]: if x: return [1, 1] else: return ['a', 'a'] def g(x: bool) -> None: a, b = f(x) reveal_type(a) # E: Revealed type is 'Union[builtins.int*, builtins.str*]' reveal_type(b) # E: Revealed type is 'Union[builtins.int*, builtins.str*]' [builtins fixtures/list.pyi] [case testUnionOfVariableLengthTupleUnpacking] from typing import Tuple, Union VarTuple = Union[Tuple[int, int], Tuple[int, int, int]] def make_tuple() -> VarTuple: pass x = make_tuple() a, b = x # E: Too many values to unpack (2 expected, 3 provided) a, b, c = x # E: Need more than 2 values to unpack (3 expected) c, *d = x reveal_type(c) # E: Revealed type is 'builtins.int' reveal_type(d) # E: Revealed type is 'builtins.list[builtins.int*]' [builtins fixtures/tuple.pyi] [case testUnionOfNonIterableUnpacking] from typing import Union bad: Union[int, str] x, y = bad # E: 'builtins.int' object is not iterable \ # E: 'builtins.str' object is not iterable reveal_type(x) # E: Revealed type is 'Any' reveal_type(y) # E: Revealed type is 'Any' [out] [case testUnionAlwaysTooMany] from typing import Union, Tuple bad: Union[Tuple[int, int, int], Tuple[str, str, str]] x, y = bad # E: Too many values to unpack (2 expected, 3 provided) reveal_type(x) # E: Revealed type is 'Any' reveal_type(y) # E: Revealed type is 'Any' [builtins fixtures/tuple.pyi] [out] [case testUnionAlwaysTooFew] from typing import Union, Tuple bad: Union[Tuple[int, int, int], Tuple[str, str, str]] x, y, z, w = bad # E: Need more than 3 values to unpack 
(4 expected) reveal_type(x) # E: Revealed type is 'Any' reveal_type(y) # E: Revealed type is 'Any' reveal_type(z) # E: Revealed type is 'Any' reveal_type(w) # E: Revealed type is 'Any' [builtins fixtures/tuple.pyi] [out] [case testUnionUnpackingChainedTuple] from typing import Union, Tuple good: Union[Tuple[int, int], Tuple[str, str]] x, y = t = good reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(t) # E: Revealed type is 'Union[Tuple[builtins.int, builtins.int], Tuple[builtins.str, builtins.str]]' [builtins fixtures/tuple.pyi] [out] [case testUnionUnpackingChainedTuple2] from typing import Union, Tuple good: Union[Tuple[int, int], Tuple[str, str]] t = x, y = good reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(t) # E: Revealed type is 'Union[Tuple[builtins.int, builtins.int], Tuple[builtins.str, builtins.str]]' [builtins fixtures/tuple.pyi] [out] [case testUnionUnpackingChainedTuple3] from typing import Union, Tuple good: Union[Tuple[int, int], Tuple[str, str]] x, y = a, b = good reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(a) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(b) # E: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/tuple.pyi] [out] [case testUnionUnpackingChainedList] from typing import Union, List good: Union[List[int], List[str]] lst = x, y = good reveal_type(x) # E: Revealed type is 'Union[builtins.int*, builtins.str*]' reveal_type(y) # E: Revealed type is 'Union[builtins.int*, builtins.str*]' reveal_type(lst) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str]]' [builtins fixtures/list.pyi] [out] [case testUnionUnpackingChainedList2] from typing import Union, List good: Union[List[int], List[str]] x, *y, z = lst = good reveal_type(x) # E: Revealed type is 'Union[builtins.int*, builtins.str*]' reveal_type(y) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str]]' reveal_type(z) # E: Revealed type is 'Union[builtins.int*, builtins.str*]' reveal_type(lst) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str]]' [builtins fixtures/list.pyi] [out] [case testUnionUnpackingInForTuple] from typing import Union, Tuple, NamedTuple class NTInt(NamedTuple): x: int y: int class NTStr(NamedTuple): x: str y: str nt: Union[NTInt, NTStr] for nx in nt: reveal_type(nx) # E: Revealed type is 'Union[builtins.int*, builtins.str*]' t: Union[Tuple[int, int], Tuple[str, str]] for x in t: # TODO(Ivan): This will be OK when tuple fallback patches are added (like above) reveal_type(x) # E: Revealed type is 'Any' [builtins fixtures/for.pyi] [out] [case testUnionUnpackingInForList] from typing import Union, List, Tuple t: Union[List[Tuple[int, int]], List[Tuple[str, str]]] for x, y in t: reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.str]' t2: List[Union[Tuple[int, int], Tuple[str, str]]] for x2, y2 in t2: reveal_type(x2) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y2) # E: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/for.pyi] [out] [case testUnionUnpackingDoubleBinder] from typing import 
Union, Tuple x: object y: object class A: pass class B: pass t1: Union[Tuple[A, A], Tuple[B, B]] t2: Union[Tuple[int, int], Tuple[str, str]] x, y = t1 reveal_type(x) # E: Revealed type is 'Union[__main__.A, __main__.B]' reveal_type(y) # E: Revealed type is 'Union[__main__.A, __main__.B]' x, y = t2 reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.str]' x, y = object(), object() reveal_type(x) # E: Revealed type is 'builtins.object' reveal_type(y) # E: Revealed type is 'builtins.object' [builtins fixtures/tuple.pyi] [out] [case testUnionUnpackingFromNestedTuples] from typing import Union, Tuple t: Union[Tuple[int, Tuple[int, int]], Tuple[str, Tuple[str, str]]] x, (y, z) = t reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(z) # E: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/tuple.pyi] [out] [case testNestedUnionUnpackingFromNestedTuples] from typing import Union, Tuple class A: pass class B: pass t: Union[Tuple[int, Union[Tuple[int, int], Tuple[A, A]]], Tuple[str, Union[Tuple[str, str], Tuple[B, B]]]] x, (y, z) = t reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # E: Revealed type is 'Union[builtins.int, __main__.A, builtins.str, __main__.B]' reveal_type(z) # E: Revealed type is 'Union[builtins.int, __main__.A, builtins.str, __main__.B]' [builtins fixtures/tuple.pyi] [out] [case testNestedUnionUnpackingFromNestedTuplesBinder] from typing import Union, Tuple class A: pass class B: pass x: object y: object z: object t: Union[Tuple[int, Union[Tuple[int, int], Tuple[A, A]]], Tuple[str, Union[Tuple[str, str], Tuple[B, B]]]] x, (y, z) = t reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(y) # E: Revealed type is 'Union[builtins.int, __main__.A, builtins.str, __main__.B]' reveal_type(z) # E: Revealed type is 'Union[builtins.int, __main__.A, builtins.str, __main__.B]' [builtins fixtures/tuple.pyi] [out] [case testUnpackUnionNoCrashOnPartialNone] # flags: --strict-optional from typing import Dict, Tuple, List, Any a: Any d: Dict[str, Tuple[List[Tuple[str, str]], str]] x, _ = d.get(a, (None, None)) for y in x: pass # E: Iterable expected \ # E: Item "None" of "Optional[List[Tuple[str, str]]]" has no attribute "__iter__" if x: for s, t in x: reveal_type(s) # E: Revealed type is 'builtins.str' [builtins fixtures/dict.pyi] [out] [case testUnpackUnionNoCrashOnPartialNone2] # flags: --strict-optional from typing import Dict, Tuple, List, Any a: Any x = None d: Dict[str, Tuple[List[Tuple[str, str]], str]] x, _ = d.get(a, (None, None)) for y in x: pass # E: Iterable expected \ # E: Item "None" of "Optional[List[Tuple[str, str]]]" has no attribute "__iter__" if x: for s, t in x: reveal_type(s) # E: Revealed type is 'builtins.str' [builtins fixtures/dict.pyi] [out] [case testUnpackUnionNoCrashOnPartialNoneBinder] # flags: --strict-optional from typing import Dict, Tuple, List, Any x: object a: Any d: Dict[str, Tuple[List[Tuple[str, str]], str]] x, _ = d.get(a, (None, None)) reveal_type(x) # E: Revealed type is 'Union[builtins.list[Tuple[builtins.str, builtins.str]], builtins.None]' if x: for y in x: pass [builtins fixtures/dict.pyi] [out] [case testUnpackUnionNoCrashOnPartialNoneList] # flags: --strict-optional from typing import Dict, Tuple, List, Any a: Any d: Dict[str, Tuple[List[Tuple[str, str]], str]] x, _ = 
d.get(a, ([], [])) reveal_type(x) # E: Revealed type is 'Union[builtins.list[Tuple[builtins.str, builtins.str]], builtins.list[]]' for y in x: pass [builtins fixtures/dict.pyi] [out] [case testLongUnionFormatting] from typing import Any, Generic, TypeVar, Union T = TypeVar('T') class ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes(Generic[T]): pass x: Union[ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[int], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[object], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[float], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[str], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[Any], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[bytes]] def takes_int(arg: int) -> None: pass takes_int(x) # E: Argument 1 to "takes_int" has incompatible type ; expected "int" mypy-0.560/test-data/unit/check-unreachable-code.test0000644€tŠÔÚ€2›s®0000002776013215007205026705 0ustar jukkaDROPBOX\Domain Users00000000000000-- Type checker test cases for conditional checks that result in some -- blocks classified as unreachable (they are not type checked or semantically -- analyzed). -- -- For example, we skip blocks that will not be executed on the active -- Python version. [case testConditionalTypeAliasPY3] import typing def f(): pass PY3 = f() if PY3: t = int x = object() + 'x' # E: Unsupported left operand type for + ("object") else: t = str y = 'x' / 1 x z = 1 # type: t [case testConditionalTypeAliasPY3_python2] import typing def f(): pass PY3 = f() if PY3: t = int x = object() + 'x' else: t = str y = 'x' / 1 # E: "str" has no attribute "__div__" y z = '' # type: t [case testConditionalAssignmentPY2] import typing def f(): pass PY2 = f() if PY2: x = object() + 'x' else: y = 'x' / 1 # E: Unsupported left operand type for / ("str") y [case testConditionalAssignmentPY2_python2] import typing def f(): pass PY2 = f() if PY2: x = object() + 'x' # E: Unsupported left operand type for + ("object") else: y = 'x' / 1 x [case testConditionalImport] import typing def f(): pass PY2 = f() if PY2: import fuzzybar from barbar import * from pawwaw import a, bc else: import m [file m.py] import typing x = 1 x = 'a' [out] tmp/m.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testNegatedMypyConditional] import typing MYPY = 0 if not MYPY: import xyz753 else: import pow123 # E [builtins fixtures/bool.pyi] [out] main:6: error: Cannot find module named 'pow123' main:6: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testMypyConditional] import typing MYPY = 0 if MYPY: None + 1 # E: Unsupported left operand type for + ("None") else: None + '' [builtins fixtures/bool.pyi] [case testTypeCheckingConditional] import typing if typing.TYPE_CHECKING: import pow123 # E else: import xyz753 [out] main:3: error: Cannot find module named 'pow123' main:3: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testTypeCheckingConditionalFromImport] from typing import TYPE_CHECKING if TYPE_CHECKING: import pow123 # E else: import xyz753 [out] main:3: error: Cannot find module named 'pow123' main:3: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testNegatedTypeCheckingConditional] import typing if not typing.TYPE_CHECKING: import pow123 # E else: import xyz753 [builtins fixtures/bool.pyi] [out] main:5: error: Cannot find module 
named 'xyz753' main:5: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testUndefinedTypeCheckingConditional] if not TYPE_CHECKING: # E import pow123 else: import xyz753 [builtins fixtures/bool.pyi] [out] main:1: error: Name 'TYPE_CHECKING' is not defined main:4: error: Cannot find module named 'xyz753' main:4: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testConditionalClassDefPY3] def f(): pass PY3 = f() if PY3: pass else: class X(object): pass [case testUnreachabilityAndElifPY3] def f(): pass PY3 = f() if PY3: pass elif bool(): import nonexistent 1 + '' else: import bad_name 1 + '' [builtins fixtures/bool.pyi] [out] [case testSysVersionInfo_python2] import sys if sys.version_info[0] >= 3: def foo(): # type: () -> int return 0 else: def foo(): # type: () -> str return '' reveal_type(foo()) # E: Revealed type is 'builtins.str' [builtins_py2 fixtures/ops.pyi] [out] [case testSysVersionInfo] import sys if sys.version_info[0] >= 3: def foo() -> int: return 0 else: def foo() -> str: return '' reveal_type(foo()) # E: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoNegated_python2] import sys if not (sys.version_info[0] < 3): def foo(): # type: () -> int return 0 else: def foo(): # type: () -> str return '' reveal_type(foo()) # E: Revealed type is 'builtins.str' [builtins_py2 fixtures/ops.pyi] [out] [case testSysVersionInfoNegated] import sys if not (sys.version_info[0] < 3): def foo() -> int: return 0 else: def foo() -> str: return '' reveal_type(foo()) # E: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced1] import sys if sys.version_info[:1] >= (3,): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced2] import sys if sys.version_info[:2] >= (3, 0): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced3] import sys if sys.version_info[:] >= (3, 0): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced4] import sys if sys.version_info[0:2] >= (3, 0): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced5] import sys if sys.version_info[0:] >= (3,): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced6] import sys if sys.version_info[1:] >= (5,): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced7] import sys if sys.version_info >= (3, 5): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced8] # Our pyversion only has (major, minor), # so testing for (major, minor, bugfix) is unsupported. import sys if sys.version_info >= (3, 5, 0): def foo() -> int: return 0 else: def foo() -> str: return '' # E: All conditional function variants must have identical signatures [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoSliced9] # Our pyversion only has (major, minor), # so testing for (minor, bugfix) is unsupported (also it's silly :-). 
import sys if sys.version_info[1:] >= (5, 0): def foo() -> int: return 0 else: def foo() -> str: return '' # E: All conditional function variants must have identical signatures [builtins fixtures/ops.pyi] [out] [case testSysPlatform1] import sys if sys.platform == 'fictional': def foo() -> int: return 0 else: def foo() -> str: return '' foo() + '' [builtins fixtures/ops.pyi] [out] [case testSysPlatform2] import sys if sys.platform != 'fictional': def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysPlatformNegated] import sys if not (sys.platform == 'fictional'): def foo() -> int: return 0 else: def foo() -> str: return '' foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoClass] import sys if sys.version_info < (3, 5): class C: pass else: class C: def foo(self) -> int: return 0 C().foo() + 0 [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoImport] import sys if sys.version_info >= (3, 5): import collections else: collections = None Pt = collections.namedtuple('Pt', 'x y z') [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoVariable] import sys if sys.version_info >= (3, 5): x = '' else: x = 0 x + '' [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoInClass] import sys class C: if sys.version_info >= (3, 5): def foo(self) -> int: return 0 else: def foo(self) -> str: return '' reveal_type(C().foo()) # E: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] [out] [case testSysVersionInfoInFunction] import sys def foo() -> None: if sys.version_info >= (3, 5): x = '' else: x = 0 reveal_type(x) # E: Revealed type is 'builtins.str' [builtins fixtures/ops.pyi] [out] [case testSysPlatformInMethod] import sys class C: def foo(self) -> None: if sys.platform != 'fictional': x = '' else: x = 0 reveal_type(x) # E: Revealed type is 'builtins.str' [builtins fixtures/ops.pyi] [out] [case testSysPlatformInFunctionImport1] import sys def foo() -> None: if sys.platform != 'fictional': import a else: import b as a a.x [file a.py] x = 1 [builtins fixtures/ops.pyi] [out] [case testSysPlatformInFunctionImport2] import sys def foo() -> None: if sys.platform == 'fictional': import b as a else: import a a.x [file a.py] x = 1 [builtins fixtures/ops.pyi] [out] [case testSysPlatformInMethodImport2] import sys class A: def foo(self) -> None: if sys.platform == 'fictional': # TODO: This is inconsistent with how top-level functions work # (https://github.com/python/mypy/issues/4324) import b as a # E: Cannot find module named 'b' \ # N: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) else: import a a.x [file a.py] x = 1 [builtins fixtures/ops.pyi] [out] [case testCustomSysVersionInfo] # flags: --python-version 3.5 import sys if sys.version_info == (3, 5): x = "foo" else: x = 3 reveal_type(x) # E: Revealed type is 'builtins.str' [builtins fixtures/ops.pyi] [out] [case testCustomSysVersionInfo2] # flags: --python-version 3.5 import sys if sys.version_info == (3, 6): x = "foo" else: x = 3 reveal_type(x) # E: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] [out] [case testCustomSysPlatform] # flags: --platform linux import sys if sys.platform == 'linux': x = "foo" else: x = 3 reveal_type(x) # E: Revealed type is 'builtins.str' [builtins fixtures/ops.pyi] [out] [case testCustomSysPlatform2] # flags: --platform win32 import sys if sys.platform == 'linux': x = "foo" else: x = 3 reveal_type(x) # E: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] [out] [case 
testCustomSysPlatformStartsWith] # flags: --platform win32 import sys if sys.platform.startswith('win'): x = "foo" else: x = 3 reveal_type(x) # E: Revealed type is 'builtins.str' [builtins fixtures/ops.pyi] [out] [case testShortCircuitInExpression] import typing def make() -> bool: pass PY2 = PY3 = make() a = PY2 and 's' b = PY3 and 's' c = PY2 or 's' d = PY3 or 's' e = (PY2 or PY3) and 's' f = (PY3 or PY2) and 's' g = (PY2 or PY3) or 's' h = (PY3 or PY2) or 's' reveal_type(a) # E: Revealed type is 'builtins.bool' reveal_type(b) # E: Revealed type is 'builtins.str' reveal_type(c) # E: Revealed type is 'builtins.str' reveal_type(d) # E: Revealed type is 'builtins.bool' reveal_type(e) # E: Revealed type is 'builtins.str' reveal_type(f) # E: Revealed type is 'builtins.str' reveal_type(g) # E: Revealed type is 'builtins.bool' reveal_type(h) # E: Revealed type is 'builtins.bool' [builtins fixtures/ops.pyi] [out] [case testShortCircuitAndWithConditionalAssignment] # flags: --platform linux import sys def f(): pass PY2 = f() if PY2 and sys.platform == 'linux': x = 'foo' else: x = 3 reveal_type(x) # E: Revealed type is 'builtins.int' if sys.platform == 'linux' and PY2: y = 'foo' else: y = 3 reveal_type(y) # E: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] [case testShortCircuitOrWithConditionalAssignment] # flags: --platform linux import sys def f(): pass PY2 = f() if PY2 or sys.platform == 'linux': x = 'foo' else: x = 3 reveal_type(x) # E: Revealed type is 'builtins.str' if sys.platform == 'linux' or PY2: y = 'foo' else: y = 3 reveal_type(y) # E: Revealed type is 'builtins.str' [builtins fixtures/ops.pyi] [case testConditionalAssertWithoutElse] import typing class A: pass class B(A): pass x = A() reveal_type(x) # E: Revealed type is '__main__.A' if typing.TYPE_CHECKING: assert isinstance(x, B) reveal_type(x) # E: Revealed type is '__main__.B' reveal_type(x) # E: Revealed type is '__main__.B' [builtins fixtures/isinstancelist.pyi] mypy-0.560/test-data/unit/check-unsupported.test0000644€tŠÔÚ€2›s®0000000053613215007206026105 0ustar jukkaDROPBOX\Domain Users00000000000000-- Tests for unsupported features [case testDecorateOverloadedFunction] from foo import * [file foo.pyi] # The error messages are not the most informative ever. def d(x): pass @d def f(): pass def f(x): pass # E def g(): pass @d # E def g(x): pass [out] tmp/foo.pyi:5: error: Name 'f' already defined tmp/foo.pyi:7: error: Name 'g' already defined mypy-0.560/test-data/unit/check-varargs.test0000644€tŠÔÚ€2›s®0000004441613215007206025167 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for the type checker related to varargs. -- Varargs within body -- ------------------- [case testVarArgsWithinFunction] from typing import Tuple def f( *b: 'B') -> None: ab = None # type: Tuple[B, ...] ac = None # type: Tuple[C, ...] 
b = ac # E: Incompatible types in assignment (expression has type "Tuple[C, ...]", variable has type "Tuple[B, ...]") ac = b # E: Incompatible types in assignment (expression has type "Tuple[B, ...]", variable has type "Tuple[C, ...]") b = ab ab = b class B: pass class C: pass [builtins fixtures/tuple.pyi] [out] [case testVarArgsAreTuple] from typing import Tuple, Sequence def want_tuple(types: Tuple[type, ...]): pass def want_sequence(types: Sequence[type]): pass def test(*t: type) -> None: want_tuple(t) want_sequence(t) [builtins fixtures/tuple.pyi] [out] -- Calling varargs function -- ------------------------ [case testCallingVarArgsFunction] a = None # type: A b = None # type: B c = None # type: C f(c) # E: Argument 1 to "f" has incompatible type "C"; expected "A" f(a, b, c) # E: Argument 3 to "f" has incompatible type "C"; expected "A" f(g()) # E: "g" does not return a value f(a, g()) # E: "g" does not return a value f() f(a) f(b) f(a, b, a, b) def f( *a: 'A') -> None: pass def g() -> None: pass class A: pass class B(A): pass class C: pass [builtins fixtures/list.pyi] [case testCallingVarArgsFunctionWithAlsoNormalArgs] a = None # type: A b = None # type: B c = None # type: C f(a) # E: Argument 1 to "f" has incompatible type "A"; expected "C" f(c, c) # E: Argument 2 to "f" has incompatible type "C"; expected "A" f(c, a, b, c) # E: Argument 4 to "f" has incompatible type "C"; expected "A" f(c) f(c, a) f(c, b, b, a, b) def f(a: 'C', *b: 'A') -> None: pass class A: pass class B(A): pass class C: pass [builtins fixtures/list.pyi] [case testCallingVarArgsFunctionWithDefaultArgs] a = None # type: A b = None # type: B c = None # type: C f(a) # E: Argument 1 to "f" has incompatible type "A"; expected "Optional[C]" f(c, c) # E: Argument 2 to "f" has incompatible type "C"; expected "A" f(c, a, b, c) # E: Argument 4 to "f" has incompatible type "C"; expected "A" f() f(c) f(c, a) f(c, b, b, a, b) def f(a: 'C' = None, *b: 'A') -> None: pass class A: pass class B(A): pass class C: pass [builtins fixtures/list.pyi] [case testCallVarargsFunctionWithIterable] from typing import Iterable it1 = None # type: Iterable[int] it2 = None # type: Iterable[str] def f(*x: int) -> None: pass f(*it1) f(*it2) # E: Argument 1 to "f" has incompatible type "*Iterable[str]"; expected "int" [builtins fixtures/for.pyi] [case testCallVarargsFunctionWithIterableAndPositional] from typing import Iterable it1 = None # type: Iterable[int] def f(*x: int) -> None: pass f(*it1, 1, 2) f(*it1, 1, *it1, 2) f(*it1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int" [builtins fixtures/for.pyi] [case testCallVarargsFunctionWithTupleAndPositional] def f(*x: int) -> None: pass it1 = (1, 2) f(*it1, 1, 2) f(*it1, 1, *it1, 2) f(*it1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int" [builtins fixtures/for.pyi] -- Calling varargs function + type inference -- ----------------------------------------- [case testTypeInferenceWithCalleeVarArgs] from typing import TypeVar T = TypeVar('T') a = None # type: A b = None # type: B c = None # type: C o = None # type: object a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") b = f(b, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B") o = f() a = f(a) a = f(b) a = f(a, b, a) o = f(a, b, o) c = f(c) def f( *a: T) -> T: pass class A: pass class B(A): pass class C: pass [builtins 
fixtures/list.pyi] [case testTypeInferenceWithCalleeVarArgsAndDefaultArgs] from typing import TypeVar T = TypeVar('T') a = None # type: A o = None # type: object a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") a = f(a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") a = f(a, a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") a = f(a, a, a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") a = f(a) a = f(a, a) a = f(a, a, a) def f(a: T, b: T = None, *c: T) -> T: pass class A: pass [builtins fixtures/list.pyi] -- Calling normal function with varargs -- ------------------------------------ [case testCallingWithListVarArgs] from typing import List, Any, cast aa = None # type: List[A] ab = None # type: List[B] a = None # type: A b = None # type: B f(*aa) # Fail f(a, *ab) # Ok f(a, b) (cast(Any, f))(*aa) # IDEA: Move to check-dynamic? (cast(Any, f))(a, *ab) # IDEA: Move to check-dynamic? def f(a: 'A', b: 'B') -> None: pass class A: pass class B: pass [builtins fixtures/list.pyi] [out] main:7: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" [case testCallingWithTupleVarArgs] a = None # type: A b = None # type: B c = None # type: C cc = None # type: CC f(*(a, b, b)) # E: Argument 1 to "f" has incompatible type "*Tuple[A, B, B]"; expected "C" f(*(b, b, c)) # E: Argument 1 to "f" has incompatible type "*Tuple[B, B, C]"; expected "A" f(a, *(b, b)) # E: Argument 2 to "f" has incompatible type "*Tuple[B, B]"; expected "C" f(b, *(b, c)) # E: Argument 1 to "f" has incompatible type "B"; expected "A" f(*(a, b)) # E: Too few arguments for "f" f(*(a, b, c, c)) # E: Too many arguments for "f" f(a, *(b, c, c)) # E: Too many arguments for "f" f(*(a, b, c)) f(a, *(b, c)) f(a, b, *(c,)) f(a, *(b, cc)) def f(a: 'A', b: 'B', c: 'C') -> None: pass class A: pass class B: pass class C: pass class CC(C): pass [builtins fixtures/tuple.pyi] [case testInvalidVarArg] a = None # type: A f(*None) f(*a) # E: List or tuple expected as variable arguments f(*(a,)) def f(a: 'A') -> None: pass class A: pass [builtins fixtures/tuple.pyi] -- Calling varargs function with varargs -- ------------------------------------- [case testCallingVarArgsFunctionWithListVarArgs] from typing import List aa, ab, a, b = None, None, None, None # type: (List[A], List[B], A, B) f(*aa) # Fail f(a, *aa) # Fail f(b, *ab) # Fail f(a, a, *ab) # Fail f(a, b, *aa) # Fail f(b, b, *ab) # Fail g(*ab) # Fail f(a, *ab) f(a, b, *ab) f(a, b, b, *ab) g(*aa) def f(a: 'A', *b: 'B') -> None: pass def g(a: 'A', *b: 'A') -> None: pass class A: pass class B: pass [builtins fixtures/list.pyi] [out] main:3: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" main:5: error: Argument 1 to "f" has incompatible type "B"; expected "A" main:6: error: Argument 2 to "f" has incompatible type "A"; expected "B" main:7: error: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A" main:9: error: Argument 1 to "g" has incompatible type "*List[B]"; expected "A" [case testCallingVarArgsFunctionWithTupleVarArgs] a, b, c, cc = None, None, None, None # type: (A, B, C, CC) f(*(b, b, b)) # E: Argument 1 to "f" has incompatible type "*Tuple[B, B, B]"; expected "A" f(*(a, a, b)) # E: Argument 1 to "f" 
has incompatible type "*Tuple[A, A, B]"; expected "B" f(*(a, b, a)) # E: Argument 1 to "f" has incompatible type "*Tuple[A, B, A]"; expected "B" f(a, *(a, b)) # E: Argument 2 to "f" has incompatible type "*Tuple[A, B]"; expected "B" f(b, *(b, b)) # E: Argument 1 to "f" has incompatible type "B"; expected "A" f(b, b, *(b,)) # E: Argument 1 to "f" has incompatible type "B"; expected "A" f(a, a, *(b,)) # E: Argument 2 to "f" has incompatible type "A"; expected "B" f(a, b, *(a,)) # E: Argument 3 to "f" has incompatible type "*Tuple[A]"; expected "B" f(*()) # E: Too few arguments for "f" f(*(a, b, b)) f(a, *(b, b)) f(a, b, *(b,)) def f(a: 'A', *b: 'B') -> None: pass class A: pass class B: pass class C: pass class CC(C): pass [builtins fixtures/list.pyi] -- Varargs special cases -- --------------------- [case testDynamicVarArg] from typing import Any d, a = None, None # type: (Any, A) f(a, a, *d) # Fail f(a, *d) # Fail f(*d) # Ok g(*d) g(a, *d) g(a, a, *d) def f(a: 'A') -> None: pass def g(a: 'A', *b: 'A') -> None: pass class A: pass [builtins fixtures/list.pyi] [out] main:3: error: Too many arguments for "f" main:4: error: Too many arguments for "f" [case testListVarArgsAndSubtyping] from typing import List aa = None # type: List[A] ab = None # type: List[B] g(*aa) # E: Argument 1 to "g" has incompatible type "*List[A]"; expected "B" f(*aa) f(*ab) g(*ab) def f( *a: 'A') -> None: pass def g( *a: 'B') -> None: pass class A: pass class B(A): pass [builtins fixtures/list.pyi] [case testCallerVarArgsAndDefaultArgs] a, b = None, None # type: (A, B) f(*()) # Fail f(a, *[a]) # Fail f(a, b, *[a]) # Fail f(*(a, a, b)) # Fail f(*(a,)) f(*(a, b)) f(*(a, b, b, b)) f(a, *[]) f(a, *[b]) f(a, *[b, b]) def f(a: 'A', b: 'B' = None, *c: 'B') -> None: pass class A: pass class B: pass [builtins fixtures/list.pyi] [out] main:3: error: Too few arguments for "f" main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "Optional[B]" main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" main:5: error: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" main:6: error: Argument 1 to "f" has incompatible type "*Tuple[A, A, B]"; expected "Optional[B]" [case testVarArgsAfterKeywordArgInCall1-skip] # see: mypy issue #2729 def f(x: int, y: str) -> None: pass f(x=1, *[2]) [builtins fixtures/list.pyi] [out] main:2: error: "f" gets multiple values for keyword argument "x" main:2: error: Argument 2 to "f" has incompatible type *List[int]; expected "str" [case testVarArgsAfterKeywordArgInCall2-skip] # see: mypy issue #2729 def f(x: int, y: str) -> None: pass f(y='x', *[1]) [builtins fixtures/list.pyi] [out] main:2: error: "f" gets multiple values for keyword argument "y" main:2: error: Argument 2 to "f" has incompatible type *List[int]; expected "str" [case testVarArgsAfterKeywordArgInCall3] def f(x: int, y: str) -> None: pass f(y='x', *(1,)) [builtins fixtures/list.pyi] [case testVarArgsAfterKeywordArgInCall4] def f(x: int, *, y: str) -> None: pass f(y='x', *[1]) [builtins fixtures/list.pyi] [case testVarArgsAfterKeywordArgInCall5] def f(x: int, *, y: str) -> None: pass f(y='x', *(1,)) [builtins fixtures/list.pyi] -- Overloads + varargs -- ------------------- [case testIntersectionTypesAndVarArgs] from foo import * [file foo.pyi] from typing import overload a, b = None, None # type: (A, B) b = f() # E: Incompatible types in assignment (expression has type "A", variable has type "B") b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type 
"B") b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = f(b, b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") b = f(a, *[b]) # E: Incompatible types in assignment (expression has type "A", variable has type "B") b = f(*()) # E: Incompatible types in assignment (expression has type "A", variable has type "B") b = f(*(a,)) # E: Incompatible types in assignment (expression has type "A", variable has type "B") b = f(*(a, b)) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = f(*(b,)) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = f(*(b, b)) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = f(*[b]) # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = f() a = f(a) a = f(a, b) b = f(b) b = f(b, b) a = f(a, *[b]) a = f(*()) a = f(*(a,)) a = f(*(a, b)) b = f(*(b,)) b = f(*(b, b)) b = f(*[b]) class A: pass class B: pass @overload def f(a: A = None, *b: B) -> A: pass @overload def f(a: B, *b: B) -> B: pass [builtins fixtures/list.pyi] -- Caller varargs + type inference -- ------------------------------- [case testCallerVarArgsListWithTypeInference] from typing import List, TypeVar, Tuple S = TypeVar('S') T = TypeVar('T') a, b, aa = None, None, None # type: (A, B, List[A]) a, b = f(*aa) # Fail b, b = f(*aa) # Fail a, a = f(b, *aa) # Fail b, b = f(b, *aa) # Fail b, b = f(b, b, *aa) # Fail a, b = f(a, *a) # Fail a, b = f(*a) # Fail a, a = f(*aa) b, a = f(b, *aa) b, a = f(b, a, *aa) def f(a: S, *b: T) -> Tuple[S, T]: pass class A: pass class B: pass [builtins fixtures/list.pyi] [out] main:6: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" main:7: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A" main:9: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" main:10: error: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" main:11: error: List or tuple expected as variable arguments main:12: error: List or tuple expected as variable arguments [case testCallerVarArgsTupleWithTypeInference] from typing import TypeVar, Tuple S = TypeVar('S') T = TypeVar('T') a, b = None, None # type: (A, B) a, a = f(*(a, b)) # E: Argument 1 to "f" has incompatible type "*Tuple[A, B]"; expected "A" b, b = f(a, *(b,)) # E: Argument 1 to "f" has incompatible type "A"; expected "B" a, a = f(*(a, b)) # E: Argument 1 to "f" has incompatible type "*Tuple[A, B]"; expected "A" b, b = f(a, *(b,)) # E: Argument 1 to "f" has incompatible type "A"; expected "B" a, b = f(*(a, b, b)) # E: Too many arguments for "f" a, b = f(*(a, b)) a, b = f(a, *(b,)) def f(a: S, b: T) -> Tuple[S, T]: pass class A: pass class B: pass [builtins fixtures/list.pyi] [case testCallerVarargsAndComplexTypeInference] from typing import List, TypeVar, Generic, Tuple T = TypeVar('T') S = TypeVar('S') a, b = None, None # type: (A, B) ao = None # type: List[object] aa = None # type: List[A] ab = None # type: List[B] a, aa = G().f(*[a]) # Fail aa, a = G().f(*[a]) # Fail ab, aa = G().f(*[a]) # Fail ao, ao = G().f(*[a]) # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[object]") aa, aa = G().f(*[a]) # E: Incompatible types in 
assignment (expression has type "List[]", variable has type "List[A]") class G(Generic[T]): def f(self, *a: S) -> Tuple[List[S], List[T]]: pass class A: pass class B: pass [builtins fixtures/list.pyi] [out] main:9: error: Incompatible types in assignment (expression has type "List[A]", variable has type "A") main:9: error: Incompatible types in assignment (expression has type "List[]", variable has type "List[A]") main:10: error: Incompatible types in assignment (expression has type "List[]", variable has type "A") main:11: error: Incompatible types in assignment (expression has type "List[]", variable has type "List[A]") main:11: error: Argument 1 to "f" of "G" has incompatible type "*List[A]"; expected "B" -- Comment signatures -- ------------------ [case testVarArgsAndCommentSignature] import typing def f(*x): # type: (*int) -> None pass f(1) f(1, 2) f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int" f(1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int" [builtins fixtures/list.pyi] -- Subtyping -- --------- [case testVarArgsFunctionSubtyping] from typing import Callable x = None # type: Callable[[int], None] def f(*x: int) -> None: pass def g(*x: str) -> None: pass x = f x = g # E: Incompatible types in assignment (expression has type "Callable[[VarArg(str)], None]", variable has type "Callable[[int], None]") [builtins fixtures/list.pyi] [out] -- Decorated method where self is implied by *args -- ----------------------------------------------- [case testVarArgsCallableSelf] from typing import Callable def cm(func) -> Callable[..., None]: pass class C: @cm def foo(self) -> None: pass C().foo() C().foo(1) # The decorator's return type says this should be okay [case testInvariantDictArgNote] from typing import Dict, Sequence def f(x: Dict[str, Sequence[int]]) -> None: pass def g(x: Dict[str, float]) -> None: pass def h(x: Dict[str, int]) -> None: pass a = {'a': [1, 2]} b = {'b': ['c', 'd']} c = {'c': 1.0} d = {'d': 1} f(a) # E: Argument 1 to "f" has incompatible type "Dict[str, List[int]]"; expected "Dict[str, Sequence[int]]" \ # N: "Dict" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Mapping" instead, which is covariant in the value type f(b) # E: Argument 1 to "f" has incompatible type "Dict[str, List[str]]"; expected "Dict[str, Sequence[int]]" g(c) g(d) # E: Argument 1 to "g" has incompatible type "Dict[str, int]"; expected "Dict[str, float]" \ # N: "Dict" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Mapping" instead, which is covariant in the value type h(c) # E: Argument 1 to "h" has incompatible type "Dict[str, float]"; expected "Dict[str, int]" h(d) [builtins fixtures/dict.pyi] [case testInvariantListArgNote] from typing import List, Union def f(numbers: List[Union[int, float]]) -> None: pass a = [1, 2] f(a) # E: Argument 1 to "f" has incompatible type "List[int]"; expected "List[Union[int, float]]" \ # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant x = [1] y = ['a'] x = y # E: Incompatible types in assignment (expression has type "List[str]", variable has type "List[int]") [builtins fixtures/list.pyi] [case testInvariantTypeConfusingNames] from typing import TypeVar class Listener: pass class DictReader: pass def f(x: Listener) -> None: pass def g(y: DictReader) -> None: pass a = [1, 2] b = {'b': 1} f(a) # E: 
Argument 1 to "f" has incompatible type "List[int]"; expected "Listener" g(b) # E: Argument 1 to "g" has incompatible type "Dict[str, int]"; expected "DictReader" [builtins fixtures/dict.pyi] mypy-0.560/test-data/unit/check-warnings.test0000644€tŠÔÚ€2›s®0000001122513215007206025342 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for warning generation. -- Redundant casts -- --------------- [case testRedundantCast] # flags: --warn-redundant-casts from typing import cast a = 1 b = cast(str, a) c = cast(int, a) [out] main:5: note: Redundant cast to "int" [case testRedundantCastWithIsinstance] # flags: --warn-redundant-casts from typing import cast, Union x = 1 # type: Union[int, str] if isinstance(x, str): cast(str, x) [builtins fixtures/isinstance.pyi] [out] main:5: note: Redundant cast to "str" [case testCastToSuperclassNotRedundant] # flags: --warn-redundant-casts from typing import cast, TypeVar, List T = TypeVar('T') def add(xs: List[T], ys: List[T]) -> List[T]: pass class A: pass class B(A): pass a = A() b = B() # Without the cast, the following line would fail to type check. c = add([cast(A, b)], [a]) [builtins fixtures/list.pyi] -- Unused 'type: ignore' comments -- ------------------------------ [case testUnusedTypeIgnore] # flags: --warn-unused-ignores a = 1 a = 'a' # type: ignore a = 2 # type: ignore # N: unused 'type: ignore' comment a = 'b' # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testUnusedTypeIgnoreImport] # flags: --warn-unused-ignores import banana # type: ignore import m # type: ignore from m import * # type: ignore [file m.py] pass [out] main:3: note: unused 'type: ignore' comment main:4: note: unused 'type: ignore' comment -- No return -- --------- [case testNoReturn] # flags: --warn-no-return def f() -> int: pass def g() -> int: if bool(): return 1 [builtins fixtures/list.pyi] [out] main:5: error: Missing return statement [case testNoReturnWhile] # flags: --warn-no-return def h() -> int: while True: if bool(): return 1 def i() -> int: while 1: if bool(): return 1 if bool(): break def j() -> int: while 1: if bool(): return 1 if bool(): continue [builtins fixtures/list.pyi] [out] main:7: error: Missing return statement [case testNoReturnExcept] # flags: --warn-no-return def f() -> int: try: return 1 except: pass def g() -> int: try: pass except: return 1 else: return 1 def h() -> int: try: pass except: pass else: pass finally: return 1 [builtins fixtures/exception.pyi] [out] main:2: error: Missing return statement [case testNoReturnEmptyBodyWithDocstring] def f() -> int: """Return the number of peppers.""" # This might be an @abstractmethod, for example pass [out] -- Returning Any -- ------------- [case testReturnAnyFromTypedFunction] # flags: --warn-return-any from typing import Any def g() -> Any: pass def f() -> int: return g() [out] main:4: warning: Returning Any from function declared to return "int" [case testReturnAnyFromTypedFunctionWithSpecificFormatting] # flags: --warn-return-any from typing import Any, Tuple typ = Tuple[int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int] def g() -> Any: pass def f() 
-> typ: return g() [out] main:11: warning: Returning Any from function declared to return [case testReturnAnySilencedFromTypedFunction] # flags: --warn-return-any from typing import Any def g() -> Any: pass def f() -> int: result = g() # type: int return result [out] [case testReturnAnyFromUntypedFunction] # flags: --warn-return-any from typing import Any def g() -> Any: pass def f(): return g() [out] [case testReturnAnyFromAnyTypedFunction] # flags: --warn-return-any from typing import Any def g() -> Any: pass def f() -> Any: return g() [out] [case testOKReturnAnyIfProperSubtype] # flags: --warn-return-any --strict-optional from typing import Any, Optional class Test(object): def __init__(self) -> None: self.attr = "foo" # type: Any def foo(self, do_it: bool) -> Optional[Any]: if do_it: return self.attr # Should not warn here else: return None [builtins fixtures/list.pyi] [out] [case testReturnAnyDeferred] # flags: --warn-return-any def foo(a1: A) -> int: if a1._x: return 1 n = 1 return n class A: def __init__(self, x: int) -> None: self._x = x mypy-0.560/test-data/unit/cmdline.test0000644€tŠÔÚ€2›s®0000005230513215007206024056 0ustar jukkaDROPBOX\Domain Users00000000000000-- Tests for command line parsing -- ------------------------------ -- -- The initial line specifies the command line, in the format -- -- # cmd: mypy -- Directories/packages on the command line -- ---------------------------------------- [case testCmdlinePackage] # cmd: mypy pkg [file pkg/__init__.py] [file pkg/a.py] undef [file pkg/subpkg/__init__.py] [file pkg/subpkg/a.py] undef import pkg.subpkg.a [out] pkg/a.py:1: error: Name 'undef' is not defined pkg/subpkg/a.py:1: error: Name 'undef' is not defined [case testCmdlinePackageSlash] # cmd: mypy pkg/ [file pkg/__init__.py] [file pkg/a.py] undef [file pkg/subpkg/__init__.py] [file pkg/subpkg/a.py] undef import pkg.subpkg.a [out] pkg/a.py:1: error: Name 'undef' is not defined pkg/subpkg/a.py:1: error: Name 'undef' is not defined [case testCmdlineNonPackage] # cmd: mypy dir [file dir/a.py] undef [file dir/subdir/a.py] undef [out] dir/a.py:1: error: Name 'undef' is not defined [case testCmdlineNonPackageSlash] # cmd: mypy dir/ [file dir/a.py] undef [file dir/subdir/a.py] undef [out] dir/a.py:1: error: Name 'undef' is not defined [case testCmdlinePackageContainingSubdir] # cmd: mypy pkg [file pkg/__init__.py] [file pkg/a.py] undef [file pkg/subdir/a.py] undef [out] pkg/a.py:1: error: Name 'undef' is not defined [case testCmdlineNonPackageContainingPackage] # cmd: mypy dir [file dir/a.py] undef import subpkg.a [file dir/subpkg/__init__.py] [file dir/subpkg/a.py] undef [out] dir/subpkg/a.py:1: error: Name 'undef' is not defined dir/a.py:1: error: Name 'undef' is not defined [case testCmdlineInvalidPackageName] # cmd: mypy dir/sub.pkg/a.py [file dir/sub.pkg/__init__.py] [file dir/sub.pkg/a.py] undef [out] sub.pkg is not a valid Python package name [case testBadFileEncoding] # cmd: mypy a.py [file a.py] # coding: uft-8 [out] mypy: can't decode file 'a.py': unknown encoding: uft-8 [case testCannotIgnoreDuplicateModule] # cmd: mypy one/mod/__init__.py two/mod/__init__.py [file one/mod/__init__.py] # type: ignore [file two/mod/__init__.py] # type: ignore [out] two/mod/__init__.py: error: Duplicate module named 'mod' [case testFlagsFile] # cmd: mypy @flagsfile [file flagsfile] -2 main.py [file main.py] def f(): try: 1/0 except ZeroDivisionError, err: print err [case testConfigFile] # cmd: mypy main.py [file mypy.ini] [[mypy] python_version = 2.7 [file main.py] def f(): try: 1/0 
except ZeroDivisionError, err: print err [case testErrorContextConfig] # cmd: mypy main.py [file mypy.ini] [[mypy] show_error_context=True [file main.py] def f() -> None: 0 + "" [out] main.py: note: In function "f": main.py:2: error: Unsupported operand types for + ("int" and "str") [case testAltConfigFile] # cmd: mypy --config-file config.ini main.py [file config.ini] [[mypy] python_version = 2.7 [file main.py] def f(): try: 1/0 except ZeroDivisionError, err: print err [case testPerFileConfigSection] # cmd: mypy x.py y.py z.py [file mypy.ini] [[mypy] disallow_untyped_defs = True [[mypy-y*] disallow_untyped_defs = False [[mypy-z*] disallow_untyped_calls = True [file x.py] def f(a): pass def g(a: int) -> int: return f(a) [file y.py] def f(a): pass def g(a: int) -> int: return f(a) [file z.py] def f(a): pass def g(a: int) -> int: return f(a) [out] z.py:1: error: Function is missing a type annotation z.py:4: error: Call to untyped function "f" in typed context x.py:1: error: Function is missing a type annotation [case testPerFileConfigSectionMultipleMatches] # cmd: mypy xx.py xy.py yx.py yy.py [file mypy.ini] [[mypy] [[mypy-*x*] disallow_untyped_defs = True [[mypy-*y*] disallow_untyped_calls = True [file xx.py] def f(a): pass def g(a: int) -> int: return f(a) [file xy.py] def f(a): pass def g(a: int) -> int: return f(a) [file yx.py] def f(a): pass def g(a: int) -> int: return f(a) [file yy.py] def f(a): pass def g(a: int) -> int: return f(a) [out] yy.py:2: error: Call to untyped function "f" in typed context yx.py:1: error: Function is missing a type annotation yx.py:2: error: Call to untyped function "f" in typed context xy.py:1: error: Function is missing a type annotation xy.py:2: error: Call to untyped function "f" in typed context xx.py:1: error: Function is missing a type annotation [case testMultipleGlobConfigSection] # cmd: mypy x.py y.py z.py [file mypy.ini] [[mypy] [[mypy-x*,z*] disallow_untyped_defs = True [file x.py] def f(a): pass [file y.py] def f(a): pass [file z.py] def f(a): pass [out] z.py:1: error: Function is missing a type annotation x.py:1: error: Function is missing a type annotation [case testConfigErrorNoSection] # cmd: mypy -c pass [file mypy.ini] [out] mypy.ini: No [mypy] section in config file [case testConfigErrorUnknownFlag] # cmd: mypy -c pass [file mypy.ini] [[mypy] bad = 0 [out] mypy.ini: [mypy]: Unrecognized option: bad = 0 [case testConfigErrorBadBoolean] # cmd: mypy -c pass [file mypy.ini] [[mypy] ignore_missing_imports = nah [out] mypy.ini: [mypy]: ignore_missing_imports: Not a boolean: nah [case testConfigErrorNotPerFile] # cmd: mypy -c pass [file mypy.ini] [[mypy] [[mypy-*] python_version = 3.4 [out] mypy.ini: [mypy-*]: Per-module sections should only specify per-module flags (python_version) [case testConfigMypyPath] # cmd: mypy file.py [file mypy.ini] [[mypy] mypy_path = foo:bar , baz [file foo/foo.pyi] def foo(x: int) -> str: ... [file bar/bar.pyi] def bar(x: str) -> list: ... [file baz/baz.pyi] def baz(x: list) -> dict: ... 
[file file.py] import no_stubs from foo import foo from bar import bar from baz import baz baz(bar(foo(42))) baz(bar(foo('oof'))) [out] file.py:1: error: Cannot find module named 'no_stubs' file.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) file.py:6: error: Argument 1 to "foo" has incompatible type "str"; expected "int" [case testIgnoreErrorsConfig] # cmd: mypy x.py y.py [file mypy.ini] [[mypy] [[mypy-x] ignore_errors = True [file x.py] "" + 0 [file y.py] "" + 0 [out] y.py:1: error: Unsupported operand types for + ("str" and "int") [case testConfigFollowImportsNormal] # cmd: mypy main.py [file main.py] from a import x x + 0 x + '' # E import a a.x + 0 a.x + '' # E a.y # E a + 0 # E [file mypy.ini] [[mypy] follow_imports = normal [file a.py] x = 0 x += '' # Error reported here [out] a.py:2: error: Unsupported operand types for + ("int" and "str") main.py:3: error: Unsupported operand types for + ("int" and "str") main.py:6: error: Unsupported operand types for + ("int" and "str") main.py:7: error: Module has no attribute "y" main.py:8: error: Unsupported operand types for + (Module and "int") [case testConfigFollowImportsSilent] # cmd: mypy main.py [file main.py] from a import x x + '' import a a.x + '' a.y a + 0 [file mypy.ini] [[mypy] follow_imports = silent [file a.py] x = 0 x += '' # No error reported [out] main.py:2: error: Unsupported operand types for + ("int" and "str") main.py:4: error: Unsupported operand types for + ("int" and "str") main.py:5: error: Module has no attribute "y" main.py:6: error: Unsupported operand types for + (Module and "int") [case testConfigFollowImportsSkip] # cmd: mypy main.py [file main.py] from a import x reveal_type(x) # Expect Any import a reveal_type(a.x) # Expect Any [file mypy.ini] [[mypy] follow_imports = skip [file a.py] / # No error reported [out] main.py:2: error: Revealed type is 'Any' main.py:4: error: Revealed type is 'Any' [case testConfigFollowImportsError] # cmd: mypy main.py [file main.py] from a import x reveal_type(x) # Expect Any import a # Error reported here reveal_type(a.x) # Expect Any [file mypy.ini] [[mypy] follow_imports = error [file a.py] / # No error reported [out] main.py:1: note: Import of 'a' ignored main.py:1: note: (Using --follow-imports=error, module not passed on command line) main.py:2: error: Revealed type is 'Any' main.py:4: error: Revealed type is 'Any' [case testConfigFollowImportsSelective] # cmd: mypy main.py [file mypy.ini] [[mypy] [[mypy-normal] follow_imports = normal [[mypy-silent] follow_imports = silent [[mypy-skip] follow_imports = skip [[mypy-error] follow_imports = error [file main.py] import normal import silent import skip import error reveal_type(normal.x) reveal_type(silent.x) reveal_type(skip) reveal_type(error) [file normal.py] x = 0 x += '' [file silent.py] x = 0 x += '' [file skip.py] bla bla [file error.py] bla bla [out] main.py:4: note: Import of 'error' ignored main.py:4: note: (Using --follow-imports=error, module not passed on command line) normal.py:2: error: Unsupported operand types for + ("int" and "str") main.py:5: error: Revealed type is 'builtins.int' main.py:6: error: Revealed type is 'builtins.int' main.py:7: error: Revealed type is 'Any' main.py:8: error: Revealed type is 'Any' [case testConfigSilentMissingImportsOff] # cmd: mypy main.py [file main.py] import missing # Expect error here reveal_type(missing.x) # Expect Any [file mypy.ini] [[mypy] ignore_missing_imports = False [out] main.py:1: error: Cannot find module named 
'missing' main.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main.py:2: error: Revealed type is 'Any' [case testConfigSilentMissingImportsOn] # cmd: mypy main.py [file main.py] import missing # No error here reveal_type(missing.x) # Expect Any [file mypy.ini] [[mypy] ignore_missing_imports = True [out] main.py:2: error: Revealed type is 'Any' [case testDotInFilenameOKScript] # cmd: mypy a.b.py c.d.pyi [file a.b.py] undef [file c.d.pyi] whatever [out] c.d.pyi:1: error: Name 'whatever' is not defined a.b.py:1: error: Name 'undef' is not defined [case testDotInFilenameOKFolder] # cmd: mypy my.folder [file my.folder/tst.py] undef [out] my.folder/tst.py:1: error: Name 'undef' is not defined [case testDotInFilenameNoImport] # cmd: mypy main.py [file main.py] import a.b [file a.b.py] whatever [out] main.py:1: error: Cannot find module named 'a' main.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main.py:1: error: Cannot find module named 'a.b' [case testPythonVersionTooOld10] # cmd: mypy -c pass [file mypy.ini] [[mypy] python_version = 1.0 [out] mypy.ini: [mypy]: python_version: Python major version '1' out of range (must be 2 or 3) [case testPythonVersionTooOld26] # cmd: mypy -c pass [file mypy.ini] [[mypy] python_version = 2.6 [out] mypy.ini: [mypy]: python_version: Python 2.6 is not supported (must be 2.7) [case testPythonVersionTooOld32] # cmd: mypy -c pass [file mypy.ini] [[mypy] python_version = 3.2 [out] mypy.ini: [mypy]: python_version: Python 3.2 is not supported (must be 3.3 or higher) [case testPythonVersionTooNew28] # cmd: mypy -c pass [file mypy.ini] [[mypy] python_version = 2.8 [out] mypy.ini: [mypy]: python_version: Python 2.8 is not supported (must be 2.7) [case testPythonVersionTooNew40] # cmd: mypy -c pass [file mypy.ini] [[mypy] python_version = 4.0 [out] mypy.ini: [mypy]: python_version: Python major version '4' out of range (must be 2 or 3) [case testPythonVersionAccepted27] # cmd: mypy -c pass [file mypy.ini] [[mypy] python_version = 2.7 [out] [case testPythonVersionAccepted33] # cmd: mypy -c pass [file mypy.ini] [[mypy] python_version = 3.3 [out] [case testPythonVersionAccepted36] # cmd: mypy -c pass [file mypy.ini] [[mypy] python_version = 3.6 [out] [case testDisallowAnyUnimported] # cmd: mypy main.py [file mypy.ini] [[mypy] disallow_any_unimported = True ignore_missing_imports = True [file main.py] from unreal import F def f(x: F) -> None: pass [out] main.py:3: error: Argument 1 to "f" becomes "Any" due to an unfollowed import [case testDisallowAnyExplicitDefSignature] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m*] disallow_any_explicit = True [file m.py] from typing import Any, List def f(x: Any) -> None: pass def g() -> Any: pass def h() -> List[Any]: pass [out] m.py:3: error: Explicit "Any" is not allowed m.py:6: error: Explicit "Any" is not allowed m.py:9: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitVarDeclaration] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m*] disallow_any_explicit = True [file m.py] from typing import Any, List v: Any = '' w = '' # type: Any class X: y = '' # type: Any [out] m.py:2: error: Explicit "Any" is not allowed m.py:3: error: Explicit "Any" is not allowed m.py:5: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitGenericVarDeclaration] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m*] disallow_any_explicit = True [file m.py] from typing import Any, List v: List[Any] = [] [out] m.py:2: error: 
Explicit "Any" is not allowed [case testDisallowAnyExplicitInheritance] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m*] disallow_any_explicit = True [file m.py] from typing import Any, List class C(Any): pass class D(List[Any]): pass [out] m.py:3: error: Explicit "Any" is not allowed m.py:6: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitAlias] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m*] disallow_any_explicit = True [file m.py] from typing import Any, List X = Any Y = List[Any] def foo(x: X) -> Y: # no error x.nonexistent() # no error return x [out] m.py:3: error: Explicit "Any" is not allowed m.py:4: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitGenericAlias] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m*] disallow_any_explicit = True [file m.py] from typing import Any, List, TypeVar, Tuple T = TypeVar('T') TupleAny = Tuple[Any, T] # error def foo(x: TupleAny[str]) -> None: # no error pass def goo(x: TupleAny[Any]) -> None: # error pass [out] m.py:5: error: Explicit "Any" is not allowed m.py:10: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitCast] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m*] disallow_any_explicit = True [file m.py] from typing import Any, List, cast x = 1 y = cast(Any, x) z = cast(List[Any], x) [out] m.py:4: error: Explicit "Any" is not allowed m.py:5: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitNamedTuple] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m*] disallow_any_explicit = True [file m.py] from typing import Any, List, NamedTuple Point = NamedTuple('Point', [('x', List[Any]), ('y', Any)]) [out] m.py:3: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitTypeVarConstraint] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m*] disallow_any_explicit = True [file m.py] from typing import Any, List, TypeVar T = TypeVar('T', Any, List[Any]) [out] m.py:3: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitNewType] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m*] disallow_any_explicit = True [file m.py] from typing import Any, List, NewType Baz = NewType('Baz', Any) # this error does not come from `--disallow-any-explicit` flag Bar = NewType('Bar', List[Any]) [out] m.py:3: error: Argument 2 to NewType(...) 
must be subclassable (got "Any") m.py:4: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitTypedDictSimple] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m*] disallow_any_explicit = True [file m.py] from mypy_extensions import TypedDict from typing import Any M = TypedDict('M', {'x': str, 'y': Any}) # error M(x='x', y=2) # no error def f(m: M) -> None: pass # no error [out] m.py:4: error: Explicit "Any" is not allowed [case testDisallowAnyExplicitTypedDictGeneric] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m*] disallow_any_explicit = True [file m.py] from mypy_extensions import TypedDict from typing import Any, List M = TypedDict('M', {'x': str, 'y': List[Any]}) # error N = TypedDict('N', {'x': str, 'y': List}) # no error [out] m.py:4: error: Explicit "Any" is not allowed [case testDisallowAnyGenericsTupleNoTypeParams] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m] disallow_any_generics = True [file m.py] from typing import Tuple def f(s: Tuple) -> None: pass # error def g(s) -> Tuple: # error return 'a', 'b' def h(s) -> Tuple[str, str]: # no error return 'a', 'b' x: Tuple = () # error [out] m.py:3: error: Missing type parameters for generic type m.py:4: error: Missing type parameters for generic type m.py:8: error: Missing type parameters for generic type [case testDisallowAnyGenericsTupleWithNoTypeParamsGeneric] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m] disallow_any_generics = True [file m.py] from typing import Tuple, List def f(s: List[Tuple]) -> None: pass # error def g(s: List[Tuple[str, str]]) -> None: pass # no error [out] m.py:3: error: Missing type parameters for generic type [case testDisallowAnyGenericsTypeType] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m] disallow_any_generics = True [file m.py] from typing import Type, Any def f(s: Type[Any]) -> None: pass # no error def g(s) -> Type: # error return s def h(s) -> Type[str]: # no error return s x: Type = g(0) # error [out] m.py:4: error: Missing type parameters for generic type m.py:8: error: Missing type parameters for generic type [case testDisallowAnyGenericsAliasGenericType] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m] disallow_any_generics = True [file m.py] from typing import List L = List # no error def f(l: L) -> None: pass # error def g(l: L[str]) -> None: pass # no error [out] m.py:5: error: Missing type parameters for generic type [case testDisallowAnyGenericsGenericAlias] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m] disallow_any_generics = True [file m.py] from typing import List, TypeVar, Tuple T = TypeVar('T') A = Tuple[T, str, T] def f(s: A) -> None: pass # error def g(s) -> A: # error return 'a', 'b', 1 def h(s) -> A[str]: # no error return 'a', 'b', 'c' x: A = ('a', 'b', 1) # error [out] m.py:6: error: Missing type parameters for generic type m.py:7: error: Missing type parameters for generic type m.py:11: error: Missing type parameters for generic type [case testDisallowAnyGenericsPlainList] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m] disallow_any_generics = True [file m.py] from typing import List def f(l: List) -> None: pass # error def g(l: List[str]) -> None: pass # no error def h(l: List[List]) -> None: pass # error def i(l: List[List[List[List]]]) -> None: pass # error x = [] # error: need type annotation y: List = [] # error [out] m.py:3: error: Missing type parameters for generic type m.py:5: error: Missing type parameters for generic type m.py:6: error: Missing type parameters for generic type m.py:8: error: Need type annotation for 
variable m.py:9: error: Missing type parameters for generic type [case testDisallowAnyGenericsCustomGenericClass] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m] disallow_any_generics = True [file m.py] from typing import Generic, TypeVar, Any T = TypeVar('T') class G(Generic[T]): pass def f() -> G: # error return G() x: G[Any] = G() # no error y: G = x # error [out] m.py:6: error: Missing type parameters for generic type m.py:10: error: Missing type parameters for generic type [case testDisallowAnyGenericsBuiltinCollections] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m] disallow_any_generics = True [file m.py] s = tuple([1, 2, 3]) # no error def f(t: tuple) -> None: pass def g() -> list: pass def h(s: dict) -> None: pass def i(s: set) -> None: pass def j(s: frozenset) -> None: pass [out] m.py:3: error: Implicit generic "Any". Use 'typing.Tuple' and specify generic parameters m.py:4: error: Implicit generic "Any". Use 'typing.List' and specify generic parameters m.py:5: error: Implicit generic "Any". Use 'typing.Dict' and specify generic parameters m.py:6: error: Implicit generic "Any". Use 'typing.Set' and specify generic parameters m.py:7: error: Implicit generic "Any". Use 'typing.FrozenSet' and specify generic parameters [case testDisallowAnyGenericsTypingCollections] # cmd: mypy m.py [file mypy.ini] [[mypy] [[mypy-m] disallow_any_generics = True [file m.py] from typing import Tuple, List, Dict, Set, FrozenSet def f(t: Tuple) -> None: pass def g() -> List: pass def h(s: Dict) -> None: pass def i(s: Set) -> None: pass def j(s: FrozenSet) -> None: pass [out] m.py:3: error: Missing type parameters for generic type m.py:4: error: Missing type parameters for generic type m.py:5: error: Missing type parameters for generic type m.py:6: error: Missing type parameters for generic type m.py:7: error: Missing type parameters for generic type [case testDisallowSubclassingAny] # cmd: mypy m.py y.py [file mypy.ini] [[mypy] disallow_subclassing_any = True [[mypy-m] disallow_subclassing_any = False [file m.py] from typing import Any x = None # type: Any class ShouldBeFine(x): ... [file y.py] from typing import Any x = None # type: Any class ShouldNotBeFine(x): ... [out] y.py:5: error: Class cannot subclass 'x' (has type 'Any') [case testDeterministicSectionOrdering] # cmd: mypy a [file a/__init__.py] [file a/b/__init__.py] [file a/b/c/__init__.py] [file a/b/c/d/__init__.py] [file a/b/c/d/e/__init__.py] 0() [file mypy.ini] [[mypy] [[mypy-a.*] ignore_errors = True [[mypy-a.b.*] ignore_errors = True [[mypy-a.b.c.*] ignore_errors = True [[mypy-a.b.c.d.*] ignore_errors = True [[mypy-a.b.c.d.e] ignore_errors = False [out] a/b/c/d/e/__init__.py:1: error: "int" not callable [case testDisallowUntypedDefsAndGenerics] # cmd: mypy a.py [file mypy.ini] [[mypy] disallow_untyped_defs = True disallow_any_generics = True [file a.py] def get_tasks(self): return 'whatever' [out] a.py:1: error: Function is missing a type annotation mypy-0.560/test-data/unit/deps-classes.test0000644€tŠÔÚ€2›s®0000000071213215007206025024 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for generating fine-grained dependencies for classes. -- -- The dependencies are used for fined-grained incremental checking. 
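-- Each expected-output line pairs a trigger with the targets to re-process
-- when that trigger fires (the module under test appears as 'm' in the
-- expected output).  As a rough illustration (not itself a test case, and
-- the exact trigger spelling is approximate), a body such as
--
--   def f(a: Any) -> None:
--       n = N(a)
--       n.a
--
-- is expected to yield lines of the form "<m.N.a> -> m.f", i.e. a change to
-- the attribute 'N.a' re-triggers checking of 'f'.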
-- TODO: Move class related test cases from deps.test to here [case testNamedTuple] from typing import NamedTuple, Any from a import A N = NamedTuple('N', [('a', 'A')]) def f(a: Any) -> None: n = N(a) n.a [file a.py] class A: pass [out] -> m.f -> m.f -> m.f -> , m mypy-0.560/test-data/unit/deps-expressions.test0000644€tŠÔÚ€2›s®0000001753413215007206025763 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for generating fine-grained dependencies for expressions. -- -- The dependencies are used for fined-grained incremental checking. [case testListExpr] def f() -> int: pass def g() -> None: a = [f()] [builtins fixtures/list.pyi] [out] -> m.g [case testDictExpr] def f1() -> int: pass def f2() -> int: pass def g() -> None: a = {f1(): 1, 2: f2()} [builtins fixtures/dict.pyi] [out] -> m.g -> m.g [case testSetExpr] def f() -> int: pass def g() -> None: a = {f()} [builtins fixtures/set.pyi] [out] -> m.g [case testTupleExpr] def f1() -> int: pass def f2() -> int: pass def g() -> None: a = (f1(), f2()) [builtins fixtures/tuple.pyi] [out] -> m.g -> m.g [case testListComprehension] from typing import Iterator class A: def __iter__(self) -> Iterator[int]: pass def f1() -> int: pass def f2() -> int: pass def g() -> None: a = [f1() for x in A() if f2()] [builtins fixtures/list.pyi] [out] -> m.g -> m.g -> m.A, m.g -> m.g -> m.g [case testSetComprehension] from typing import Set def f1() -> int: pass def f2() -> Set[int]: pass def f3() -> int: pass def g() -> None: a = {f1() for x in f2() if f3()} [builtins fixtures/set.pyi] [out] -> m.g -> m.g -> m.g [case testDictComprehension] from typing import Iterator class A: def __iter__(self) -> Iterator[int]: pass def f1() -> int: pass def f2() -> int: pass def f3() -> int: pass def g() -> None: a = {f1(): f2() for x in A() if f3()} [builtins fixtures/dict.pyi] [out] -> m.g -> m.g -> m.A, m.g -> m.g -> m.g -> m.g [case testGeneratorExpr] from typing import List def f1() -> int: pass def f2() -> List[int]: pass def f3() -> int: pass def g() -> None: a = (f1() for x in f2() if f3()) [builtins fixtures/list.pyi] [out] -> m.g -> m.g -> m.g [case testConditionalExpr] def f1() -> int: pass def f2() -> int: pass def f3() -> int: pass def g() -> None: a = f1() if f2() else f3() [out] -> m.g -> m.g -> m.g [case testAwaitExpr] def f(): pass async def g() -> None: x = await f() [builtins fixtures/async_await.pyi] [typing fixtures/typing-full.pyi] [out] -> m.g [case testStarExpr] from typing import Iterator class A: def __iter__(self) -> Iterator[int]: pass def g() -> None: a = [*A()] [builtins fixtures/list.pyi] [out] -> m.g -> m.g -> m.A, m.g [case testCast] from typing import cast class A: pass def f() -> object: pass def g() -> None: x = cast(A, f()) [out] -> m.A, m.g -> m.g [case testTypeApplication] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') class A(Generic[T, S]): def __init__(self, x): pass class B: pass class C: pass def f() -> int: pass def g() -> None: x = A[B, C](f()) [out] -> m.g -> m.A, m.g -> m.B, m.g -> m.C, m.g -> m.A -> m.A -> m.g [case testIndexExpr] class A: def __getitem__(self, x: int) -> int: pass def f1() -> A: pass def f2() -> int: pass def g(a: A) -> int: return f1()[f2()] [out] -> m.g -> , , m.A, m.f1, m.g -> m.g -> m.g [case testIndexExpr] class A: def __getitem__(self, x: int) -> int: pass def f1() -> A: pass def f2() -> int: pass def g(a: A) -> int: return f1()[f2()] [out] -> m.g -> , , m.A, m.f1, m.g -> m.g -> m.g [case testIndexExprLvalue] class A: def __setitem__(self, x: int, y: int) -> None: pass def 
f1() -> A: pass def f2() -> int: pass def f3() -> int: pass def g(a: A) -> None: f1()[f2()] = f3() [out] -- __getitem__ dependency is redundant but harmless -> m.g -> m.g -> , , m.A, m.f1, m.g -> m.g -> m.g -> m.g [case testUnaryExpr] class A: def __neg__(self) -> int: pass def __pos__(self) -> int: pass def __invert__(self) -> int: pass def f1() -> A: pass def f2() -> A: pass def f3() -> A: pass def g1() -> int: return +f1() def g2() -> int: return -f2() def g3() -> int: return ~f3() [out] -> m.g3 -> m.g2 -> m.g1 -> , , , m.A, m.f1, m.f2, m.f3 -> m.g1 -> m.g2 -> m.g3 [case testOpExpr] class A: def __add__(self, x: 'B') -> int: pass class B: pass def f() -> int: a: A b: B return a + b [out] -> m.f -> m.A, m.f -> m.f -> , m.A.__add__, m.B, m.f [case testComparisonExpr] class A: def __lt__(self, x: 'B') -> int: pass class B: pass def f() -> int: return A() < B() [out] -> m.f -> m.f -> m.A, m.f -> m.f -> m.f -> , m.A.__lt__, m.B, m.f [case testIsOp-skip] class A: pass class B: pass def f() -> bool: return A() is B() [builtins fixtures/bool.pyi] [out] -- fails because of https://github.com/python/mypy/issues/4055 -> m.f -> m.A, m.f -> m.f -> m.B, m.f [case testInOp] class A: def __contains__(self, x: B) -> int: pass class B: pass def f() -> int: return B() in A() [out] -> m.f -> m.f -> m.A, m.f -> m.f -> , m.A.__contains__, m.B, m.f [case testComparisonExprWithMultipleOperands] class A: def __lt__(self, x: 'B') -> int: pass class B: pass class C: def __ge__(self, x: 'B') -> int: pass def f() -> int: return A() < B() <= C() [out] -> m.f -> m.f -> m.A, m.f -> m.f -> m.f -> m.f -> , , m.A.__lt__, m.B, m.C.__ge__, m.f -> m.f -> m.f -> m.C, m.f [case testOperatorWithTupleOperand] from typing import Tuple class C(Tuple[int, str]): def __and__(self, x: D) -> int: pass def __neg__(self) -> int: pass class D: pass def f() -> None: c: C d: D x = c & d y = -c [builtins fixtures/tuple.pyi] [out] -> m.f -> m.f -> m.C, m.f -> m.f -> , m.C.__and__, m.D, m.f [case testUnionTypeOperation] from typing import Union class A: def __add__(self, x: str) -> int: pass class B: def __add__(self, x: str) -> int: pass def f(a: Union[A, B]) -> int: return a + '' [out] -> m.f -> , m.A, m.f -> m.f -> , m.B, m.f [case testBackquoteExpr_python2] def g(): # type: () -> int pass def f(): # type: () -> str return `g()` [out] -> m.f [case testComparison_python2] class A: def __cmp__(self, other): # type: (B) -> int pass class B: pass def f(a, b): # type: (A, B) -> None x = a == b def g(a, b): # type: (A, B) -> None x = a < b [out] -> m.f, m.g -> m.f -> m.g -> , , m.A, m.f, m.g -> m.f, m.g -> m.f -> m.g -> , , , m.A.__cmp__, m.B, m.f, m.g [case testSliceExpr] class A: def __getitem__(self, x) -> None: pass def f1() -> int: pass def f2() -> int: pass def f3() -> int: pass def f4() -> int: pass def f5() -> int: pass def f() -> None: a: A a[f1():f2():f3()] a[f4():] a[::f5()] [builtins fixtures/slice.pyi] [out] -> m.f -> m.A, m.f -> m.f -> m.f -> m.f -> m.f -> m.f [case testRevealTypeExpr] def f1() -> int: pass def f() -> None: reveal_type(f1()) # type: ignore [out] -> m.f [case testLambdaExpr] from typing import Callable def f1(c: Callable[[int], str]) -> None: pass def f2() -> str: pass def g() -> None: f1(lambda x: f2()) [out] -> m.g -> m.g mypy-0.560/test-data/unit/deps-generics.test0000644€tŠÔÚ€2›s®0000000450413215007206025171 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for generating fine-grained dependencies involving generics. -- -- The dependencies are used for fined-grained incremental checking. 
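-- A minimal sketch (not from this suite) of a dependency introduced by
-- generics: mentioning A[B] makes the using function depend both on the
-- generic class and on the class used as its type argument.

from typing import Generic, TypeVar

T = TypeVar('T')

class A(Generic[T]):
    pass

class B:
    pass

def f() -> None:
    # The annotation A[B] makes m.f depend on m.A and on m.B: editing either
    # class definition re-triggers checking of m.f.
    a: A[B]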
[case testGenericFunction] from typing import TypeVar T = TypeVar('T') class A: pass def f(x: T) -> T: y: T z: A return x [out] -> m.A, m.f -> , m.f [case testGenericClass] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass class B: pass def f() -> None: a: A[B] [out] -> m.A, m.f -> m.B, m.f -> m.A [case testGenericClassWithMembers] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def g(self, a: T) -> None: self.x = a def f(self) -> T: return self.x [out] -> m.A.f, m.A.g -> m.A -> , , , m.A, m.A.f, m.A.g [case testGenericClassInit] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, a: T) -> None: self.x = a class B: pass def f() -> None: a = A(B()) [out] -> m.f -> m.A.__init__ -> m.A, m.f -> m.f -> m.B, m.f -> , , m.A, m.A.__init__ [case testGenericMethod] from typing import TypeVar T = TypeVar('T') class A: def f(self, x: T) -> T: return x [out] -> m.A -> , m.A.f [case testGenericBaseClass] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass class B(A[C]): pass class C: pass [out] -> -> m.A, m.B -> m.B -> m.B, m.C -> m.A [case testGenericBaseClass2] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass class B(A[T]): pass [out] -> -> m.A, m.B -> m.B -> m.A, m.B [case testTypeVarBound] from typing import TypeVar, Tuple class A: pass class B: pass T = TypeVar('T', bound=Tuple[A, B]) def f(x: T) -> T: return x [out] -> , m.A -> , m.B -> , m.f [case testTypeVarBoundOperations] from typing import TypeVar, Tuple class A: def f(self) -> None: pass def __add__(self, other: int) -> int: pass T = TypeVar('T', bound=A) def f(x: T) -> None: x.f() x + 1 [out] -> m.f -> m.f -> , m.A -> , m.f mypy-0.560/test-data/unit/deps-statements.test0000644€tŠÔÚ€2›s®0000002223513215007206025562 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for generating fine-grained dependencies for statements. -- -- The dependencies are used for fined-grained incremental checking. 
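-- A minimal sketch (not from this suite) of a statement-level dependency: a
-- with-statement makes the enclosing function depend on the context manager's
-- __enter__ and __exit__, so editing those methods re-triggers that function.

class CM:
    def __enter__(self) -> int:
        return 0

    def __exit__(self, a: object, b: object, c: object) -> None:
        pass

def g() -> None:
    # Checking the with-statement records dependencies on CM.__enter__,
    # CM.__exit__ and CM itself for the target m.g.
    with CM() as x:
        print(x + 1)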
[case testIfStmt] def f1() -> int: pass def f2() -> None: pass def f3() -> int: pass def f4() -> None: pass def f5() -> None: pass def g() -> None: if f1(): f2() elif f3(): f4() else: f5() [out] -> m.g -> m.g -> m.g -> m.g -> m.g [case testWhileStmt] def f1() -> int: pass def f2() -> None: pass def f3() -> None: pass def g() -> None: while f1(): f2() else: f3() [out] -> m.g -> m.g -> m.g [case testAssertStmt] def f1() -> int: pass def f2() -> str: pass def f3() -> int: pass def g() -> None: assert f1(), f2() assert f3() [out] -> m.g -> m.g -> m.g [case testRaiseStmt] def f1() -> BaseException: pass def f2() -> BaseException: pass def g1() -> None: raise f1() def g2() -> None: raise f1() from f2() [builtins fixtures/exception.pyi] [out] -> m.g1, m.g2 -> m.g2 [case testTryFinallyStmt] def f1() -> None: pass def f2() -> None: pass def g() -> None: try: f1() finally: f2() [out] -> m.g -> m.g [case testPrintStmt_python2] def f1(): # type: () -> int pass def f2(): # type: () -> int pass def g1(): # type: () -> None print f1() def g2(): # type: () -> None print f1(), f2() [out] -> m.g1, m.g2 -> m.g2 [case testPrintStmtWithFile_python2] class A: def write(self, s): # type: (str) -> None pass def f1(): # type: () -> A pass def f2(): # type: () -> int pass def g(): # type: () -> None print >>f1(), f2() [out] -> m.g -> , m.A, m.f1 -> m.g [case testExecStmt_python2] def f1(): pass def f2(): pass def f3(): pass def g1(): # type: () -> None exec f1() def g2(): # type: () -> None exec f1() in f2() def g3(): # type: () -> None exec f1() in f2(), f3() [out] -> m.g1, m.g2, m.g3 -> m.g2, m.g3 -> m.g3 [case testForStmt] from typing import Iterator class A: def __iter__(self) -> Iterator[int]: pass def f1() -> None: pass def f2() -> None: pass def g() -> None: a: A for x in a: f1() else: f2() [builtins fixtures/list.pyi] [out] -> m.g -> m.g -> m.A, m.g -> m.g -> m.g [case testTryExceptStmt] class A(BaseException): pass class B(BaseException): def f(self) -> None: pass def f1() -> None: pass def f2() -> None: pass def f3() -> None: pass def g() -> None: try: f1() except A: f2() except B as e: e.f() else: f3() [builtins fixtures/exception.pyi] [out] -> m.A, m.g -> m.g -> m.B, m.g -> m.g -> m.g -> m.g [case testTryExceptStmt2] class A(BaseException): pass class B(BaseException): def f(self) -> None: pass def f1() -> None: pass def f2() -> None: pass def g() -> None: try: f1() except (A, B): f2() [builtins fixtures/exception.pyi] [out] -> m.A, m.g -> m.B, m.g -> m.g -> m.g [case testWithStmt] from typing import Any class A: def __enter__(self) -> 'B': pass def __exit__(self, a, b, c) -> None: pass class B: def f(self) -> None: pass def g() -> None: a: A with a as x: x.f() [out] -> m.g -> m.g -> m.A, m.g -> m.g -> , m.A.__enter__, m.B [case testWithStmt2] from typing import Any class A: def __enter__(self) -> 'C': pass def __exit__(self, a, b, c) -> None: pass class B: def __enter__(self) -> 'D': pass def __exit__(self, a, b, c) -> None: pass class C: pass class D: pass def g() -> None: a: A b: B with a as x, b as y: pass [out] -> m.g -> m.g -> m.A, m.g -> m.g -> m.g -> m.B, m.g -> , m.A.__enter__, m.C -> , m.B.__enter__, m.D [case testWithStmtAnnotation] from typing import Any class A: def __enter__(self) -> Any: pass def __exit__(self, a, b, c) -> None: pass class B: pass def f(b: B) -> None: pass def g() -> None: a: A with a as x: # type: B f(x) [out] -> m.g -> m.g -> m.A, m.g -> , m.B, m.f, m.g -> m.g [case testForStmtAnnotation] class A: def __iter__(self): pass class B: def f(self) -> None: pass def g() -> 
None: a: A for x in a: # type: B x.f() [builtins fixtures/list.pyi] [out] -> m.g -> m.g -> m.A, m.g -> m.g -> m.B, m.g [case testMultipleAssignment] from typing import Iterator class A: def __iter__(self) -> Iterator[int]: pass def f() -> None: a: A x, y = a [out] -> m.f -> m.A, m.f [case testMultipleLvalues] class A: def f(self) -> None: self.x = 1 self.y = 1 def g() -> None: a: A a.x = a.y = 1 [out] -> m.A.f, m.g -> m.A.f, m.g -> m.A, m.g [case testNestedLvalues] class A: def f(self) -> None: self.x = 1 self.y = '' def g() -> None: a: A a.x, a.y = 1, '' [out] -> m.A.f, m.g -> m.A.f, m.g -> m.A, m.g [case testForAndSetItem] class A: def __setitem__(self, x: int, y: int) -> None: pass def f(): pass def g() -> None: a: A for a[0] in f(): pass [builtins fixtures/list.pyi] [out] -> m.g -> m.g -> m.A, m.g -> m.g [case testMultipleAssignmentAndFor] from typing import Iterator, Iterable class A: def f(self) -> None: self.x = 1 self.y = 1 class B: def __iter__(self) -> Iterator[int]: pass def f() -> Iterable[B]: pass def g() -> None: a: A for a.x, a.y in f(): pass [builtins fixtures/list.pyi] [out] -> m.A.f, m.g -> m.A.f, m.g -> m.A, m.g -> m.g -> m.g -> , m.B, m.f -> m.g [case testNestedSetItem] class A: def __setitem__(self, x: int, y: int) -> None: pass class B: def __setitem__(self, x: int, y: int) -> None: pass def f(): pass def g() -> None: a: A b: B a[0], b[0] = f() [out] -> m.g -> m.g -> m.A, m.g -> m.g -> m.g -> m.B, m.g -> m.g [case testOperatorAssignmentStmt] class A: def __add__(self, other: 'B') -> 'A': pass class B: pass def f() -> B: pass def g() -> None: a: A a += f() [out] -> m.g -> m.g -> , m.A, m.A.__add__, m.g -> , , m.A.__add__, m.B, m.f -> m.g [case testOperatorAssignmentStmtSetItem] class A: def __add__(self, other: 'B') -> 'A': pass class B: pass class C: def __getitem__(self, x: int) -> A: pass def __setitem__(self, x: int, y: A) -> None: pass def f() -> int: pass def g() -> None: b: B c: C c[f()] += b [out] -> m.g -> m.g -> , , , m.A, m.A.__add__, m.C.__getitem__, m.C.__setitem__ -> , m.A.__add__, m.B, m.g -> m.g -> m.g -> m.C, m.g -> m.g [case testYieldStmt] from typing import Iterator class A: pass def f1() -> A: pass def g() -> Iterator[A]: yield f1() [builtins fixtures/list.pyi] [out] -> , , m.A, m.f1, m.g -> m.g [case testDelStmt] class A: def f(self) -> None: self.x = 1 def f() -> A: pass def g() -> None: del f().x [out] -> m.A.f, m.g -> , m.A, m.f -> m.g [case testDelStmtWithIndexing] class A: def __delitem__(self, x: int) -> None: pass def f1() -> A: pass def f2() -> int: pass def g() -> None: del f1()[f2()] [out] -> m.g -- __getitem__ is redundant but harmless -> m.g -> , m.A, m.f1 -> m.g -> m.g [case testYieldFrom] from typing import Iterator class A: def __iter__(self) -> Iterator[int]: pass def f() -> Iterator[int]: yield from A() [out] -> m.f -> m.f -> m.A, m.f [case testFunctionDecorator] from typing import Callable def dec(f: Callable[[int], int]) -> Callable[[str], str]: pass def f() -> int: pass @dec def g(x: int) -> int: return f() [out] -> m -> m.g -> m [case testMethodDecorator] from typing import Callable, Any def dec(f: Callable[[Any, int], int]) -> Callable[[Any, str], str]: pass def f() -> int: pass class A: @dec def g(self, x: int) -> int: return f() [out] -> m -> m.A -> m -> m.A.g [case testNestedFunction] class A: pass def h() -> None: pass def f() -> None: def g(x: A) -> None: h() [out] -> , m.A, m.f -> m.f [case testPlatformCheck] import a import sys def f() -> int: if sys.platform == 'nonexistent': return a.g() else: return 1 [file a.py] 
[builtins fixtures/ops.pyi] [out] -> m -> m.f -> m, m.f mypy-0.560/test-data/unit/deps-types.test0000644€tŠÔÚ€2›s®0000000450213215007206024534 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for generating fine-grained dependencies between types. -- -- The dependencies are used for fined-grained incremental checking. [case testFilterOutBuiltInTypes] class A: pass def f(x: int, y: str, z: A) -> None: pass [out] -> , m.A, m.f [case testTupleType] from typing import Tuple class A: pass class B: pass def f(x: Tuple[A, B]) -> None: pass [out] -> , m.A, m.f -> , m.B, m.f [case testUnionType] from typing import Union class A: pass class B: pass def f() -> None: x: Union[int, A, B] [out] -> m.A, m.f -> m.B, m.f [case testCallableType] from typing import Callable class A: pass class B: pass def f() -> None: x: Callable[[int, A], None] y: Callable[[int, str], B] [out] -> m.A, m.f -> m.B, m.f [case testTypeType] from typing import Type class A: pass def f() -> None: x: Type[A] y: Type[int] [out] -> m.A, m.f [case testTypeTypeAttribute] from typing import Type class A: @staticmethod def f() -> None: pass def f(x: Type[A]) -> None: x.f() [builtins fixtures/staticmethod.pyi] [out] -> m, m.f -> , m.A, m.f [case testComplexNestedType] from typing import Union, Callable, Type class A: pass class B: pass class C: pass def f() -> None: x: Union[int, Callable[[Type[A]], B], C] [out] -> m.A, m.f -> m.B, m.f -> m.C, m.f [case testUnionTypeAttributeAccess] from typing import Union class A: def f(self) -> None: self.x = 0 class B: def f(self) -> None: self.x = '' def f(a: Union[A, B]) -> None: a.x a.f() [out] -> m.f -> m.A.f, m.f -> , m.A, m.f -> m.f -> m.B.f, m.f -> , m.B, m.f [case testTupleTypeAttributeAccess] from typing import Tuple class C(Tuple[int, str]): def f(self) -> None: pass def f(c: C) -> None: c.f() [builtins fixtures/tuple.pyi] [out] -> m.f -> , m.C, m.f [case testOverloaded] from typing import overload class A: pass class B: pass def g() -> None: pass @overload def f(x: A) -> A: pass @overload def f(x: B) -> B: pass def f(x): g() ff = f def h() -> None: f(A()) ff(A()) [out] -> m.h -> , , m.A, m.f, m.h -> , , m.B, m.f -> m, m.h -> m, m.h -> m.f mypy-0.560/test-data/unit/deps.test0000644€tŠÔÚ€2›s®0000001764413215007206023405 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for generating dependencies between ASTs nodes. -- -- The dependencies are used for fined-grained incremental checking. 
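-- A minimal sketch (not from this suite) of dependencies that come purely from
-- types: every class mentioned in an annotation becomes something the annotated
-- target depends on, even when it is never called or instantiated there.

from typing import Callable, Union

class A:
    pass

class B:
    pass

def f() -> None:
    # The Union and Callable annotations make m.f depend on m.A and m.B;
    # changing either class definition re-triggers checking of m.f.
    x: Union[int, A]
    y: Callable[[A], B]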
[case testCallFunction] def f() -> None: g() def g() -> None: pass [out] -> m.f [case testCallMethod] def f(a: A) -> None: a.g() class A: def g(self) -> None: pass [out] -> m.f -> , m.A, m.f [case testAccessAttribute] def f(a: A) -> None: a.x class A: def g(self) -> None: self.x = 1 [out] -> m.A.g, m.f -> , m.A, m.f [case testConstructInstance] def f() -> None: A() class A: pass [out] -> m.f -> m.A, m.f [case testAccessModuleAttribute] class A: pass x = A() def f() -> None: x [out] -> m -> , m, m.A -> m, m.f [case testAccessModuleAttribute2] import n def f() -> None: n.x [file n.py] x = 1 [out] -> m.f -> m, m.f [case testImport] import n [file n.py] x = 1 [out] -> m [case testCallImportedFunction] import n n.f() [file n.py] def f() -> None: pass [out] -> m -> m [case testImportModuleAs] import n as x x.f() [file n.py] def f() -> None: pass [out] -> m -> m [case testCallImportedFunctionInFunction] import n def g() -> None: n.f() [file n.py] def f() -> None: pass [out] -> m.g -> m, m.g [case testInheritanceSimple] class A: pass class B(A): pass [out] -> -> m, m.A, m.B -> m.B [case testInheritanceWithMethodAndAttribute] class A: pass class B(A): def f(self) -> None: self.x = 1 [out] -> -> m.B.f -> -> m, m.A, m.B -> m.B.f -> m.B [case testInheritanceWithMethodAndAttributeAndDeepHierarchy] class A: pass class B(A): pass class C(B): def f(self) -> None: self.x = 1 [out] -> , -> m.C.f -> -> m, m.A, m.B -> -> m.C.f -> -> m, m.B, m.C -> m.C.f -> m.C [case testInheritAttribute] import n class B(n.A): def f(self) -> None: a = 1 a = self.x [file n.py] class A: def g(self) -> None: self.x = 1 [out] -> m.B.f -> m.B -> -> m.B.f -> -> -> m, m.B -> m [case testInheritMethod] class A: def g(self) -> None: pass class B(A): def f(self) -> None: self.g() [out] -> -> m.B.f -> -> m, m.A, m.B -> m.B.f -> m.B [case testPackage] import a.b def f() -> None: a.b.g() [file a/__init__.py] [file a/b.py] def g() -> None: pass [out] -> m.f -> m, m.f -> m.f [case testClassInPackage] import a.b def f(x: a.b.A) -> None: x.g() x.y [file a/__init__.py] [file a/b.py] class A: def g(self) -> None: self.y = 1 [out] -> m.f -> m.f -> , m.f -> m [case testPackage__init__] import a def f() -> None: a.g() [file a/__init__.py] def g() -> None: pass [out] -> m.f -> m, m.f [case testClassInPackage__init__] import a def f(x: a.A) -> None: x.g() x.y [file a/__init__.py] class A: def g(self) -> None: self.y = 1 [out] -> m.f -> m.f -> , m.f -> m [case testConstructor] class A: def __init__(self, x: C) -> None: pass class C: pass def f() -> None: A(C()) [out] -> m.f -> m.A, m.f -> m.f -> , m.A.__init__, m.C, m.f [case testNonTrivialConstructor] class C: def __init__(self) -> None: self.x = 1 [out] -> m.C.__init__ -> m.C [case testImportFrom] from n import f def g() -> None: f() [file n.py] def f() -> None: pass [out] -> m, m.g [case testImportFromAs] from n import f as ff def g() -> None: ff() [file n.py] def f() -> None: pass [out] -> m, m.g [case testNestedClass] def f() -> None: b = A.B() b.f() class A: class B: def f(self) -> None: pass [out] -> m.f -> m.f -> m.A.B, m.f -> m.A, m.f [case testNestedClassAttribute] def f() -> None: b = A.B() b.x class A: class B: def f(self) -> None: self.x = 1 [out] -> m.f -> m.A.B.f, m.f -> m.A.B, m.f -> m.A, m.f [case testNestedClassInAnnotation] def f(x: A.B) -> None: pass class A: class B: pass [out] -> , m.A.B, m.f -> m.A [case testNestedClassInAnnotation2] def f(x: A.B) -> None: x.f() class A: class B: def f(self) -> None: pass [out] -> m.f -> , m.A.B, m.f -> m.A [case testDefaultArgValue] def f1(x: 
int) -> int: pass def f2() -> int: pass def g(x: int = f1(f2())) -> None: pass [out] -> m.g -> m.g [case testIsInstance] class A: def g(self) -> None: pass def f(x: object) -> None: if isinstance(x, A): x.g() [builtins fixtures/isinstancelist.pyi] [out] -> m.f -> m.A, m.f [case testUnreachableIsInstance] class A: x: int class B: y: int def f(x: A) -> None: if isinstance(x, B): x.y [builtins fixtures/isinstancelist.pyi] [out] -> , m.A, m.f -> m.B, m.f [case testAttributeWithClassType1] from n import A class B: def h(self, z: A) -> None: self.z = z [file n.py] class A: pass [out] -> m.B.h -> m.B -> , , m, m.B.h [case testAttributeWithClassType2] from m import A class B: def f(self) -> None: self.x = A() [file m.py] class A: pass [out] -> m.B.f -> m.B -> m.B.f -> , m, m.B.f [case testAttributeWithClassType3] from n import A, x class B: def g(self) -> None: self.x = x [file n.py] class A: pass x = A() [out] -> m.B.g -> m.B -> , m -> m, m.B.g [case testAttributeWithClassType4] from n import A class B: def g(self) -> None: self.x: A [file n.py] class A: pass [out] -> m.B.g -> m.B -> , m, m.B.g [case testClassBody] def f() -> int: pass def g() -> int: pass def h() -> int: pass class A: h() if f(): g() [out] -> m.A -> m -> m -> m [case testVariableInitializedInClass] from n import A class B: x = None # type: A [file n.py] class A: pass [out] -> m.B -> , m [case testVariableAnnotationInClass] from n import A class B: x: A def f(self) -> None: y = self.x [file n.py] class A: pass [out] -> m.B.f -> m.B -> , m [case testGlobalVariableInitialized] from n import A x = A() [file n.py] class A: pass [out] -> m -> m -> , m [case testGlobalVariableAnnotation] from n import A x: A [file n.py] class A: pass [out] -> m -> , m [case testProperty] class B: pass class A: @property def x(self) -> B: pass def f(a: A) -> None: b = a.x [builtins fixtures/property.pyi] [out] -> m, m.f -> , m.A, m.f -> , m.A.x, m.B [case testUnreachableAssignment] from typing import List, Tuple def f() -> None: pass class C: def __init__(self, x: int) -> None: if isinstance(x, int): self.y = 1 else: self.y = f() [builtins fixtures/isinstancelist.pyi] [out] -> m.C.__init__ -> m.C -> m.C.__init__ [case testPartialNoneTypeAttributeCrash1] class C: pass class A: x = None def f(self) -> None: self.x = C() [out] -> m.A.f -> m.A -> m.A.f -> , m.A.f, m.C [case testPartialNoneTypeAttributeCrash2] # flags: --strict-optional class C: pass class A: x = None def f(self) -> None: self.x = C() [out] -> m.A.f -> m.A -> m.A.f -> , m.A.f, m.C mypy-0.560/test-data/unit/diff.test0000644€tŠÔÚ€2›s®0000001611513215007206023352 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for taking a diff of two module ASTs/symbol tables. -- The diffs are used for fined-grained incremental checking. 
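-- A minimal sketch (not from this suite) of an AST-level dependency through
-- inheritance: a subclass depends on its base class and on the inherited
-- members it uses, so edits to the base re-trigger checking of the subclass.

class A:
    def g(self) -> None:
        self.x = 1

class B(A):
    def f(self) -> None:
        # m.B depends on m.A, and B.f depends on A.g and A.x; changing the
        # type assigned to A.x re-checks B.f without touching unrelated code.
        self.g()
        y = self.x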
[case testChangeTypeOfModuleAttribute] x = 1 y = 1 [file next.py] x = '' y = 1 [out] __main__.x [case testChangeSignatureOfModuleFunction] def f(x: int) -> None: pass def g(y: str) -> None: pass [file next.py] def f(x: str) -> None: x = '' def g(y: str) -> None: y = '' [out] __main__.f [case testAddModuleAttribute] x = 1 [file next.py] x = 1 y = 1 [out] __main__.y [case testRemoveModuleAttribute] x = 1 y = 1 [file next.py] x = 1 [out] __main__.y -- -- Classes -- [case testChangeMethodSignature] class A: def f(self) -> None: pass def g(self) -> None: pass [file next.py] class A: def f(self, x: int) -> None: pass def g(self) -> None: pass [out] __main__.A.f [case testChangeAttributeType] class A: def f(self) -> None: self.x = 1 self.y = 1 [file next.py] class A: def f(self) -> None: self.x = 1 self.y = '' [out] __main__.A.y [case testAddAttribute] class A: pass [file next.py] class A: def f(self) -> None: self.x = 1 [out] __main__.A.f __main__.A.x [case testAddAttribute2] class A: def f(self) -> None: pass [file next.py] class A: def f(self) -> None: self.x = 1 [out] __main__.A.x [case testRemoveAttribute] class A: def f(self) -> None: self.x = 1 [file next.py] class A: pass [out] __main__.A.f __main__.A.x [case testAddMethod] class A: def f(self) -> None: pass [file next.py] class A: def f(self) -> None: pass def g(self) -> None: pass [out] __main__.A.g [case testRemoveMethod] class A: def f(self) -> None: pass def g(self) -> None: pass [file next.py] class A: def f(self) -> None: pass [out] __main__.A.g [case testAddImport] import nn [file next.py] import n import nn [file n.py] x = 1 [file nn.py] y = 1 [out] __main__.n [case testRemoveImport] import n [file next.py] [file n.py] x = 1 [out] __main__.n [case testChangeClassIntoFunction] class A: pass [file next.py] def A() -> None: pass [out] __main__.A [case testDeleteClass] class A: pass [file next.py] [out] __main__.A [case testAddBaseClass] class A: pass [file next.py] class B: pass class A(B): pass [out] __main__.A __main__.B [case testChangeBaseClass] class A: pass class B: pass class C(A): pass [file next.py] class A: pass class B: pass class C(B): pass [out] __main__.C [case testRemoveBaseClass] class A: pass class B(A): pass [file next.py] class A: pass class B: pass [out] __main__.B [case testRemoveClassFromMiddleOfMro] class A: pass class B(A): pass class C(B): pass [file next.py] class A: pass class B: pass class C(B): pass [out] __main__.B __main__.C [case testDifferenceInConstructor] class A: def __init__(self) -> None: pass [file next.py] class A: def __init__(self, x: int) -> None: pass [out] __main__.A.__init__ [case testChangeSignatureOfMethodInNestedClass] class A: class B: def f(self) -> int: pass [file next.py] class A: class B: def f(self) -> str: pass [out] __main__.A.B.f [case testChangeTypeOfAttributeInNestedClass] class A: class B: def f(self) -> None: self.x = 1 [file next.py] class A: class B: def f(self) -> None: self.x = '' [out] __main__.A.B.x [case testAddMethodToNestedClass] class A: class B: pass [file next.py] class A: class B: def f(self) -> str: pass [out] __main__.A.B.f [case testAddNestedClass] class A: pass [file next.py] class A: class B: def f(self) -> None: pass [out] __main__.A.B [case testRemoveNestedClass] class A: class B: def f(self) -> None: pass [file next.py] class A: pass [out] __main__.A.B [case testChangeNestedClassToMethod] class A: class B: pass [file next.py] class A: def B(self) -> None: pass [out] __main__.A.B [case testChangeNamedTupleAttribute] from typing import NamedTuple class 
A: x: str N = NamedTuple('N', [('x', int), ('y', str)]) M = NamedTuple('M', [('x', int), ('y', str)]) [file next.py] from typing import NamedTuple N = NamedTuple('N', [('x', int), ('y', int)]) M = NamedTuple('M', [('x', int), ('y', str)]) [out] __main__.A __main__.N.__init__ __main__.N._asdict __main__.N._make __main__.N._replace __main__.N.y [case testSimpleDecoratedFunction] from a import dec @dec def f() -> None: pass @dec def g() -> None: pass [file next.py] from a import dec @dec def f(x: int) -> None: pass @dec def g() -> None: pass [file a.py] from typing import TypeVar T = TypeVar('T') def dec(f: T) -> T: return f [out] __main__.f [case testSimpleDecoratedMethod] from a import dec class A: @dec def f(self) -> None: self.g() @dec def g(self) -> None: pass [file next.py] from a import dec class A: @dec def f(self, x: int) -> None: self.g() @dec def g(self) -> None: pass [file a.py] from typing import TypeVar T = TypeVar('T') def dec(f: T) -> T: return f [out] __main__.A.f [case testTypeVarBound] from typing import TypeVar T = TypeVar('T') S = TypeVar('S') [file next.py] from typing import TypeVar T = TypeVar('T', bound=int) S = TypeVar('S') [out] __main__.T [case testTypeVarVariance] from typing import TypeVar A = TypeVar('A', covariant=True) B = TypeVar('B', covariant=True) C = TypeVar('C', covariant=True) [file next.py] from typing import TypeVar A = TypeVar('A', covariant=True) B = TypeVar('B', contravariant=True) C = TypeVar('C') [out] __main__.B __main__.C [case testTypeVarValues] from typing import TypeVar A = TypeVar('A', int, str) B = TypeVar('B', int, str) C = TypeVar('C', int, str) [file next.py] from typing import TypeVar A = TypeVar('A', int, str) B = TypeVar('B', int, str, object) C = TypeVar('C') [out] __main__.B __main__.C [case testGenericFunction] from typing import TypeVar T = TypeVar('T') S = TypeVar('S') def f(x: T) -> T: pass def g(x: S) -> S: pass [file next.py] from typing import TypeVar T = TypeVar('T', int, str) S = TypeVar('S') def f(x: T) -> T: pass def g(x: S) -> S: pass [out] __main__.T __main__.f [case testGenericTypes] from typing import List x: List[int] y: List[int] [file next.py] from typing import List x: List[int] y: List[str] [builtins fixtures/list.pyi] [out] __main__.y [case testTypeAliasOfList] from typing import List X = List[int] Y = List[int] [file next.py] from typing import List X = List[str] Y = List[int] [builtins fixtures/list.pyi] [out] __main__.X [case testTypeAliasOfCallable] from typing import Callable A = Callable[[int], str] B = Callable[[int], str] C = Callable[[int], str] [file next.py] from typing import Callable A = Callable[[int], str] B = Callable[[], str] C = Callable[[int], int] [out] __main__.B __main__.C [case testGenericTypeAlias] from typing import Callable, TypeVar T = TypeVar('T') A = Callable[[T], T] B = Callable[[T], T] [file next.py] from typing import Callable, TypeVar T = TypeVar('T') S = TypeVar('S') A = Callable[[T], T] B = Callable[[T], S] [out] __main__.B __main__.S [case testDifferentListTypes] from typing import List A = List B = list C = List [file next.py] from typing import List A = List B = list C = list [builtins fixtures/list.pyi] [out] __main__.C mypy-0.560/test-data/unit/fine-grained-blockers.test0000644€tŠÔÚ€2›s®0000001443413215007206026576 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for fine-grained incremental mode and blocking errors -- -- The comments in fine-grained.test explain how these tests work. 
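-- A minimal sketch (not from this suite) of what "blocking" means here: a parse
-- or semantic-analysis error stops an update from being applied at all, so only
-- the blocker is reported, and errors in dependent modules are reported again
-- once a later edit repairs the file.

# Final, repaired version of a hypothetical a.py.  While an intermediate edit
# of this file failed to parse, only the parse error was reported; after this
# version is saved, callers such as "a.f()" elsewhere are re-checked and any
# argument-count errors become visible again.
def f(x: int) -> None:
    pass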
-- TODO: -- - blocking error while other existing errors as well (that get preserved) -- - differences in other modules + blocking error [case testParseError] import a a.f() [file a.py] def f() -> None: pass [file a.py.2] def f(x: int) -> [file a.py.3] def f(x: int) -> None: pass [file a.py.4] def f() -> None: pass [out] == a.py:1: error: invalid syntax == main:2: error: Too few arguments for "f" == [case testParseErrorMultipleTimes] import a a.f() [file a.py] def f() -> None: pass [file a.py.2] def f(x: int) -> [file a.py.3] def f(x: int ) -> None [file a.py.4] def f(x: int) -> None: pass [out] == a.py:1: error: invalid syntax == a.py:2: error: invalid syntax == main:2: error: Too few arguments for "f" [case testSemanticAnalysisBlockingError] import a a.f() [file a.py] def f() -> None: pass [file a.py.2] def f() -> None: pass break [file a.py.3] def f(x: int) -> None: pass [out] == a.py:2: error: 'break' outside loop == main:2: error: Too few arguments for "f" [case testBlockingErrorWithPreviousError] import a import b a.f(1) def g() -> None: b.f(1) [file a.py] def f() -> None: pass [file b.py] def f() -> None: pass [file a.py.2] def f() -> None [file a.py.3] def f() -> None: pass [out] main:3: error: Too many arguments for "f" main:5: error: Too many arguments for "f" == a.py:1: error: invalid syntax == main:3: error: Too many arguments for "f" main:5: error: Too many arguments for "f" [case testUpdateClassReferenceAcrossBlockingError] import a c: a.C def f() -> None: c.f() [file a.py] class C: def f(self) -> None: pass [file a.py.2] error error [file a.py.3] class C: def f(self, x: int) -> None: pass [out] == a.py:1: error: invalid syntax == main:5: error: Too few arguments for "f" of "C" [case testAddFileWithBlockingError] import a a.f(1) [file a.py.2] x x [file a.py.3] def f() -> None: pass [out] main:1: error: Cannot find module named 'a' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == a.py:1: error: invalid syntax == main:2: error: Too many arguments for "f" [case testModifyTwoFilesOneWithBlockingError1] import a [file a.py] import b def f() -> None: pass b.g() [file b.py] import a a.f() def g() -> None: pass [file a.py.2] import b # Dummy edit def f() -> None: pass b.g() [file b.py.2] import a a # Syntax error a.f() def g() -> None: pass [file b.py.3] import a a.f() def g() -> None: pass [out] == b.py:1: error: invalid syntax == [case testModifyTwoFilesOneWithBlockingError2] import a [file a.py] import b def f() -> None: pass b.g() [file b.py] import a a.f() def g() -> None: pass [file a.py.2] import b b def f() -> None: pass b.g() [file b.py.2] import a # Dummy edit a.f() def g() -> None: pass [file a.py.3] import b def f() -> None: pass b.g() [out] == a.py:1: error: invalid syntax == [case testBlockingErrorRemainsUnfixed] import a [file a.py] import b b.f() [file b.py] def f() -> None: pass [file a.py.2] x x [file b.py.3] def f(x: int) -> None: pass [file a.py.4] import b b.f() [out] == a.py:1: error: invalid syntax == a.py:1: error: invalid syntax == a.py:2: error: Too few arguments for "f" [case testModifyTwoFilesIntroduceTwoBlockingErrors] import a [file a.py] import b def f() -> None: pass b.g() [file b.py] import a a.f() def g() -> None: pass [file a.py.2] import b b def f() -> None: pass b.g() [file b.py.2] import a a a.f() def g() -> None: pass [file a.py.3] import b b def f() -> None: pass b.g() [file b.py.3] import a a a.f() def g() -> None: pass [file a.py.4] import b def f() -> None: pass b.g(1) [file b.py.4] import a 
def g() -> None: pass a.f(1) [out] == a.py:1: error: invalid syntax == a.py:1: error: invalid syntax == b.py:3: error: Too many arguments for "f" a.py:3: error: Too many arguments for "g" [case testDeleteFileWithBlockingError] import a import b [file a.py] def f() -> None: pass [file b.py] import a a.f() [file a.py.2] x x [delete a.py.3] [out] == a.py:1: error: invalid syntax == main:1: error: Cannot find module named 'a' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) b.py:1: error: Cannot find module named 'a' -- TODO: Remove redundant errors main:1: error: Cannot find module named 'a' b.py:1: error: Cannot find module named 'a' [case testModifyFileWhileBlockingErrorElsewhere] import a import b [file a.py] [file b.py] import a [file a.py.2] x x [file b.py.3] import a a.f() 1() [file a.py.4] [builtins fixtures/module.pyi] [out] == a.py:1: error: invalid syntax == a.py:1: error: invalid syntax == b.py:2: error: Module has no attribute "f" b.py:3: error: "int" not callable [case testImportBringsAnotherFileWithBlockingError1] import a [file a.py] [file a.py.2] import blocker 1() [file a.py.3] 1() def f() -> None: pass [out] == /test-data/unit/lib-stub/blocker.pyi:2: error: invalid syntax == a.py:1: error: "int" not callable [case testImportBringsAnotherFileWithSemanticAnalysisBlockingError] import a [file a.py] [file a.py.2] import blocker2 1() [file a.py.3] 1() [out] == /test-data/unit/lib-stub/blocker2.pyi:2: error: 'continue' outside loop == a.py:1: error: "int" not callable [case testFixingBlockingErrorTriggersDeletion1] import a def g(x: a.A) -> None: x.f() [file a.py] class A: def f(self) -> None: pass [delete a.py.2] [file a.py.3] class A: pass [builtins fixtures/module.pyi] [out] == main:1: error: Cannot find module named 'a' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == main:4: error: "A" has no attribute "f" [case testFixingBlockingErrorTriggersDeletion2] from a import A def g(x: A) -> None: x.f() [file a.py] class A: def f(self) -> None: pass [delete a.py.2] [file a.py.3] [builtins fixtures/module.pyi] [out] == main:1: error: Cannot find module named 'a' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == main:1: error: Module 'a' has no attribute 'A' [case testFixingBlockingErrorBringsInAnotherModuleWithBlocker] import a [file a.py] [file a.py.2] x y [file a.py.3] import blocker 1() [file a.py.4] import sys 1() [out] == a.py:1: error: invalid syntax == /test-data/unit/lib-stub/blocker.pyi:2: error: invalid syntax == a.py:2: error: "int" not callable mypy-0.560/test-data/unit/fine-grained-cycles.test0000644€tŠÔÚ€2›s®0000000565013215007206026254 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for fine-grained incremental checking and import cycles -- -- The comment at the top of fine-grained.test explains how these tests -- work. 
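-- A minimal sketch (not from this suite) of the import-cycle situation these
-- cases cover: two modules import each other, and a signature change on one
-- side of the cycle still has to propagate to call sites on the other side.

# Hypothetical pair of modules forming a cycle:
#
#   a.py:  from b import f
#   b.py:  import a
#          def f(x: int) -> None:
#              a.f()          # call reached back through the cycle
#
# When an edit adds the "x: int" parameter to f, fine-grained mode follows the
# dependency around the cycle and reports "Too few arguments" at the call in
# b.py, matching what a full, non-incremental run would report.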
[case testFunctionSelfReferenceThroughImportCycle] import a [file a.py] from b import f [file b.py] import a def f() -> None: a.f() [file b.py.2] import a def f(x: int) -> None: a.f() [out] == b.py:4: error: Too few arguments for "f" [case testClassSelfReferenceThroughImportCycle] import a [file a.py] from b import A [file b.py] import a class A: def g(self) -> None: pass def f() -> None: a.A().g() [file b.py.2] import a class A: def g(self, x: int) -> None: pass def f() -> None: a.A().g() [out] == b.py:7: error: Too few arguments for "g" of "A" [case testAnnotationSelfReferenceThroughImportCycle] import a [file a.py] from b import A [file b.py] import a x: a.A class A: def g(self) -> None: pass def f() -> None: x.g() [file b.py.2] import a x: a.A class A: def g(self, x: int) -> None: pass def f() -> None: x.g() [out] == b.py:9: error: Too few arguments for "g" of "A" [case testModuleSelfReferenceThroughImportCycle] import a [file a.py] import b [file b.py] import a def f() -> None: a.b.f() [file b.py.2] import a def f(x: int) -> None: a.b.f() [out] == b.py:4: error: Too few arguments for "f" [case testVariableSelfReferenceThroughImportCycle] import a [file a.py] from b import x [file b.py] import a x: int def f() -> None: a.x = 1 [file b.py.2] import a x: str def f() -> None: a.x = 1 [out] == b.py:6: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testReferenceToTypeThroughCycle] import a [file a.py] from b import C def f() -> C: pass [file b.py] import a class C: def g(self) -> None: pass def h() -> None: c = a.f() c.g() [file b.py.2] import a class C: def g(self, x: int) -> None: pass def h() -> None: c = a.f() c.g() [out] == b.py:8: error: Too few arguments for "g" of "C" [case testReferenceToTypeThroughCycleAndDeleteType] import a [file a.py] from b import C def f() -> C: pass [file b.py] import a class C: def g(self) -> None: pass def h() -> None: c = a.f() c.g() [file b.py.2] import a def h() -> None: c = a.f() c.g() [out] == a.py:1: error: Module 'b' has no attribute 'C' [case testReferenceToTypeThroughCycleAndReplaceWithFunction] import a [file a.py] from b import C def f() -> C: pass [file b.py] import a class C: def g(self) -> None: pass def h() -> None: c = a.f() c.g() [file b.py.2] import a def C() -> int: pass def h() -> None: c = a.f() c.g() [out] == a.py:3: error: Invalid type "b.C" -- TODO: More import cycle: -- -- * "from x import y" through cycle -- * "from x import *" through cycle -- * "Cls.module" though cycle -- * TypeVar -- * type alias -- * all kinds of reference deleted -- * all kinds of reference rebound to different kind -- -- etc. mypy-0.560/test-data/unit/fine-grained-modules.test0000644€tŠÔÚ€2›s®0000002521613215007206026442 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for fine-grained incremental mode related to modules -- -- Covers adding and deleting modules, changes to multiple modules, and -- changes to import graph. -- -- The comments in fine-grained.test explain how these tests work. 
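-- A minimal sketch (not from this suite) of the add/delete scenarios below:
-- deleting a module makes every remaining import site report "Cannot find
-- module named ...", and adding the file (back) clears those errors and
-- re-checks the importing modules against the new contents.

# Hypothetical a.py.  Deleting this file in one pass makes "import a" elsewhere
# report "Cannot find module named 'a'"; restoring it in a later pass re-checks
# the importers against whatever this version defines.
def g(x: int) -> str:
    return str(x)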
-- Add file -- -------- [case testAddFile] import b [file b.py] [file a.py.2] def f() -> None: pass [file b.py.3] import a a.f(1) [out] == == b.py:2: error: Too many arguments for "f" [case testAddFileWithErrors] import b [file b.py] [file a.py.2] def f() -> str: return 1 [file b.py.3] import a a.f(1) [file a.py.4] def f(x: int) -> None: pass [out] == a.py:2: error: Incompatible return value type (got "int", expected "str") == b.py:2: error: Too many arguments for "f" a.py:2: error: Incompatible return value type (got "int", expected "str") == [case testAddFileFixesError] import b [file b.py] [file b.py.2] from a import f f() [file a.py.3] def f() -> None: pass [out] == b.py:1: error: Cannot find module named 'a' b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == [case testAddFileFixesAndGeneratesError] import b [file b.py] [file b.py.2] from a import f [file b.py.3] from a import f f(1) [file a.py.4] def f() -> None: pass [out] == b.py:1: error: Cannot find module named 'a' b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == b.py:1: error: Cannot find module named 'a' b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == b.py:2: error: Too many arguments for "f" [case testAddFilePreservesError1] import b [file b.py] [file b.py.2] from a import f f(1) [file x.py.3] # unrelated change [out] == b.py:1: error: Cannot find module named 'a' b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == b.py:1: error: Cannot find module named 'a' b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testAddFilePreservesError2] import b [file b.py] f() [file a.py.2] [out] b.py:1: error: Name 'f' is not defined == b.py:1: error: Name 'f' is not defined [case testImportLineNumber1] import b [file b.py] [file b.py.2] x = 1 import a [out] == b.py:2: error: Cannot find module named 'a' b.py:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testImportLineNumber2] import b [file b.py] [file b.py.2] x = 1 import a from c import f [file x.py.3] [out] == b.py:2: error: Cannot find module named 'a' b.py:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) b.py:3: error: Cannot find module named 'c' == b.py:2: error: Cannot find module named 'a' b.py:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) b.py:3: error: Cannot find module named 'c' -- Delete file -- ----------- [case testDeleteBasic] import a [file a.py] import b [file b.py] def f() -> None: pass [file a.py.2] [delete b.py.3] [out] == == [case testDeletionTriggersImportFrom] import a [file a.py] from b import f def g() -> None: f() [file b.py] def f() -> None: pass [delete b.py.2] [file b.py.3] def f(x: int) -> None: pass [out] == a.py:1: error: Cannot find module named 'b' a.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == a.py:4: error: Too few arguments for "f" [case testDeletionTriggersImport] import a [file a.py] def f() -> None: pass [delete a.py.2] [file a.py.3] def f() -> None: pass [out] == main:1: error: Cannot find module named 'a' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == [case testDeletionOfSubmoduleTriggersImportFrom1] from p import q [file p/__init__.py] [file p/q.py] [delete 
p/q.py.2] [file p/q.py.3] [out] == main:1: error: Cannot find module named 'p.q' -- TODO: The following messages are different compared to non-incremental mode main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:1: error: Module 'p' has no attribute 'q' == [case testDeletionOfSubmoduleTriggersImportFrom2] from p.q import f f() [file p/__init__.py] [file p/q.py] def f() -> None: pass [delete p/q.py.2] [file p/q.py.3] def f(x: int) -> None: pass [out] == main:1: error: Cannot find module named 'p.q' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == main:2: error: Too few arguments for "f" [case testDeletionOfSubmoduleTriggersImport] import p.q [file p/__init__.py] [file p/q.py] def f() -> None: pass [delete p/q.py.2] [file p/q.py.3] def f(x: int) -> None: pass [out] == main:1: error: Cannot find module named 'p.q' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == [case testDeleteModuleWithError] import a [file a.py] def f() -> int: return 1 [file a.py.2] def f() -> str: return 1 [delete a.py.3] def f() -> str: return 1 [out] == a.py:2: error: Incompatible return value type (got "int", expected "str") == main:1: error: Cannot find module named 'a' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testDeleteModuleWithErrorInsidePackage] import a.b [file a/__init__.py] [file a/b.py] def f() -> int: return '' [delete a/b.py.2] def f() -> str: return 1 [out] a/b.py:2: error: Incompatible return value type (got "str", expected "int") == main:1: error: Cannot find module named 'a.b' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testModifyTwoFilesNoError1] import a [file a.py] import b b.f() [file b.py] def f() -> None: pass [file a.py.2] import b b.f(1) [file b.py.2] def f(x: int) -> None: pass [out] == [case testModifyTwoFilesNoError2] import a [file a.py] from b import g def f() -> None: pass [file b.py] import a def g() -> None: pass a.f() [file a.py.2] from b import g def f(x: int) -> None: pass [file b.py.2] import a def g() -> None: pass a.f(1) [out] == [case testModifyTwoFilesErrorsElsewhere] import a import b a.f() b.g(1) [file a.py] def f() -> None: pass [file b.py] def g(x: int) -> None: pass [file a.py.2] def f(x: int) -> None: pass [file b.py.2] def g() -> None: pass [out] == main:3: error: Too few arguments for "f" main:4: error: Too many arguments for "g" [case testModifyTwoFilesErrorsInBoth] import a [file a.py] import b def f() -> None: pass b.g(1) [file b.py] import a def g(x: int) -> None: pass a.f() [file a.py.2] import b def f(x: int) -> None: pass b.g(1) [file b.py.2] import a def g() -> None: pass a.f() [out] == b.py:3: error: Too few arguments for "f" a.py:3: error: Too many arguments for "g" [case testModifyTwoFilesFixErrorsInBoth] import a [file a.py] import b def f(x: int) -> None: pass b.g(1) [file b.py] import a def g() -> None: pass a.f() [file a.py.2] import b def f() -> None: pass b.g(1) [file b.py.2] import a def g(x: int) -> None: pass a.f() [out] b.py:3: error: Too few arguments for "f" a.py:3: error: Too many arguments for "g" == [case testAddTwoFilesNoError] import a [file a.py] import b import c b.f() c.g() [file b.py.2] import c def f() -> None: pass c.g() [file c.py.2] import b def g() -> None: pass b.f() [out] a.py:1: error: Cannot find module named 'b' a.py:1: note: (Perhaps setting MYPYPATH or using 
the "--ignore-missing-imports" flag would help) a.py:2: error: Cannot find module named 'c' == [case testAddTwoFilesErrorsInBoth] import a [file a.py] import b import c b.f() c.g() [file b.py.2] import c def f() -> None: pass c.g(1) [file c.py.2] import b def g() -> None: pass b.f(1) [out] a.py:1: error: Cannot find module named 'b' a.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) a.py:2: error: Cannot find module named 'c' == b.py:3: error: Too many arguments for "g" c.py:3: error: Too many arguments for "f" [case testAddTwoFilesErrorsElsewhere] import a import b a.f(1) b.g(1) [file a.py.2] def f() -> None: pass [file b.py.2] def g() -> None: pass [out] main:1: error: Cannot find module named 'a' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: Cannot find module named 'b' == main:3: error: Too many arguments for "f" main:4: error: Too many arguments for "g" [case testDeleteTwoFilesErrorsElsewhere] import a import b a.f() b.g() [file a.py] def f() -> None: pass [file b.py] def g() -> None: pass [delete a.py.2] [delete b.py.2] [out] == main:1: error: Cannot find module named 'a' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) -- TODO: Remove redundant error message main:1: error: Cannot find module named 'b' main:2: error: Cannot find module named 'b' [case testDeleteTwoFilesNoErrors] import a [file a.py] import b import c b.f() c.g() [file b.py] def f() -> None: pass [file c.py] def g() -> None: pass [file a.py.2] [delete b.py.3] [delete c.py.3] [out] == == [case testDeleteTwoFilesFixErrors] import a import b a.f() b.g() [file a.py] import b def f() -> None: pass b.g(1) [file b.py] import a def g() -> None: pass a.f(1) [delete a.py.2] [delete b.py.2] [out] b.py:3: error: Too many arguments for "f" a.py:3: error: Too many arguments for "g" == main:1: error: Cannot find module named 'a' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) -- TODO: Remove redundant error message main:1: error: Cannot find module named 'b' main:2: error: Cannot find module named 'b' [case testAddFileWhichImportsLibModule] import a a.x = 0 [file a.py.2] import sys x = sys.platform [out] main:1: error: Cannot find module named 'a' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == main:2: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAddFileWhichImportsLibModuleWithErrors] import a a.x = 0 [file a.py.2] import broken x = broken.x z [out] main:1: error: Cannot find module named 'a' main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) == a.py:3: error: Name 'z' is not defined /test-data/unit/lib-stub/broken.pyi:2: error: Name 'y' is not defined [case testRenameModule] import a [file a.py] import b b.f() [file b.py] def f() -> None: pass [file a.py.2] import c c.f() [file c.py.2] def f() -> None: pass [file a.py.3] import c c.f(1) [out] == == a.py:2: error: Too many arguments for "f" -- TODO: -- - add one file which imports another new file, blocking error in new file -- - arbitrary blocking errors -- - packages -- - add two files that form a package -- - delete two files that form a package -- - order of processing makes a difference -- - mix of modify, add and delete in one iteration 
mypy-0.560/test-data/unit/fine-grained.test0000644€tŠÔÚ€2›s®0000004376713215007206025007 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for fine-grained incremental checking -- -- Test cases may define multiple versions of a file -- (e.g. m.py, m.py.2). There is always an initial batch -- pass that processes all files present initially, followed -- by one or more fine-grained incremental passes that use -- alternative versions of files, if available. If a file -- just has a single .py version, it is used for all passes. -- TODO: what if version for some passes but not all -- Output is laid out like this: -- -- [out] -- -- == -- [case testReprocessFunction] import m def g() -> int: return m.f() [file m.py] def f() -> int: pass [file m.py.2] def f() -> str: pass [out] == main:3: error: Incompatible return value type (got "str", expected "int") [case testReprocessTopLevel] import m m.f(1) def g() -> None: pass [file m.py] def f(x: int) -> None: pass [file m.py.2] def f(x: str) -> None: pass [out] == main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testReprocessMethod] import m class B: def f(self, a: m.A) -> None: a.g() # E [file m.py] class A: def g(self) -> None: pass [file m.py.2] class A: def g(self, a: A) -> None: pass [out] == main:4: error: Too few arguments for "g" of "A" [case testFunctionMissingModuleAttribute] import m def h() -> None: m.f(1) [file m.py] def f(x: int) -> None: pass [file m.py.2] def g(x: str) -> None: pass [builtins fixtures/fine_grained.pyi] [out] == main:3: error: Module has no attribute "f" [case testTopLevelMissingModuleAttribute] import m m.f(1) def g() -> None: pass [file m.py] def f(x: int) -> None: pass [file m.py.2] def g(x: int) -> None: pass [builtins fixtures/fine_grained.pyi] [out] == main:2: error: Module has no attribute "f" [case testClassChangedIntoFunction] import m def f(a: m.A) -> None: pass [file m.py] class A: pass [file m.py.2] def A() -> None: pass [out] == main:2: error: Invalid type "m.A" [case testClassChangedIntoFunction2] import m class B: def f(self, a: m.A) -> None: pass [file m.py] class A: pass [file m.py.2] def A() -> None: pass [out] == main:3: error: Invalid type "m.A" [case testAttributeTypeChanged] import m def f(a: m.A) -> int: return a.x [file m.py] class A: def f(self) -> None: self.x = 1 [file m.py.2] class A: def f(self) -> None: self.x = 'x' [out] == main:3: error: Incompatible return value type (got "str", expected "int") [case testAttributeRemoved] import m def f(a: m.A) -> int: return a.x [file m.py] class A: def f(self) -> None: self.x = 1 [file m.py.2] class A: def f(self) -> None: pass [out] == main:3: error: "A" has no attribute "x" [case testVariableTypeBecomesInvalid] import m def f() -> None: a = None # type: m.A [file m.py] class A: pass [file m.py.2] [out] == main:3: error: Name 'm.A' is not defined [case testTwoIncrementalSteps] import m import n [file m.py] def f() -> None: pass [file n.py] import m def g() -> None: m.f() # E [file m.py.2] import n def f(x: int) -> None: n.g() # E [file n.py.3] import m def g(a: str) -> None: m.f('') # E [out] == n.py:3: error: Too few arguments for "f" == n.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int" m.py:3: error: Too few arguments for "g" [case testTwoRounds] import m def h(a: m.A) -> int: return a.x [file m.py] import n class A: def g(self, b: n.B) -> None: self.x = b.f() [file n.py] class B: def f(self) -> int: pass [file n.py.2] class B: def f(self) -> str: pass [out] == main:3: error: 
Incompatible return value type (got "str", expected "int") [case testFixTypeError] import m def f(a: m.A) -> None: a.f(a) [file m.py] class A: def f(self, a: 'A') -> None: pass [file m.py.2] class A: def f(self) -> None: pass [file m.py.3] class A: def f(self, a: 'A') -> None: pass [out] == main:3: error: Too many arguments for "f" of "A" == [case testFixTypeError2] import m def f(a: m.A) -> None: a.f() [file m.py] class A: def f(self) -> None: pass [file m.py.2] class A: def g(self) -> None: pass [file m.py.3] class A: def f(self) -> None: pass [out] == main:3: error: "A" has no attribute "f" == [case testFixSemanticAnalysisError] import m def f() -> None: m.A() [file m.py] class A: pass [file m.py.2] class B: pass [file m.py.3] class A: pass [builtins fixtures/fine_grained.pyi] [out] == main:3: error: Module has no attribute "A" == [case testContinueToReportTypeCheckError] import m def f(a: m.A) -> None: a.f() def g(a: m.A) -> None: a.g() [file m.py] class A: def f(self) -> None: pass def g(self) -> None: pass [file m.py.2] class A: pass [file m.py.3] class A: def f(self) -> None: pass [out] == main:3: error: "A" has no attribute "f" main:5: error: "A" has no attribute "g" == main:5: error: "A" has no attribute "g" [case testContinueToReportSemanticAnalysisError] import m def f() -> None: m.A() def g() -> None: m.B() [file m.py] class A: pass class B: pass [file m.py.2] [file m.py.3] class A: pass [builtins fixtures/fine_grained.pyi] [out] == main:3: error: Module has no attribute "A" main:5: error: Module has no attribute "B" == main:5: error: Module has no attribute "B" [case testContinueToReportErrorAtTopLevel] import n import m m.A().f() [file n.py] import m m.A().g() [file m.py] class A: def f(self) -> None: pass def g(self) -> None: pass [file m.py.2] class A: pass [file m.py.3] class A: def f(self) -> None: pass [out] == main:3: error: "A" has no attribute "f" n.py:2: error: "A" has no attribute "g" == n.py:2: error: "A" has no attribute "g" [case testContinueToReportErrorInMethod] import m class C: def f(self, a: m.A) -> None: a.f() def g(self, a: m.A) -> None: a.g() [file m.py] class A: def f(self) -> None: pass def g(self) -> None: pass [file m.py.2] class A: pass [file m.py.3] class A: def f(self) -> None: pass [out] == main:4: error: "A" has no attribute "f" main:6: error: "A" has no attribute "g" == main:6: error: "A" has no attribute "g" [case testInitialBatchGeneratedError] import m def g() -> None: m.f() def h() -> None: m.g() [file m.py] def f(x: object) -> None: pass [file m.py.2] def f() -> None: pass [file m.py.3] def f() -> None: pass def g() -> None: pass [builtins fixtures/fine_grained.pyi] [out] main:3: error: Too few arguments for "f" main:5: error: Module has no attribute "g" == main:5: error: Module has no attribute "g" == [case testKeepReportingErrorIfNoChanges] import m def h() -> None: m.g() [file m.py] [file m.py.2] [builtins fixtures/fine_grained.pyi] [out] main:3: error: Module has no attribute "g" == main:3: error: Module has no attribute "g" [case testFixErrorAndReintroduce] import m def h() -> None: m.g() [file m.py] [file m.py.2] def g() -> None: pass [file m.py.3] [builtins fixtures/fine_grained.pyi] [out] main:3: error: Module has no attribute "g" == == main:3: error: Module has no attribute "g" [case testAddBaseClassMethodCausingInvalidOverride] import m class B(m.A): def f(self) -> str: pass [file m.py] class A: pass [file m.py.2] class A: def f(self) -> int: pass [out] == main:3: error: Return type of "f" incompatible with supertype "A" [case 
testModifyBaseClassMethodCausingInvalidOverride] import m class B(m.A): def f(self) -> str: pass [file m.py] class A: def f(self) -> str: pass [file m.py.2] class A: def f(self) -> int: pass [out] == main:3: error: Return type of "f" incompatible with supertype "A" [case testAddBaseClassAttributeCausingErrorInSubclass] import m class B(m.A): def a(self) -> None: x = 1 x = self.x def f(self) -> None: self.x = 1 def z(self) -> None: x = 1 x = self.x [file m.py] class A: pass [file m.py.2] class A: def g(self) -> None: self.x = 'a' [out] == main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:8: error: Incompatible types in assignment (expression has type "int", variable has type "str") main:12: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testChangeBaseClassAttributeType] import m class B(m.A): def f(sel) -> None: sel.x = 1 [file m.py] class A: def g(self) -> None: self.x = 1 [file m.py.2] class A: def g(self) -> None: self.x = 'a' [out] == main:4: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testRemoveAttributeInBaseClass] import m class B(m.A): def f(self) -> None: a = 1 a = self.x [file m.py] class A: def g(self) -> None: self.x = 1 [file m.py.2] class A: pass [out] == main:5: error: "B" has no attribute "x" [case testTestSignatureOfInheritedMethod] import m class B(m.A): def f(self) -> None: self.g() [file m.py] class A: def g(self) -> None: pass [file m.py.2] class A: def g(self, a: 'A') -> None: pass [out] == main:4: error: Too few arguments for "g" of "A" [case testRemoveBaseClass] import m class A(m.B): def f(self) -> None: self.g() self.x self.y = 1 [file m.py] class C: def g(self) -> None: self.x = 1 class B(C): pass [file m.py.2] class C: pass class B: pass [out] == main:4: error: "A" has no attribute "g" main:5: error: "A" has no attribute "x" [case testRemoveBaseClass2] import m class A(m.B): def f(self) -> None: self.g() self.x self.y = 1 [file m.py] class C: def g(self) -> None: self.x = 1 class B(C): pass [file m.py.2] class C: def g(self) -> None: self.x = 1 class B: pass [out] == main:4: error: "A" has no attribute "g" main:5: error: "A" has no attribute "x" [case testChangeInPackage] import m.n def f() -> None: m.n.g() [file m/__init__.py] [file m/n.py] def g() -> None: pass [file m/n.py.2] def g(x: int) -> None: pass [out] == main:3: error: Too few arguments for "g" [case testTriggerTargetInPackage] import m.n [file m/__init__.py] [file m/n.py] import a def f() -> None: a.g() [file a.py] def g() -> None: pass [file a.py.2] def g(x: int) -> None: pass [out] == m/n.py:3: error: Too few arguments for "g" [case testChangeInPackage__init__] import m import m.n def f() -> None: m.g() [file m/__init__.py] def g() -> None: pass [file m/__init__.py.2] def g(x: int) -> None: pass [file m/n.py] [out] == main:4: error: Too few arguments for "g" [case testTriggerTargetInPackage__init__] import m import m.n [file m/__init__.py] import a def f() -> None: a.g() [file a.py] def g() -> None: pass [file a.py.2] def g(x: int) -> None: pass [file m/n.py] [out] == m/__init__.py:3: error: Too few arguments for "g" [case testModuleAttributeTypeChanges] import m def f() -> None: x = 1 x = m.x [file m.py] x = 1 [file m.py.2] x = '' [out] == main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testTwoStepsDueToModuleAttribute] import m x = m.f() def g() -> None: y = 1 y = x # E [file 
m.py] def f() -> int: pass [file m.py.2] def f() -> str: pass [out] == main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testTwoStepsDueToMultipleNamespaces] import m x = m.f() def g() -> None: xx = 1 xx = x class A: def a(self) -> None: self.y = m.f() def b(self) -> None: yy = 1 yy = self.y class B: def c(self) -> None: self.z = m.f() def b(self) -> None: zz = 1 zz = self.z [file m.py] def f() -> int: pass [file m.py.2] def f() -> str: pass [out] == main:7: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:14: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:21: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testConstructorSignatureChanged] import m def f() -> None: m.A() [file m.py] class A: def __init__(self) -> None: pass [file m.py.2] class A: def __init__(self, x: int) -> None: pass [out] == main:4: error: Too few arguments for "A" [case testConstructorAdded] import m def f() -> None: m.A() [file m.py] class A: pass [file m.py.2] class A: def __init__(self, x: int) -> None: pass [out] == main:4: error: Too few arguments for "A" [case testConstructorDeleted] import m def f() -> None: m.A(1) [file m.py] class A: def __init__(self, x: int) -> None: pass [file m.py.2] class A: pass [out] == main:4: error: Too many arguments for "A" [case testBaseClassConstructorChanged] import m def f() -> None: m.B() [file m.py] class A: def __init__(self) -> None: pass class B(A): pass [file m.py.2] class A: def __init__(self, x: int) -> None: pass class B(A): pass [out] == main:4: error: Too few arguments for "B" [case testImportFrom] from m import f def g() -> None: f() [file m.py] def f() -> None: pass [file m.py.2] def f(x: int) -> None: pass [builtins fixtures/fine_grained.pyi] [out] == main:4: error: Too few arguments for "f" [case testImportFrom2] from m import f f() [file m.py] def f() -> None: pass [file m.py.2] def f(x: int) -> None: pass [out] == main:2: error: Too few arguments for "f" [case testImportFromTargetsClass] from m import C def f(c: C) -> None: c.g() [file m.py] class C: def g(self) -> None: pass [file m.py.2] class C: def g(self, x: int) -> None: pass [out] == main:4: error: Too few arguments for "g" of "C" [case testImportFromTargetsVariable] from m import x def f() -> None: y = 1 y = x [file m.py] x = 1 [file m.py.2] x = '' [out] == main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testImportFromSubmoduleOfPackage] from m import n def f() -> None: n.g() [file m/__init__.py] [file m/n.py] def g() -> None: pass [file m/n.py.2] def g(x: int) -> None: pass [out] == main:4: error: Too few arguments for "g" [case testImportedFunctionGetsImported] from m import f def g() -> None: f() [file m.py] from n import f [file n.py] def f() -> None: pass [file n.py.2] def f(x: int) -> None: pass [out] == main:4: error: Too few arguments for "f" [case testNestedClassMethodSignatureChanges] from m import A def f(x: A.B) -> None: x.g() [file m.py] class A: class B: def g(self) -> None: pass [file m.py.2] class A: class B: def g(self, x: int) -> None: pass [out] == main:4: error: Too few arguments for "g" of "B" [case testNestedClassAttributeTypeChanges] from m import A def f(x: A.B) -> None: z = 1 z = x.y [file m.py] class A: class B: def g(self) -> None: self.y = 1 [file m.py.2] class A: class B: def g(self) -> None: self.y = '' [out] == 
main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testReprocessMethodInNestedClass] from m import f class A: class B: def g(self) -> None: x = 1 x = f() [file m.py] def f() -> int: pass [file m.py.2] def f() -> str: pass [out] == main:7: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testBaseClassDeleted] import m class A(m.C): def f(self) -> None: self.g() # No error here because m.C becomes an Any base class def g(self) -> None: self.x [file m.py] class C: def g(self) -> None: pass [file m.py.2] [out] main:7: error: "A" has no attribute "x" == main:3: error: Name 'm.C' is not defined [case testBaseClassOfNestedClassDeleted] import m class A: class B(m.C): def f(self) -> None: self.g() # No error here because m.C becomes an Any base class def g(self) -> None: self.x [file m.py] class C: def g(self) -> None: pass [file m.py.2] [out] main:8: error: "B" has no attribute "x" == main:4: error: Name 'm.C' is not defined [case testImportQualifiedModuleName] import a [file a.py] import b.c b.c.f() [file a.py.2] import b.c b.c.f() # dummy change [file b/__init__.py] [file b/c.py] def f() -> None: pass [out] == [case testTypeAliasRefresh] from typing import Callable from a import f C = Callable[[int], str] [file a.py] def f() -> None: pass [file a.py.2] [out] == main:2: error: Module 'a' has no attribute 'f' [case testTypeVarRefresh] from typing import TypeVar from a import f T = TypeVar('T') [file a.py] def f() -> None: pass [file a.py.2] [out] == main:2: error: Module 'a' has no attribute 'f' [case testNamedTupleRefresh] from typing import NamedTuple from a import f N = NamedTuple('N', [('x', int)]) [file a.py] def f() -> None: pass [file a.py.2] [out] == main:2: error: Module 'a' has no attribute 'f' [case testModuleLevelAttributeRefresh] from typing import Callable from a import f x = 1 y = '' # type: str [file a.py] def f() -> None: pass [file a.py.2] [out] == main:2: error: Module 'a' has no attribute 'f' [case testClassBodyRefresh] from a import f class A: x = 1 y = '' # type: str def f(self) -> None: self.x = 1 [file a.py] f = 1 [file a.py.2] [out] == main:1: error: Module 'a' has no attribute 'f' [case testDecoratedMethodRefresh] from typing import Iterator, Callable, List from a import f import a def dec(f: Callable[['A'], Iterator[int]]) -> Callable[[int], int]: pass class A: @dec def f(self) -> Iterator[int]: self.x = a.g() # type: int return None [builtins fixtures/list.pyi] [file a.py] f = 1 def g() -> int: pass [file a.py.2] def f() -> None: pass def g() -> int: pass [file a.py.3] def f() -> None: pass def g() -> str: pass [out] == == main:10: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testTwoPassTypeChecking] import a [file a.py] [file a.py.2] class A: def __init__(self, b: B) -> None: self.a = b.a class B: def __init__(self) -> None: self.a = int() [file a.py.3] class A: def __init__(self, b: B) -> None: self.a = b.a reveal_type(self.a) # E class B: def __init__(self) -> None: self.a = int() [out] == == a.py:4: error: Revealed type is 'builtins.int' mypy-0.560/test-data/unit/fixtures/0000755€tŠÔÚ€2›s®0000000000013215007244023410 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/unit/fixtures/__new__.pyi0000644€tŠÔÚ€2›s®0000000036013215007206025515 0ustar jukkaDROPBOX\Domain Users00000000000000# builtins stub with object.__new__ class object: def __init__(self) -> None: pass def __new__(cls): pass class 
type: def __init__(self, x) -> None: pass class int: pass class bool: pass class str: pass class function: pass mypy-0.560/test-data/unit/fixtures/alias.pyi0000644€tŠÔÚ€2›s®0000000033513215007206025223 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins test fixture with a type alias 'bytes' class object: def __init__(self) -> None: pass class type: def __init__(self, x) -> None: pass class int: pass class str: pass class function: pass bytes = str mypy-0.560/test-data/unit/fixtures/args.pyi0000644€tŠÔÚ€2›s®0000000146113215007206025067 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used to support *args, **kwargs. from typing import TypeVar, Generic, Iterable, Tuple, Dict, Any, overload, Mapping Tco = TypeVar('Tco', covariant=True) T = TypeVar('T') S = TypeVar('S') class object: def __init__(self) -> None: pass def __eq__(self, o: object) -> bool: pass def __ne__(self, o: object) -> bool: pass class type: @overload def __init__(self, o: object) -> None: pass @overload def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: pass def __call__(self, *args: Any, **kwargs: Any) -> Any: pass class tuple(Iterable[Tco], Generic[Tco]): pass class dict(Iterable[T], Mapping[T, S], Generic[T, S]): pass class int: def __eq__(self, o: object) -> bool: pass class str: pass class bool: pass class function: pass mypy-0.560/test-data/unit/fixtures/async_await.pyi0000644€tŠÔÚ€2›s®0000000107713215007206026440 0ustar jukkaDROPBOX\Domain Users00000000000000import typing T = typing.TypeVar('T') U = typing.TypeVar('U') class list(typing.Sequence[T]): pass class object: def __init__(self) -> None: pass class type: pass class function: pass class int: pass class str: pass class bool: pass class dict(typing.Generic[T, U]): pass class set(typing.Generic[T]): pass class tuple(typing.Generic[T]): pass class BaseException: pass class StopIteration(BaseException): pass class StopAsyncIteration(BaseException): pass def iter(obj: typing.Any) -> typing.Any: pass def next(obj: typing.Any) -> typing.Any: pass class ellipsis: ... mypy-0.560/test-data/unit/fixtures/bool.pyi0000644€tŠÔÚ€2›s®0000000044713215007206025071 0ustar jukkaDROPBOX\Domain Users00000000000000# builtins stub used in boolean-related test cases. 
from typing import Generic, TypeVar T = TypeVar('T') class object: def __init__(self) -> None: pass class type: pass class tuple(Generic[T]): pass class function: pass class bool: pass class int: pass class str: pass class unicode: pass mypy-0.560/test-data/unit/fixtures/callable.pyi0000644€tŠÔÚ€2›s®0000000113513215007206025670 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, Tuple, TypeVar, Union T = TypeVar('T') class object: def __init__(self) -> None: pass class type: def __init__(self, x) -> None: pass class tuple(Generic[T]): pass class function: pass def isinstance(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass def callable(x: object) -> bool: pass class int: def __add__(self, other: 'int') -> 'int': pass def __eq__(self, other: 'int') -> 'bool': pass class float: pass class bool(int): pass class str: def __add__(self, other: 'str') -> 'str': pass def __eq__(self, other: 'str') -> bool: pass mypy-0.560/test-data/unit/fixtures/classmethod.pyi0000644€tŠÔÚ€2›s®0000000071613215007206026443 0ustar jukkaDROPBOX\Domain Users00000000000000import typing _T = typing.TypeVar('_T') class object: def __init__(self) -> None: pass class type: def __init__(self, x) -> None: pass def mro(self) -> typing.Any: pass class function: pass # Dummy definitions. classmethod = object() staticmethod = object() class int: @classmethod def from_bytes(cls, bytes: bytes, byteorder: str) -> int: pass class str: pass class bytes: pass class bool: pass class tuple(typing.Generic[_T]): pass mypy-0.560/test-data/unit/fixtures/complex.pyi0000644€tŠÔÚ€2›s®0000000032113215007206025574 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used for some float/complex test cases. class object: def __init__(self): pass class type: pass class function: pass class int: pass class float: pass class complex: pass class str: pass mypy-0.560/test-data/unit/fixtures/dict.pyi0000644€tŠÔÚ€2›s®0000000266713215007206025067 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in dictionary-related test cases. from typing import TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union T = TypeVar('T') KT = TypeVar('KT') VT = TypeVar('VT') class object: def __init__(self) -> None: pass class type: pass class dict(Generic[KT, VT]): @overload def __init__(self, **kwargs: VT) -> None: pass @overload def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass def __getitem__(self, key: KT) -> VT: pass def __setitem__(self, k: KT, v: VT) -> None: pass def __iter__(self) -> Iterator[KT]: pass def __contains__(self, item: object) -> bool: pass def update(self, a: Mapping[KT, VT]) -> None: pass @overload def get(self, k: KT) -> Optional[VT]: pass @overload def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass def __len__(self) -> int: ... 
class int: # for convenience def __add__(self, x: int) -> int: pass class str: pass # for keyword argument key type class unicode: pass # needed for py2 docstrings class list(Generic[T]): # needed by some test cases def __getitem__(self, x: int) -> T: pass def __iter__(self) -> Iterator[T]: pass def __mul__(self, x: int) -> list[T]: pass class tuple(Generic[T]): pass class function: pass class float: pass class bool: pass class ellipsis: pass def isinstance(x: object, t: Union[type, Tuple]) -> bool: pass class BaseException: pass mypy-0.560/test-data/unit/fixtures/exception.pyi0000644€tŠÔÚ€2›s®0000000040613215007206026127 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, TypeVar T = TypeVar('T') class object: def __init__(self): pass class type: pass class tuple(Generic[T]): pass class function: pass class int: pass class str: pass class unicode: pass class bool: pass class BaseException: pass mypy-0.560/test-data/unit/fixtures/f_string.pyi0000644€tŠÔÚ€2›s®0000000146413215007206025751 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used for format-string-related test cases. # We need str and list, and str needs join and format methods. from typing import TypeVar, Generic, Iterable, Iterator, List, overload T = TypeVar('T') class object: def __init__(self): pass class type: def __init__(self, x) -> None: pass class ellipsis: pass class list(Iterable[T], Generic[T]): @overload def __init__(self) -> None: pass @overload def __init__(self, x: Iterable[T]) -> None: pass def append(self, x: T) -> None: pass class tuple(Generic[T]): pass class function: pass class int: def __add__(self, i: int) -> int: pass class float: pass class bool(int): pass class str: def __add__(self, s: str) -> str: pass def format(self, *args) -> str: pass def join(self, l: List[str]) -> str: pass mypy-0.560/test-data/unit/fixtures/fine_grained.pyi0000644€tŠÔÚ€2›s®0000000116213215007206026543 0ustar jukkaDROPBOX\Domain Users00000000000000# Small stub for fine-grained incremental checking test cases # # TODO: Migrate to regular stubs once fine-grained incremental is robust # enough to handle them. import types from typing import TypeVar, Generic T = TypeVar('T') class Any: pass class object: def __init__(self) -> None: pass class type: def __init__(self, x: Any) -> None: pass class int: def __add__(self, other: 'int') -> 'int': pass class str: def __add__(self, other: 'str') -> 'str': pass class float: pass class bytes: pass class tuple(Generic[T]): pass class function: pass class ellipsis: pass class list(Generic[T]): pass mypy-0.560/test-data/unit/fixtures/float.pyi0000644€tŠÔÚ€2›s®0000000132013215007206025232 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, TypeVar T = TypeVar('T') Any = 0 class object: def __init__(self) -> None: pass class type: def __init__(self, x: Any) -> None: pass class str: def __add__(self, other: 'str') -> 'str': pass def __rmul__(self, n: int) -> str: ... class bytes: pass class tuple(Generic[T]): pass class function: pass class ellipsis: pass class int: def __float__(self) -> float: ... def __int__(self) -> int: ... def __mul__(self, x: int) -> int: ... def __rmul__(self, x: int) -> int: ... class float: def __float__(self) -> float: ... def __int__(self) -> int: ... def __mul__(self, x: float) -> float: ... def __rmul__(self, x: float) -> float: ... 
mypy-0.560/test-data/unit/fixtures/floatdict.pyi0000644€tŠÔÚ€2›s®0000000343613215007206026110 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union T = TypeVar('T') KT = TypeVar('KT') VT = TypeVar('VT') Any = 0 class object: def __init__(self) -> None: pass class type: def __init__(self, x: Any) -> None: pass class str: def __add__(self, other: 'str') -> 'str': pass def __rmul__(self, n: int) -> str: ... class bytes: pass class tuple(Generic[T]): pass class function: pass class ellipsis: pass class list(Iterable[T], Generic[T]): @overload def __init__(self) -> None: pass @overload def __init__(self, x: Iterable[T]) -> None: pass def __iter__(self) -> Iterator[T]: pass def __add__(self, x: list[T]) -> list[T]: pass def __mul__(self, x: int) -> list[T]: pass def __getitem__(self, x: int) -> T: pass def append(self, x: T) -> None: pass def extend(self, x: Iterable[T]) -> None: pass class dict(Iterable[KT], Mapping[KT, VT], Generic[KT, VT]): @overload def __init__(self, **kwargs: VT) -> None: pass @overload def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass def __setitem__(self, k: KT, v: VT) -> None: pass def __getitem__(self, k: KT) -> VT: pass def __iter__(self) -> Iterator[KT]: pass def update(self, a: Mapping[KT, VT]) -> None: pass @overload def get(self, k: KT) -> Optional[VT]: pass @overload def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass class int: def __float__(self) -> float: ... def __int__(self) -> int: ... def __mul__(self, x: int) -> int: ... def __rmul__(self, x: int) -> int: ... class float: def __float__(self) -> float: ... def __int__(self) -> int: ... def __mul__(self, x: float) -> float: ... def __rmul__(self, x: float) -> float: ... 
mypy-0.560/test-data/unit/fixtures/for.pyi0000644€tŠÔÚ€2›s®0000000076613215007206024730 0ustar jukkaDROPBOX\Domain Users00000000000000# builtins stub used in for statement test cases from typing import TypeVar, Generic, Iterable, Iterator, Generator from abc import abstractmethod, ABCMeta t = TypeVar('t') class object: def __init__(self) -> None: pass class type: pass class tuple(Generic[t]): def __iter__(self) -> Iterator[t]: pass class function: pass class bool: pass class int: pass # for convenience class str: pass # for convenience class list(Iterable[t], Generic[t]): def __iter__(self) -> Iterator[t]: pass mypy-0.560/test-data/unit/fixtures/function.pyi0000644€tŠÔÚ€2›s®0000000016213215007206025755 0ustar jukkaDROPBOX\Domain Users00000000000000class object: def __init__(self): pass class type: pass class function: pass class int: pass class str: pass mypy-0.560/test-data/unit/fixtures/isinstance.pyi0000644€tŠÔÚ€2›s®0000000106113215007206026267 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple, TypeVar, Generic, Union T = TypeVar('T') class object: def __init__(self) -> None: pass class type: def __init__(self, x) -> None: pass class tuple(Generic[T]): pass class function: pass def isinstance(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass def issubclass(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass class int: def __add__(self, other: 'int') -> 'int': pass class float: pass class bool(int): pass class str: def __add__(self, other: 'str') -> 'str': pass class ellipsis: pass mypy-0.560/test-data/unit/fixtures/isinstancelist.pyi0000644€tŠÔÚ€2›s®0000000267413215007206027176 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterable, Iterator, TypeVar, List, Mapping, overload, Tuple, Set, Union, Generic class object: def __init__(self) -> None: pass class type: def __init__(self, x) -> None: pass class function: pass class ellipsis: pass def isinstance(x: object, t: Union[type, Tuple]) -> bool: pass def issubclass(x: object, t: Union[type, Tuple]) -> bool: pass class int: def __add__(self, x: int) -> int: pass class bool(int): pass class str: def __add__(self, x: str) -> str: pass def __getitem__(self, x: int) -> str: pass T = TypeVar('T') KT = TypeVar('KT') VT = TypeVar('VT') class tuple(Generic[T]): def __len__(self) -> int: pass class list(Generic[T]): def __iter__(self) -> Iterator[T]: pass def __mul__(self, x: int) -> list[T]: pass def __setitem__(self, x: int, v: T) -> None: pass def __getitem__(self, x: int) -> T: pass def __add__(self, x: List[T]) -> T: pass class dict(Mapping[KT, VT]): @overload def __init__(self, **kwargs: VT) -> None: pass @overload def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass def __setitem__(self, k: KT, v: VT) -> None: pass def __iter__(self) -> Iterator[KT]: pass def update(self, a: Mapping[KT, VT]) -> None: pass class set(Generic[T]): def __iter__(self) -> Iterator[T]: pass def add(self, x: T) -> None: pass def discard(self, x: T) -> None: pass def update(self, x: Set[T]) -> None: pass mypy-0.560/test-data/unit/fixtures/list.pyi0000644€tŠÔÚ€2›s®0000000162113215007206025104 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in list-related test cases. 
from typing import TypeVar, Generic, Iterable, Iterator, overload T = TypeVar('T') class object: def __init__(self): pass class type: pass class ellipsis: pass class list(Generic[T]): @overload def __init__(self) -> None: pass @overload def __init__(self, x: Iterable[T]) -> None: pass def __iter__(self) -> Iterator[T]: pass def __contains__(self, item: object) -> bool: pass def __add__(self, x: list[T]) -> list[T]: pass def __mul__(self, x: int) -> list[T]: pass def __getitem__(self, x: int) -> T: pass def __setitem__(self, x: int, v: T) -> None: pass def append(self, x: T) -> None: pass def extend(self, x: Iterable[T]) -> None: pass class tuple(Generic[T]): pass class function: pass class int: pass class float: pass class str: pass class bool(int): pass property = object() # Dummy definition. mypy-0.560/test-data/unit/fixtures/module.pyi0000644€tŠÔÚ€2›s®0000000066213215007206025422 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Generic, TypeVar, Sequence from types import ModuleType T = TypeVar('T') S = TypeVar('S') class list(Generic[T], Sequence[T]): pass class object: def __init__(self) -> None: pass class type: pass class function: pass class int: pass class str: pass class bool: pass class tuple(Generic[T]): pass class dict(Generic[T, S]): pass class ellipsis: pass classmethod = object() staticmethod = object() mypy-0.560/test-data/unit/fixtures/module_all.pyi0000644€tŠÔÚ€2›s®0000000071613215007206026252 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, Sequence, TypeVar from types import ModuleType _T = TypeVar('_T') class object: def __init__(self) -> None: pass class type: pass class function: pass class int: pass class str: pass class bool: pass class list(Generic[_T], Sequence[_T]): def append(self, x: _T): pass def extend(self, x: Sequence[_T]): pass def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass class tuple(Generic[_T]): pass class ellipsis: pass mypy-0.560/test-data/unit/fixtures/module_all_python2.pyi0000644€tŠÔÚ€2›s®0000000063613215007206027736 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, Sequence, TypeVar _T = TypeVar('_T') class object: def __init__(self) -> None: pass class type: pass class function: pass class int: pass class str: pass class unicode: pass class list(Generic[_T], Sequence[_T]): def append(self, x: _T): pass def extend(self, x: Sequence[_T]): pass def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass class tuple(Generic[_T]): pass mypy-0.560/test-data/unit/fixtures/ops.pyi0000644€tŠÔÚ€2›s®0000000332113215007206024731 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import overload, Any, Generic, Sequence, Tuple, TypeVar Tco = TypeVar('Tco', covariant=True) # This is an extension of transform builtins with additional operations. 
class object: def __init__(self) -> None: pass def __eq__(self, o: 'object') -> 'bool': pass def __ne__(self, o: 'object') -> 'bool': pass class type: pass class slice: pass class tuple(Sequence[Tco], Generic[Tco]): def __getitem__(self, x: int) -> Tco: pass def __eq__(self, x: object) -> bool: pass def __ne__(self, x: object) -> bool: pass def __lt__(self, x: 'tuple') -> bool: pass def __le__(self, x: 'tuple') -> bool: pass def __gt__(self, x: 'tuple') -> bool: pass def __ge__(self, x: 'tuple') -> bool: pass class function: pass class bool: pass class str: def __init__(self, x: 'int') -> None: pass def __add__(self, x: 'str') -> 'str': pass def __eq__(self, x: object) -> bool: pass def startswith(self, x: 'str') -> bool: pass class unicode: pass class int: def __add__(self, x: 'int') -> 'int': pass def __sub__(self, x: 'int') -> 'int': pass def __mul__(self, x: 'int') -> 'int': pass def __mod__(self, x: 'int') -> 'int': pass def __floordiv__(self, x: 'int') -> 'int': pass def __pow__(self, x: 'int') -> Any: pass def __pos__(self) -> 'int': pass def __neg__(self) -> 'int': pass def __eq__(self, x: object) -> bool: pass def __ne__(self, x: object) -> bool: pass def __lt__(self, x: 'int') -> bool: pass def __le__(self, x: 'int') -> bool: pass def __gt__(self, x: 'int') -> bool: pass def __ge__(self, x: 'int') -> bool: pass class float: pass class BaseException: pass def __print(a1=None, a2=None, a3=None, a4=None): pass mypy-0.560/test-data/unit/fixtures/primitives.pyi0000644€tŠÔÚ€2›s®0000000103713215007206026325 0ustar jukkaDROPBOX\Domain Users00000000000000# builtins stub with non-generic primitive types from typing import Generic, TypeVar T = TypeVar('T') class object: def __init__(self) -> None: pass def __str__(self) -> str: pass class type: def __init__(self, x) -> None: pass class int: def __add__(self, i: int) -> int: pass class float: pass class complex: pass class bool(int): pass class str: def __add__(self, s: str) -> str: pass def format(self, *args) -> str: pass class bytes: pass class bytearray: pass class tuple(Generic[T]): pass class function: pass mypy-0.560/test-data/unit/fixtures/property.pyi0000644€tŠÔÚ€2›s®0000000051013215007206026011 0ustar jukkaDROPBOX\Domain Users00000000000000import typing _T = typing.TypeVar('_T') class object: def __init__(self) -> None: pass class type: def __init__(self, x: typing.Any) -> None: pass class function: pass property = object() # Dummy definition. class int: pass class str: pass class bytes: pass class bool: pass class tuple(typing.Generic[_T]): pass mypy-0.560/test-data/unit/fixtures/python2.pyi0000644€tŠÔÚ€2›s®0000000052113215007206025532 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, Iterable, TypeVar class object: def __init__(self) -> None: pass class type: def __init__(self, x) -> None: pass class function: pass class int: pass class str: pass class unicode: pass class bool: pass T = TypeVar('T') class list(Iterable[T], Generic[T]): pass # Definition of None is implicit mypy-0.560/test-data/unit/fixtures/set.pyi0000644€tŠÔÚ€2›s®0000000106113215007206024722 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in set-related test cases. 
from typing import TypeVar, Generic, Iterator, Iterable, Set T = TypeVar('T') class object: def __init__(self) -> None: pass class type: pass class tuple(Generic[T]): pass class function: pass class int: pass class str: pass class bool: pass class set(Iterable[T], Generic[T]): def __iter__(self) -> Iterator[T]: pass def __contains__(self, item: object) -> bool: pass def add(self, x: T) -> None: pass def discard(self, x: T) -> None: pass def update(self, x: Set[T]) -> None: pass mypy-0.560/test-data/unit/fixtures/slice.pyi0000644€tŠÔÚ€2›s®0000000040613215007206025230 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in slicing test cases. from typing import Generic, TypeVar T = TypeVar('T') class object: def __init__(self): pass class type: pass class tuple(Generic[T]): pass class function: pass class int: pass class str: pass class slice: pass mypy-0.560/test-data/unit/fixtures/staticmethod.pyi0000644€tŠÔÚ€2›s®0000000051513215007206026622 0ustar jukkaDROPBOX\Domain Users00000000000000import typing class object: def __init__(self) -> None: pass class type: def __init__(self, x) -> None: pass class function: pass staticmethod = object() # Dummy definition. class int: @staticmethod def from_bytes(bytes: bytes, byteorder: str) -> int: pass class str: pass class unicode: pass class bytes: pass mypy-0.560/test-data/unit/fixtures/transform.pyi0000644€tŠÔÚ€2›s®0000000164713215007206026154 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stubs used implicitly in program transformation test cases. class object: def __init__(self) -> None: pass class type: pass # str is handy for debugging; allows outputting messages. class str: pass # Primitive types int/float have special coercion behaviour (they may have # a different representation from ordinary values). class int: pass class float: pass # The functions below are special functions used in test cases; their # implementations are actually in the __dynchk module, but they are defined # here so that the semantic analyzer and the type checker are happy without # having to analyze the entire __dynchk module all the time. # # The transformation implementation has special case handling for these # functions; it's a bit ugly but it works for now. def __print(a1=None, a2=None, a3=None, a4=None): # Do not use *args since this would require list and break many test # cases. pass mypy-0.560/test-data/unit/fixtures/tuple-simple.pyi0000644€tŠÔÚ€2›s®0000000072413215007206026554 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in some tuple-related test cases. # # This is a simpler version of tuple.py which is useful # and makes some test cases easier to write/debug. from typing import Iterable, TypeVar, Generic T = TypeVar('T') class object: def __init__(self): pass class type: pass class tuple(Generic[T]): def __getitem__(self, x: int) -> T: pass class function: pass # We need int for indexing tuples. class int: pass class str: pass # For convenience mypy-0.560/test-data/unit/fixtures/tuple.pyi0000644€tŠÔÚ€2›s®0000000163413215007206025266 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in tuple-related test cases. 
from typing import Iterable, Iterator, TypeVar, Generic, Sequence, Any Tco = TypeVar('Tco', covariant=True) class object: def __init__(self): pass class type: def __init__(self, *a) -> None: pass def __call__(self, *a) -> object: pass class tuple(Sequence[Tco], Generic[Tco]): def __iter__(self) -> Iterator[Tco]: pass def __contains__(self, item: object) -> bool: pass def __getitem__(self, x: int) -> Tco: pass def count(self, obj: Any) -> int: pass class function: pass class ellipsis: pass # We need int and slice for indexing tuples. class int: pass class slice: pass class bool: pass class str: pass # For convenience class unicode: pass T = TypeVar('T') class list(Sequence[T], Generic[T]): pass def isinstance(x: object, t: type) -> bool: pass def sum(iterable: Iterable[T], start: T = None) -> T: pass class BaseException: pass mypy-0.560/test-data/unit/fixtures/type.pyi0000644€tŠÔÚ€2›s®0000000064513215007206025117 0ustar jukkaDROPBOX\Domain Users00000000000000# builtins stub used in type-related test cases. from typing import Generic, TypeVar, List T = TypeVar('T') class object: def __init__(self) -> None: pass def __str__(self) -> 'str': pass class list(Generic[T]): pass class type: __name__: str def mro(self) -> List['type']: pass class tuple(Generic[T]): pass class function: pass class bool: pass class int: pass class str: pass class unicode: pass mypy-0.560/test-data/unit/fixtures/typing-full.pyi0000644€tŠÔÚ€2›s®0000000712213215007206026405 0ustar jukkaDROPBOX\Domain Users00000000000000# More complete stub for typing module. # # Use [typing fixtures/typing-full.pyi] to use this instead of lib-stub/typing.pyi # in a particular test case. # # Many of the definitions have special handling in the type checker, so they # can just be initialized to anything. from abc import abstractmethod class GenericMeta(type): pass cast = 0 overload = 0 Any = 0 Union = 0 Optional = 0 TypeVar = 0 Generic = 0 Protocol = 0 Tuple = 0 Callable = 0 _promote = 0 NamedTuple = 0 Type = 0 no_type_check = 0 ClassVar = 0 NoReturn = 0 NewType = 0 # Type aliases. List = 0 Dict = 0 Set = 0 T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) T_contra = TypeVar('T_contra', contravariant=True) U = TypeVar('U') V = TypeVar('V') S = TypeVar('S') # Note: definitions below are different from typeshed, variances are declared # to silence the protocol variance checks. Maybe it is better to use type: ignore? 
@runtime class Container(Protocol[T_contra]): @abstractmethod # Use int because bool isn't in the default test builtins def __contains__(self, arg: T_contra) -> int: pass @runtime class Sized(Protocol): @abstractmethod def __len__(self) -> int: pass @runtime class Iterable(Protocol[T_co]): @abstractmethod def __iter__(self) -> 'Iterator[T_co]': pass @runtime class Iterator(Iterable[T_co], Protocol): @abstractmethod def __next__(self) -> T_co: pass class Generator(Iterator[T], Generic[T, U, V]): @abstractmethod def send(self, value: U) -> T: pass @abstractmethod def throw(self, typ: Any, val: Any=None, tb: Any=None) -> None: pass @abstractmethod def close(self) -> None: pass @abstractmethod def __iter__(self) -> 'Generator[T, U, V]': pass class AsyncGenerator(AsyncIterator[T], Generic[T, U]): @abstractmethod def __anext__(self) -> Awaitable[T]: pass @abstractmethod def asend(self, value: U) -> Awaitable[T]: pass @abstractmethod def athrow(self, typ: Any, val: Any=None, tb: Any=None) -> Awaitable[T]: pass @abstractmethod def aclose(self) -> Awaitable[T]: pass @abstractmethod def __aiter__(self) -> 'AsyncGenerator[T, U]': pass @runtime class Awaitable(Protocol[T]): @abstractmethod def __await__(self) -> Generator[Any, Any, T]: pass class AwaitableGenerator(Generator[T, U, V], Awaitable[V], Generic[T, U, V, S]): pass @runtime class AsyncIterable(Protocol[T]): @abstractmethod def __aiter__(self) -> 'AsyncIterator[T]': pass @runtime class AsyncIterator(AsyncIterable[T], Protocol): def __aiter__(self) -> 'AsyncIterator[T]': return self @abstractmethod def __anext__(self) -> Awaitable[T]: pass @runtime class Sequence(Iterable[T_co], Protocol): @abstractmethod def __getitem__(self, n: Any) -> T_co: pass @runtime class Mapping(Iterable[T], Protocol[T, T_co]): def __getitem__(self, key: T) -> T_co: pass @overload def get(self, k: T) -> Optional[T_co]: pass @overload def get(self, k: T, default: Union[T_co, V]) -> Union[T_co, V]: pass def values(self) -> Iterable[T_co]: pass # Approximate return type def __len__(self) -> int: ... def __contains__(self, arg: object) -> int: pass @runtime class MutableMapping(Mapping[T, U], Protocol): def __setitem__(self, k: T, v: U) -> None: pass class SupportsInt(Protocol): def __int__(self) -> int: pass def runtime(cls: T) -> T: return cls class ContextManager(Generic[T]): def __enter__(self) -> T: pass # Use Any because not all the precise types are in the fixtures. def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> Any: pass TYPE_CHECKING = 1 mypy-0.560/test-data/unit/fixtures/union.pyi0000644€tŠÔÚ€2›s®0000000053413215007206025263 0ustar jukkaDROPBOX\Domain Users00000000000000# Builtins stub used in tuple-related test cases. from isinstance import isinstance from typing import Iterable, TypeVar, Generic T = TypeVar('T') class object: def __init__(self): pass class type: pass class function: pass class tuple(Generic[T]): pass # We need int for indexing tuples. class int: pass class str: pass # For convenience mypy-0.560/test-data/unit/lib-stub/0000755€tŠÔÚ€2›s®0000000000013215007244023260 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/unit/lib-stub/__builtin__.pyi0000644€tŠÔÚ€2›s®0000000063513215007206026247 0ustar jukkaDROPBOX\Domain Users00000000000000Any = 0 class object: def __init__(self): # type: () -> None pass class type: def __init__(self, x): # type: (Any) -> None pass # These are provided here for convenience. 
class int: pass class float: pass class str: pass class unicode: pass class tuple: pass class function: pass class ellipsis: pass def print(*args, end=''): pass # Definition of None is implicit mypy-0.560/test-data/unit/lib-stub/abc.pyi0000644€tŠÔÚ€2›s®0000000012013215007206024517 0ustar jukkaDROPBOX\Domain Users00000000000000class ABCMeta(type): pass abstractmethod = object() abstractproperty = object() mypy-0.560/test-data/unit/lib-stub/blocker.pyi0000644€tŠÔÚ€2›s®0000000006613215007206025424 0ustar jukkaDROPBOX\Domain Users00000000000000# Stub file that generates a blocking parse error x y mypy-0.560/test-data/unit/lib-stub/blocker2.pyi0000644€tŠÔÚ€2›s®0000000010713215007206025502 0ustar jukkaDROPBOX\Domain Users00000000000000# Stub file that generates a blocking semantic analysis error continue mypy-0.560/test-data/unit/lib-stub/broken.pyi0000644€tŠÔÚ€2›s®0000000005213215007206025256 0ustar jukkaDROPBOX\Domain Users00000000000000# Stub file that generates an error x = y mypy-0.560/test-data/unit/lib-stub/builtins.pyi0000644€tŠÔÚ€2›s®0000000063413215007206025635 0ustar jukkaDROPBOX\Domain Users00000000000000class object: def __init__(self) -> None: pass class type: def __init__(self, x: object) -> None: pass # These are provided here for convenience. class int: def __add__(self, other: 'int') -> 'int': pass class float: pass class str: def __add__(self, other: 'str') -> 'str': pass class bytes: pass class tuple: pass class function: pass class ellipsis: pass # Definition of None is implicit mypy-0.560/test-data/unit/lib-stub/collections.pyi0000644€tŠÔÚ€2›s®0000000004513215007206026316 0ustar jukkaDROPBOX\Domain Users00000000000000import typing namedtuple = object() mypy-0.560/test-data/unit/lib-stub/contextlib.pyi0000644€tŠÔÚ€2›s®0000000057113215007206026157 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Generic, TypeVar, Callable, Iterator from typing import ContextManager as ContextManager _T = TypeVar('_T') class GeneratorContextManager(ContextManager[_T], Generic[_T]): def __call__(self, func: Callable[..., _T]) -> Callable[..., _T]: ... def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., GeneratorContextManager[_T]]: ... mypy-0.560/test-data/unit/lib-stub/enum.pyi0000644€tŠÔÚ€2›s®0000000127413215007206024751 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, TypeVar, Union class Enum: def __new__(cls, value: Any) -> None: pass def __repr__(self) -> str: pass def __str__(self) -> str: pass def __format__(self, format_spec: str) -> str: pass def __hash__(self) -> Any: pass def __reduce_ex__(self, proto: Any) -> Any: pass name = '' # type: str value = None # type: Any class IntEnum(int, Enum): value = 0 # type: int _T = TypeVar('_T') def unique(enumeration: _T) -> _T: pass # In reality Flag and IntFlag are 3.6 only class Flag(Enum): def __or__(self: _T, other: Union[int, _T]) -> _T: pass class IntFlag(int, Flag): def __and__(self: _T, other: Union[int, _T]) -> _T: pass mypy-0.560/test-data/unit/lib-stub/mypy_extensions.pyi0000644€tŠÔÚ€2›s®0000000104613215007206027257 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, Type, TypeVar, Optional, Any _T = TypeVar('_T') def Arg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def DefaultArg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def NamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def DefaultNamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def VarArg(type: _T = ...) -> _T: ... def KwArg(type: _T = ...) -> _T: ... 
def TypedDict(typename: str, fields: Dict[str, Type[_T]], *, total: Any = ...) -> Type[dict]: ... class NoReturn: pass mypy-0.560/test-data/unit/lib-stub/six.pyi0000644€tŠÔÚ€2›s®0000000024613215007206024606 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Type, Callable def with_metaclass(mcls: Type[type], *args: type) -> type: pass def add_metaclass(mcls: Type[type]) -> Callable[[type], type]: pass mypy-0.560/test-data/unit/lib-stub/sys.pyi0000644€tŠÔÚ€2›s®0000000005613215007206024620 0ustar jukkaDROPBOX\Domain Users00000000000000version_info = (0, 0, 0, '', 0) platform = '' mypy-0.560/test-data/unit/lib-stub/types.pyi0000644€tŠÔÚ€2›s®0000000022713215007206025146 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar _T = TypeVar('_T') def coroutine(func: _T) -> _T: pass class bool: ... class ModuleType: __file__ = ... # type: str mypy-0.560/test-data/unit/lib-stub/typing.pyi0000644€tŠÔÚ€2›s®0000000360513215007206025317 0ustar jukkaDROPBOX\Domain Users00000000000000# Stub for typing module. Many of the definitions have special handling in # the type checker, so they can just be initialized to anything. from abc import abstractmethod class GenericMeta(type): pass cast = 0 overload = 0 Any = 0 Union = 0 Optional = 0 TypeVar = 0 Generic = 0 Protocol = 0 # This is not yet defined in typeshed, see PR typeshed/#1220 Tuple = 0 Callable = 0 _promote = 0 NamedTuple = 0 Type = 0 no_type_check = 0 ClassVar = 0 NoReturn = 0 NewType = 0 # Type aliases. List = 0 Dict = 0 Set = 0 T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) T_contra = TypeVar('T_contra', contravariant=True) U = TypeVar('U') V = TypeVar('V') S = TypeVar('S') # Note: definitions below are different from typeshed, variances are declared # to silence the protocol variance checks. Maybe it is better to use type: ignore? @runtime class Container(Protocol[T_contra]): @abstractmethod # Use int because bool isn't in the default test builtins def __contains__(self, arg: T_contra) -> int: pass @runtime class Sized(Protocol): @abstractmethod def __len__(self) -> int: pass @runtime class Iterable(Protocol[T_co]): @abstractmethod def __iter__(self) -> 'Iterator[T_co]': pass @runtime class Iterator(Iterable[T_co], Protocol): @abstractmethod def __next__(self) -> T_co: pass class Generator(Iterator[T], Generic[T, U, V]): @abstractmethod def __iter__(self) -> 'Generator[T, U, V]': pass @runtime class Sequence(Iterable[T_co], Protocol): @abstractmethod def __getitem__(self, n: Any) -> T_co: pass @runtime class Mapping(Protocol[T_contra, T_co]): def __getitem__(self, key: T_contra) -> T_co: pass @runtime class MutableMapping(Mapping[T_contra, U], Protocol): def __setitem__(self, k: T_contra, v: U) -> None: pass class SupportsInt(Protocol): def __int__(self) -> int: pass def runtime(cls: T) -> T: return cls TYPE_CHECKING = 1 mypy-0.560/test-data/unit/lib-stub/typing_extensions.pyi0000644€tŠÔÚ€2›s®0000000014413215007206027571 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar _T = TypeVar('_T') class Protocol: pass def runtime(x: _T) -> _T: pass mypy-0.560/test-data/unit/merge.test0000644€tŠÔÚ€2›s®0000004626013215007206023545 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for AST merge (used for fine-grained incremental checking) -- -- Each test case has two versions of the module 'target' (target.py and -- target.py.next). 
A test cases type checks both of them, merges the ASTs, -- and finally dumps certain parts of the ASTs for both versions (==> -- separates the first and second versions). A test case passes if the -- dumped output is as expected. -- -- The dumped output uses to denote identities of objects. Objects -- suffixed by the same refer to the same object; and (if -- N != M) refer to different objects. The objective of these test cases -- is to verify that identities of publicly visible AST nodes is -- preserved across merge. Other AST nodes may get new identities. -- -- Each test case dumps one of four kinds of information: -- -- 1) ASTs (test case has no magic suffix) -- 2) Symbol tables (_symtable test case name suffix) -- 3) TypeInfos (_typeinfo suffix) -- 4) Inferred types (_types suffix) -- -- If you need to dump multiple different kinds of information, write -- multiple test cases. [case testFunction] import target [file target.py] def f() -> int: pass [file target.py.next] def f() -> int: pass [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py FuncDef:1<2>( f def () -> builtins.int<3> Block:1<4>( PassStmt:2<5>()))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py FuncDef:1<2>( f def () -> builtins.int<3> Block:1<6>( PassStmt:2<7>()))) [case testClass] import target [file target.py] class A: def f(self, x: str) -> int: pass [file target.py.next] class A: def f(self, x: int) -> str: pass [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<2>( A FuncDef:2<3>( f Args( Var(self) Var(x)) def (self: target.A<4>, x: builtins.str<5>) -> builtins.int<6> Block:2<7>( PassStmt:3<8>())))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<9>( A FuncDef:2<3>( f Args( Var(self) Var(x)) def (self: target.A<4>, x: builtins.int<6>) -> builtins.str<5> Block:2<10>( PassStmt:3<11>())))) [case testClass_typeinfo] import target [file target.py] class A: def f(self, x: str) -> int: pass def g(self, x: str) -> int: pass [file target.py.next] class A: def f(self, x: int) -> str: pass def h(self, x: int) -> str: pass [out] TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names( f<2> g<3>)) ==> TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names( f<2> h<4>)) [case testConstructInstance] import target [file target.py] class A: def f(self) -> B: return B() class B: pass [file target.py.next] class B: pass class A: def f(self) -> B: 1 return B() [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<2>( A FuncDef:2<3>( f Args( Var(self)) def (self: target.A<4>) -> target.B<5> Block:2<6>( ReturnStmt:3<7>( CallExpr:3<8>( NameExpr(B [target.B<5>]) Args()))))) ClassDef:4<9>( B PassStmt:4<10>())) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<11>( B PassStmt:1<12>()) ClassDef:2<13>( A FuncDef:3<3>( f Args( Var(self)) def (self: target.A<4>) -> target.B<5> Block:3<14>( ExpressionStmt:4<15>( IntExpr(1)) ReturnStmt:5<16>( CallExpr:5<17>( NameExpr(B [target.B<5>]) Args())))))) [case testCallMethod] import target [file target.py] class A: def f(self) -> None: self.f() [file target.py.next] class A: def f(self) -> None: self.f() [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<2>( A FuncDef:2<3>( f Args( Var(self)) def (self: target.A<4>) Block:2<5>( ExpressionStmt:3<6>( CallExpr:3<7>( MemberExpr:3<8>( NameExpr(self 
[l<9>]) f) Args())))))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<10>( A FuncDef:2<3>( f Args( Var(self)) def (self: target.A<4>) Block:2<11>( ExpressionStmt:3<12>( CallExpr:3<13>( MemberExpr:3<14>( NameExpr(self [l<15>]) f) Args())))))) [case testClassAttribute] import target [file target.py] class A: def f(self) -> None: self.x = 1 self.x [file target.py.next] class A: def f(self) -> None: self.x = 1 self.x [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<2>( A FuncDef:2<3>( f Args( Var(self)) def (self: target.A<4>) Block:2<5>( AssignmentStmt:3<6>( MemberExpr:3<8>( NameExpr(self [l<9>]) x*<7>) IntExpr(1)) ExpressionStmt:4<10>( MemberExpr:4<11>( NameExpr(self [l<9>]) x)))))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<12>( A FuncDef:2<3>( f Args( Var(self)) def (self: target.A<4>) Block:2<13>( AssignmentStmt:3<14>( MemberExpr:3<15>( NameExpr(self [l<16>]) x*<7>) IntExpr(1)) ExpressionStmt:4<17>( MemberExpr:4<18>( NameExpr(self [l<16>]) x)))))) [case testClassAttribute_typeinfo] import target [file target.py] class A: def f(self) -> None: self.x = 1 self.x self.y = A() [file target.py.next] class A: def f(self) -> None: self.x = 1 self.x self.y = A() [out] TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names( f<2> x<3> (builtins.int<4>) y<5> (target.A<0>))) ==> TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names( f<2> x<3> (builtins.int<4>) y<5> (target.A<0>))) [case testFunction_symtable] import target [file target.py] def f() -> int: pass [file target.py.next] def f() -> int: pass [out] __main__: target: MypyFile<0> target: f: FuncDef<1> ==> __main__: target: MypyFile<0> target: f: FuncDef<1> [case testClass_symtable] import target [file target.py] class A: pass class B: pass [file target.py.next] class A: pass class C: pass [out] __main__: target: MypyFile<0> target: A: TypeInfo<1> B: TypeInfo<2> ==> __main__: target: MypyFile<0> target: A: TypeInfo<1> C: TypeInfo<3> [case testTopLevelExpression] import target [file target.py] class A: pass A() [file target.py.next] class A: pass class B: pass A() B() [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<2>( A PassStmt:1<3>()) ExpressionStmt:2<4>( CallExpr:2<5>( NameExpr(A [target.A<6>]) Args()))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py ClassDef:1<7>( A PassStmt:1<8>()) ClassDef:2<9>( B PassStmt:2<10>()) ExpressionStmt:3<11>( CallExpr:3<12>( NameExpr(A [target.A<6>]) Args())) ExpressionStmt:4<13>( CallExpr:4<14>( NameExpr(B [target.B<15>]) Args()))) [case testExpression_types] import target [file target.py] class A: pass def f(a: A) -> None: 1 a [file target.py.next] class A: pass def f(a: A) -> None: a 1 [out] ## target IntExpr:3: builtins.int<0> NameExpr:4: target.A<1> ==> ## target NameExpr:3: target.A<1> IntExpr:4: builtins.int<0> [case testClassAttribute_types] import target [file target.py] class A: def f(self) -> None: self.x = A() self.x self.y = 1 self.y [file target.py.next] class A: def f(self) -> None: self.y = 1 self.y self.x = A() self.x [out] ## target CallExpr:3: target.A<0> MemberExpr:3: target.A<0> NameExpr:3: def () -> target.A<0> NameExpr:3: target.A<0> MemberExpr:4: target.A<0> NameExpr:4: target.A<0> IntExpr:5: builtins.int<1> MemberExpr:5: builtins.int<1> NameExpr:5: target.A<0> MemberExpr:6: builtins.int<1> NameExpr:6: target.A<0> ==> 
## target IntExpr:3: builtins.int<1> MemberExpr:3: builtins.int<1> NameExpr:3: target.A<0> MemberExpr:4: builtins.int<1> NameExpr:4: target.A<0> CallExpr:5: target.A<0> MemberExpr:5: target.A<0> NameExpr:5: def () -> target.A<0> NameExpr:5: target.A<0> MemberExpr:6: target.A<0> NameExpr:6: target.A<0> [case testMethod_types] import target [file target.py] class A: def f(self) -> A: return self.f() [file target.py.next] class A: # Extra line to change line numbers def f(self) -> A: return self.f() [out] ## target CallExpr:3: target.A<0> MemberExpr:3: def () -> target.A<0> NameExpr:3: target.A<0> ==> ## target CallExpr:4: target.A<0> MemberExpr:4: def () -> target.A<0> NameExpr:4: target.A<0> [case testRenameFunction] import target [file target.py] def f() -> int: pass [file target.py.next] def g() -> int: pass [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py FuncDef:1<2>( f def () -> builtins.int<3> Block:1<4>( PassStmt:1<5>()))) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py FuncDef:1<6>( g def () -> builtins.int<3> Block:1<7>( PassStmt:1<8>()))) [case testRenameFunction_symtable] import target [file target.py] def f() -> int: pass [file target.py.next] def g() -> int: pass [out] __main__: target: MypyFile<0> target: f: FuncDef<1> ==> __main__: target: MypyFile<0> target: g: FuncDef<2> [case testMergeWithBaseClass_typeinfo] import target [file target.py] class A: pass class B(A): def f(self) -> None: pass [file target.py.next] class C: pass class A: pass class B(A): def f(self) -> None: pass [out] TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names()) TypeInfo<2>( Name(target.B) Bases(target.A<0>) Mro(target.B<2>, target.A<0>, builtins.object<1>) Names( f<3>)) ==> TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names()) TypeInfo<2>( Name(target.B) Bases(target.A<0>) Mro(target.B<2>, target.A<0>, builtins.object<1>) Names( f<3>)) TypeInfo<4>( Name(target.C) Bases(builtins.object<1>) Mro(target.C<4>, builtins.object<1>) Names()) [case testModuleAttribute] import target [file target.py] x = 1 [file target.py.next] x = 2 [out] MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py AssignmentStmt:1<2>( NameExpr(x [target.x<3>]) IntExpr(1) builtins.int<4>)) ==> MypyFile:1<0>( tmp/main Import:1(target)) MypyFile:1<1>( tmp/target.py AssignmentStmt:1<5>( NameExpr(x [target.x<3>]) IntExpr(2) builtins.int<4>)) [case testNestedClassMethod_typeinfo] import target [file target.py] class A: class B: def f(self) -> None: pass [file target.py.next] class A: class B: def f(self) -> None: pass [out] TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names( B<2>)) TypeInfo<2>( Name(target.A.B) Bases(builtins.object<1>) Mro(target.A.B<2>, builtins.object<1>) Names( f<3>)) ==> TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names( B<2>)) TypeInfo<2>( Name(target.A.B) Bases(builtins.object<1>) Mro(target.A.B<2>, builtins.object<1>) Names( f<3>)) [case testNamedTuple_typeinfo] import target [file target.py] from typing import NamedTuple class A: pass N = NamedTuple('N', [('x', A)]) [file target.py.next] from typing import NamedTuple class A: pass N = NamedTuple('N', [('x', A), ('y', A)]) [out] TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names()) TypeInfo<2>( Name(target.N) Bases(builtins.tuple[target.A<0>]<3>) Mro(target.N<2>, 
builtins.tuple<3>, builtins.object<1>) Names( __annotations__<4> (builtins.object<1>) __doc__<5> (builtins.str<6>) __init__<7> _asdict<8> _field_defaults<9> (builtins.object<1>) _field_types<10> (builtins.object<1>) _fields<11> (Tuple[builtins.str<6>]) _make<12> _replace<13> _source<14> (builtins.str<6>) x<15> (target.A<0>))) ==> TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) Mro(target.A<0>, builtins.object<1>) Names()) TypeInfo<2>( Name(target.N) Bases(builtins.tuple[target.A<0>]<3>) Mro(target.N<2>, builtins.tuple<3>, builtins.object<1>) Names( __annotations__<4> (builtins.object<1>) __doc__<5> (builtins.str<6>) __init__<7> _asdict<8> _field_defaults<9> (builtins.object<1>) _field_types<10> (builtins.object<1>) _fields<11> (Tuple[builtins.str<6>, builtins.str<6>]) _make<12> _replace<13> _source<14> (builtins.str<6>) x<15> (target.A<0>) y<16> (target.A<0>))) [case testUnionType_types] import target [file target.py] from typing import Union class A: pass a: A [file target.py.next] from typing import Union class A: pass a: Union[A, int] [out] ## target TempNode:-1: Any NameExpr:3: target.A<0> ==> ## target TempNode:-1: Any NameExpr:3: Union[target.A<0>, builtins.int<1>] [case testTypeType_types] import target [file target.py] from typing import Type class A: pass a: Type[A] [file target.py.next] from typing import Type class A: pass a: Type[A] [out] ## target TempNode:-1: Any NameExpr:3: Type[target.A<0>] ==> ## target TempNode:-1: Any NameExpr:3: Type[target.A<0>] [case testTypeVar_types] import target [file target.py] from typing import TypeVar, Generic T = TypeVar('T', bound=int) class A(Generic[T]): x: T [file target.py.next] from typing import TypeVar T = TypeVar('T', bound='A') class A(Generic[T]): x: T [out] ## target TempNode:-1: Any CallExpr:2: Any NameExpr:2: Any TypeVarExpr:2: Any NameExpr:4: T`1(upper_bound=builtins.int<0>) ==> ## target TempNode:-1: Any CallExpr:2: Any NameExpr:2: Any TypeVarExpr:2: Any NameExpr:4: T`1(upper_bound=target.A[Any]<1>) [case testUnboundType_types] import target [file target.py] from typing import TypeVar, Generic class A: pass foo: int x: foo[A] [file target.py.next] from typing import TypeVar, Generic class A: pass foo: int x: foo[A] [out] tmp/target.py:4: error: Invalid type "target.foo" ## target TempNode:-1: Any TempNode:-1: Any NameExpr:3: builtins.int<0> NameExpr:4: foo?[A?] ==> ## target TempNode:-1: Any TempNode:-1: Any NameExpr:3: builtins.int<0> NameExpr:4: foo?[A?] [case testOverloaded_types] import target [file target.py] from typing import overload class A: pass @overload def f(x: A) -> A: pass @overload def f(x: int) -> int: pass def f(x): pass g = f [file target.py.next] from typing import overload class A: pass @overload def f(x: A) -> A: pass @overload def f(x: str) -> str: pass def f(x): pass g = f [out] -- TODO: It is unclear why this works correctly... 
## target NameExpr:11: Overload(def (x: target.A<0>) -> target.A<0>, def (x: builtins.int<1>) -> builtins.int<1>) NameExpr:11: Overload(def (x: target.A<0>) -> target.A<0>, def (x: builtins.int<1>) -> builtins.int<1>) ==> ## target NameExpr:12: Overload(def (x: target.A<0>) -> target.A<0>, def (x: builtins.str<2>) -> builtins.str<2>) NameExpr:12: Overload(def (x: target.A<0>) -> target.A<0>, def (x: builtins.str<2>) -> builtins.str<2>) [case testTypeVar_symtable] import target [file target.py] from typing import TypeVar T = TypeVar('T') [file target.py.next] from typing import TypeVar T = TypeVar('T', bound=int) [out] __main__: target: MypyFile<0> target: T: TypeVarExpr<1> TypeVar: Var<2> ==> __main__: target: MypyFile<0> target: T: TypeVarExpr<1> TypeVar: Var<2> [case testTypeAlias_symtable] import target [file target.py] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass X = A[int] [file target.py.next] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass X = A[str] [out] __main__: target: MypyFile<0> target: A: TypeInfo<1> Generic: Var<2> T: TypeVarExpr<3> TypeVar: Var<4> X: Var<5>(type_override=target.A[builtins.int<6>]<1>) ==> __main__: target: MypyFile<0> target: A: TypeInfo<1> Generic: Var<2> T: TypeVarExpr<3> TypeVar: Var<4> X: Var<5>(type_override=target.A[builtins.str<7>]<1>) [case testGenericFunction_types] import target [file target.py] from typing import TypeVar class A: pass T = TypeVar('T', bound=A) def f(x: T) -> T: pass f [file target.py.next] from typing import TypeVar class A: pass T = TypeVar('T', bound=A) def f(x: T, y: A) -> T: pass f [out] ## target CallExpr:3: Any NameExpr:3: Any TypeVarExpr:3: Any NameExpr:5: def [T <: target.A<0>] (x: T`-1(upper_bound=target.A<0>)) -> T`-1(upper_bound=target.A<0>) ==> ## target CallExpr:3: Any NameExpr:3: Any TypeVarExpr:3: Any NameExpr:5: def [T <: target.A<0>] (x: T`-1(upper_bound=target.A<0>), y: target.A<0>) -> T`-1(upper_bound=target.A<0>) [case testMergeOverloaded_types] import target [file target.py] from _x import A a: A [file target.py.next] from _x import A a: A [file _x.pyi] from typing import Generic, TypeVar, overload T = TypeVar('T') class C(Generic[T]): @overload def __init__(self) -> None: pass @overload def __init__(self, x: int) -> None: pass A = C[int] [out] ## target TempNode:-1: Any NameExpr:2: _x.C[builtins.int<0>]<1> ==> ## target TempNode:-1: Any NameExpr:2: _x.C[builtins.int<0>]<1> [case testRefreshVar_symtable] from typing import TypeVar from target import f x = 1 y = '' # type: str [file target.py] f = 1 [file target.py.next] [out] __main__: TypeVar: Var<0> f: Var<1> x: Var<2> y: Var<3> target: f: Var<1> ==> __main__: TypeVar: Var<0> f: Var<4> x: Var<2> y: Var<3> target: [case testRefreshTypeVar_symtable] from typing import TypeVar from target import f T = TypeVar('T') [file target.py] f = 1 [file target.py.next] [out] __main__: T: TypeVarExpr<0> TypeVar: Var<1> f: Var<2> target: f: Var<2> ==> __main__: T: TypeVarExpr<0> TypeVar: Var<1> f: Var<3> target: [case testRefreshNamedTuple_symtable] from typing import NamedTuple from target import f N = NamedTuple('N', [('x', int)]) [file target.py] f = 1 [file target.py.next] [out] __main__: N: TypeInfo<0> NamedTuple: Var<1> f: Var<2> target: f: Var<2> ==> __main__: N: TypeInfo<0> NamedTuple: Var<1> f: Var<3> target: [case testRefreshAttributeDefinedInClassBody_typeinfo] from target import f class A: a = 1 b = '' # type: str [file target.py] f = 1 [file target.py.next] [out] TypeInfo<0>( Name(__main__.A) 
Bases(builtins.object<1>) Mro(__main__.A<0>, builtins.object<1>) Names( a<2> (builtins.int<3>) b<4> (builtins.str<5>))) ==> TypeInfo<0>( Name(__main__.A) Bases(builtins.object<1>) Mro(__main__.A<0>, builtins.object<1>) Names( a<2> (builtins.int<3>) b<4> (builtins.str<5>))) mypy-0.560/test-data/unit/parse-errors.test0000644€tŠÔÚ€2›s®0000002116713215007206025071 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for parser errors. Each test case consists of two sections. -- The first section contains [case NAME] followed by the input code, while -- the second section contains [out] followed by the output from the parser. -- -- The input file name in errors is "file". -- -- Comments starting with "--" in this file will be ignored, except for lines -- starting with "----" that are not ignored. The first two dashes of these -- lines are interpreted as escapes and removed. [case testInvalidFunction] def f() pass [out] file:1: error: invalid syntax [case testMissingIndent] if x: 1 [out] file:2: error: invalid syntax [case testUnexpectedIndent] 1 2 [out] file:2: error: unexpected indent [case testInconsistentIndent] if x: 1 1 [out] file:3: error: unexpected indent [case testInconsistentIndent2] if x: 1 1 [out] file:3: error: unindent does not match any outer indentation level [case testInvalidBinaryOp] 1> a* a+1* [out] file:1: error: invalid syntax [case testDoubleStar] **a [out] file:1: error: invalid syntax [case testInvalidSuperClass] class A(C[): pass [out] file:1: error: invalid syntax [case testMissingSuperClass] class A(: pass [out] file:1: error: invalid syntax [case testUnexpectedEof] if 1: [out] file:1: error: unexpected EOF while parsing [case testInvalidKeywordArguments1] f(x=y, z) [out] file:1: error: positional argument follows keyword argument [case testInvalidKeywordArguments2] f(**x, y) [out] file:1: error: positional argument follows keyword argument unpacking [case testInvalidBareAsteriskAndVarArgs2] def f(*x: A, *) -> None: pass [out] file:1: error: invalid syntax [case testInvalidBareAsteriskAndVarArgs3] def f(*, *x: A) -> None: pass [out] file:1: error: invalid syntax [case testInvalidBareAsteriskAndVarArgs4] def f(*, **x: A) -> None: pass [out] file:1: error: named arguments must follow bare * [case testInvalidBareAsterisk1] def f(*) -> None: pass [out] file:1: error: named arguments must follow bare * [case testInvalidBareAsterisk2] def f(x, *) -> None: pass [out] file:1: error: named arguments must follow bare * [case testInvalidFuncDefArgs1] def f(x = y, x): pass [out] file:1: error: non-default argument follows default argument [case testInvalidFuncDefArgs3] def f(**x, y): pass [out] file:1: error: invalid syntax [case testInvalidFuncDefArgs4] def f(**x, y=x): pass [out] file:1: error: invalid syntax [case testInvalidStringLiteralType] def f(x: 'A[' ) -> None: pass [out] file:1: error: syntax error in type comment [case testInvalidStringLiteralType2] def f(x: 'A B' ) -> None: pass [out] file:1: error: syntax error in type comment [case testInvalidTypeComment] 0 x = 0 # type: A A [out] file:2: error: syntax error in type comment [case testInvalidTypeComment2] 0 x = 0 # type: A[ [out] file:2: error: syntax error in type comment [case testInvalidTypeComment3] 0 x = 0 # type: [out] file:2: error: syntax error in type comment [case testInvalidTypeComment4] 0 x = 0 # type: * [out] file:2: error: syntax error in type comment [case testInvalidMultilineLiteralType] def f() -> "A\nB": pass [out] file:1: error: syntax error in type comment [case 
testInvalidSignatureInComment1] def f(): # type: x pass [out] file:1: error: syntax error in type comment file:1: note: Suggestion: wrap argument types in parentheses [case testInvalidSignatureInComment2] def f(): # type: pass [out] file:1: error: syntax error in type comment [case testInvalidSignatureInComment3] def f(): # type: ( pass [out] file:1: error: syntax error in type comment [case testInvalidSignatureInComment4] def f(): # type: (. pass [out] file:1: error: syntax error in type comment [case testInvalidSignatureInComment5] def f(): # type: (x pass [out] file:1: error: syntax error in type comment [case testInvalidSignatureInComment6] def f(): # type: (x) pass [out] file:1: error: syntax error in type comment [case testInvalidSignatureInComment7] def f(): # type: (x) - pass [out] file:1: error: syntax error in type comment [case testInvalidSignatureInComment8] def f(): # type: (x) -> pass [out] file:1: error: syntax error in type comment [case testInvalidSignatureInComment9] def f(): # type: (x) -> . pass [out] file:1: error: syntax error in type comment [case testInvalidSignatureInComment10] def f(): # type: (x) -> x x pass [out] file:1: error: syntax error in type comment [case testDuplicateSignatures1] def f() -> None: # type: () -> None pass def f(): # type: () -> None pass [out] file:1: error: Function has duplicate type signatures [case testDuplicateSignatures2] def f(x, y: Z): # type: (x, y) -> z pass [out] file:1: error: Function has duplicate type signatures [case testTooManyTypes] def f(x, y): # type: (X, Y, Z) -> z pass [out] file:1: error: Type signature has too many arguments [case testTooFewTypes] def f(x, y): # type: (X) -> z pass [out] file:1: error: Type signature has too few arguments [case testCommentFunctionAnnotationVarArgMispatch-skip] # see mypy issue #1997 def f(x): # type: (*X) -> Y pass def g(*x): # type: (X) -> Y pass [out] file:1: error: Inconsistent use of '*' in function signature file:3: error: Inconsistent use of '*' in function signature [case testCommentFunctionAnnotationVarArgMispatch2-skip] # see mypy issue #1997 def f(*x, **y): # type: (**X, *Y) -> Z pass def g(*x, **y): # type: (*X, *Y) -> Z pass [out] file:1: error: Inconsistent use of '*' in function signature file:3: error: syntax error in type comment file:3: error: Inconsistent use of '*' in function signature file:3: error: Inconsistent use of '**' in function signature [case testPrintStatementInPython3-skip] print 1 [out] file:1: error: Missing parentheses in call to 'print' [case testInvalidConditionInConditionalExpression] 1 if 2, 3 else 4 [out] file:1: error: invalid syntax [case testInvalidConditionInConditionalExpression2] 1 if x for y in z else 4 [out] file:1: error: invalid syntax [case testInvalidConditionInConditionalExpression2] 1 if x else for y in z [out] file:1: error: invalid syntax [case testYieldFromNotRightParameter] def f(): yield from [out] file:2: error: invalid syntax [case testYieldFromAfterReturn] def f(): return yield from h() [out] file:2: error: invalid syntax [case testImportDotModule] import .x [out] file:1: error: invalid syntax [case testImportDot] import . 
[out] file:1: error: invalid syntax [case testInvalidFunctionName] def while(): pass [out] file:1: error: invalid syntax [case testInvalidEllipsis1] ...0 ..._ ...a [out] file:1: error: invalid syntax [case testBlockStatementInSingleLineIf] if 1: if 2: pass [out] file:1: error: invalid syntax [case testBlockStatementInSingleLineIf2] if 1: while 2: pass [out] file:1: error: invalid syntax [case testBlockStatementInSingleLineIf3] if 1: for x in y: pass [out] file:1: error: invalid syntax [case testUnexpectedEllipsis] a = a... [out] file:1: error: invalid syntax [case testParseErrorBeforeUnicodeLiteral] x u'y' [out] file:1: error: invalid syntax [case testParseErrorInExtendedSlicing] x[:, [out] file:1: error: unexpected EOF while parsing [case testParseErrorInExtendedSlicing2] x[:,:: [out] file:1: error: unexpected EOF while parsing [case testParseErrorInExtendedSlicing3] x[:,: [out] file:1: error: unexpected EOF while parsing [case testPython2OctalIntLiteralInPython3] 0377 [out] file:1: error: invalid token [case testInvalidEncoding] # foo # coding: uft-8 [out] file:0: error: unknown encoding: uft-8 [case testInvalidEncoding2] # coding=Uft.8 [out] file:0: error: unknown encoding: Uft.8 [case testInvalidEncoding3] #!/usr/bin python # vim: set fileencoding=uft8 : [out] file:0: error: unknown encoding: uft8 [case testDoubleEncoding] # coding: uft8 # coding: utf8 # The first coding cookie should be used and fail. [out] file:0: error: unknown encoding: uft8 [case testDoubleEncoding2] # Again the first cookie should be used and fail. # coding: uft8 # coding: utf8 [out] file:0: error: unknown encoding: uft8 [case testLongLiteralInPython3] 2L 0x2L [out] file:1: error: invalid syntax [case testPython2LegacyInequalityInPython3] 1 <> 2 [out] file:1: error: invalid syntax [case testLambdaInListComprehensionInPython3] ([ 0 for x in 1, 2 if 3 ]) [out] file:1: error: invalid syntax [case testTupleArgListInPython3] def f(x, (y, z)): pass [out] file:1: error: invalid syntax [case testBackquoteInPython3] `1 + 2` [out] file:1: error: invalid syntax [case testSmartQuotes] foo = ‘bar’ [out] file:1: error: invalid character in identifier [case testExceptCommaInPython3] try: pass except KeyError, IndexError: pass [out] file:3: error: invalid syntax [case testLocalVarWithTypeOnNextLine] x = 0 # type: int [out] file:2: error: misplaced type annotation mypy-0.560/test-data/unit/parse-python2.test0000644€tŠÔÚ€2›s®0000001521513215007206025155 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for parser -- Python 2 syntax. -- -- See parse.test for a description of this file format. 
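-- Note (illustration, not a test case): the [out] dumps below are mypy's own
-- AST rendering. In mypy 0.560, Python 2 syntax is first parsed with the
-- typed-ast package and then converted into mypy nodes. A minimal standalone
-- sketch of that first stage, assuming typed-ast is installed and exposes the
-- usual ast-module-style helpers, looks like this:
--
--     from typed_ast import ast27            # Python 2 grammar
--     tree = ast27.parse("print 1, 2\n")     # a Python 2 print statement
--     print(ast27.dump(tree))                # typed_ast's view of the code;
--                                            # mypy converts this into the
--                                            # MypyFile(...) dumps used below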
[case testEmptyFile] [out] MypyFile:1() [case testStringLiterals] 'bar' u'foo' ur'foo' u'''bar''' b'foo' [out] MypyFile:1( ExpressionStmt:1( StrExpr(bar)) ExpressionStmt:2( UnicodeExpr(foo)) ExpressionStmt:3( UnicodeExpr(foo)) ExpressionStmt:4( UnicodeExpr(bar)) ExpressionStmt:5( StrExpr(foo))) [case testSimplePrint] print 1 print 2, 3 print (4, 5) [out] MypyFile:1( PrintStmt:1( IntExpr(1) Newline) PrintStmt:2( IntExpr(2) IntExpr(3) Newline) PrintStmt:3( TupleExpr:3( IntExpr(4) IntExpr(5)) Newline)) [case testPrintWithNoArgs] print [out] MypyFile:1( PrintStmt:1( Newline)) [case testPrintWithTarget] print >>foo [out] MypyFile:1( PrintStmt:1( Target( NameExpr(foo)) Newline)) [case testPrintWithTargetAndArgs] print >>foo, x [out] MypyFile:1( PrintStmt:1( NameExpr(x) Target( NameExpr(foo)) Newline)) [case testPrintWithTargetAndArgsAndTrailingComma] print >>foo, x, y, [out] MypyFile:1( PrintStmt:1( NameExpr(x) NameExpr(y) Target( NameExpr(foo)))) [case testSimpleWithTrailingComma] print 1, print 2, 3, print (4, 5), [out] MypyFile:1( PrintStmt:1( IntExpr(1)) PrintStmt:2( IntExpr(2) IntExpr(3)) PrintStmt:3( TupleExpr:3( IntExpr(4) IntExpr(5)))) [case testOctalIntLiteral] 00 01 0377 [out] MypyFile:1( ExpressionStmt:1( IntExpr(0)) ExpressionStmt:2( IntExpr(1)) ExpressionStmt:3( IntExpr(255))) [case testLongLiteral-skip] # see typed_ast issue #26 0L 123L 012L 0x123l [out] MypyFile:1( ExpressionStmt:1( IntExpr(0)) ExpressionStmt:2( IntExpr(123)) ExpressionStmt:3( IntExpr(10)) ExpressionStmt:4( IntExpr(291))) [case testTryExceptWithComma] try: x except Exception, e: y [out] MypyFile:1( TryStmt:1( Block:1( ExpressionStmt:2( NameExpr(x))) NameExpr(Exception) NameExpr(e) Block:3( ExpressionStmt:4( NameExpr(y))))) [case testTryExceptWithNestedComma] try: x except (KeyError, IndexError): y [out] MypyFile:1( TryStmt:1( Block:1( ExpressionStmt:2( NameExpr(x))) TupleExpr:3( NameExpr(KeyError) NameExpr(IndexError)) Block:3( ExpressionStmt:4( NameExpr(y))))) [case testExecStatement] exec a [out] MypyFile:1( ExecStmt:1( NameExpr(a))) [case testExecStatementWithIn] exec a in globals() [out] MypyFile:1( ExecStmt:1( NameExpr(a) CallExpr:1( NameExpr(globals) Args()))) [case testExecStatementWithInAnd2Expressions] exec a in x, y [out] MypyFile:1( ExecStmt:1( NameExpr(a) NameExpr(x) NameExpr(y))) [case testEllipsisInExpression_python2] x = ... 
# E: invalid syntax [out] [case testStrLiteralConcatenationWithMixedLiteralTypes] u'foo' 'bar' 'bar' u'foo' [out] MypyFile:1( ExpressionStmt:1( UnicodeExpr(foobar)) ExpressionStmt:2( UnicodeExpr(barfoo))) [case testLegacyInequality] 1 <> 2 [out] MypyFile:1( ExpressionStmt:1( ComparisonExpr:1( != IntExpr(1) IntExpr(2)))) [case testLambdaInListComprehensionInPython2] ([ 0 for x in 1, 2 if 3 ]) [out] MypyFile:1( ExpressionStmt:1( ListComprehension:1( GeneratorExpr:1( IntExpr(0) NameExpr(x) TupleExpr:1( IntExpr(1) IntExpr(2)) IntExpr(3))))) [case testTupleArgListInPython2] def f(x, (y, z)): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x) Var(__tuple_arg_2)) Block:1( AssignmentStmt:1( TupleExpr:1( NameExpr(y) NameExpr(z)) NameExpr(__tuple_arg_2)) PassStmt:1()))) [case testTupleArgListWithTwoTupleArgsInPython2] def f((x, y), (z, zz)): pass [out] MypyFile:1( FuncDef:1( f Args( Var(__tuple_arg_1) Var(__tuple_arg_2)) Block:1( AssignmentStmt:1( TupleExpr:1( NameExpr(x) NameExpr(y)) NameExpr(__tuple_arg_1)) AssignmentStmt:1( TupleExpr:1( NameExpr(z) NameExpr(zz)) NameExpr(__tuple_arg_2)) PassStmt:1()))) [case testTupleArgListWithInitializerInPython2] def f((y, z) = (1, 2)): pass [out] MypyFile:1( FuncDef:1( f Args( default( Var(__tuple_arg_1) TupleExpr:1( IntExpr(1) IntExpr(2)))) Block:1( AssignmentStmt:1( TupleExpr:1( NameExpr(y) NameExpr(z)) NameExpr(__tuple_arg_1)) PassStmt:1()))) [case testLambdaTupleArgListInPython2] lambda (x, y): z [out] MypyFile:1( ExpressionStmt:1( LambdaExpr:1( Args( Var(__tuple_arg_1)) Block:1( AssignmentStmt:1( TupleExpr:1( NameExpr(x) NameExpr(y)) NameExpr(__tuple_arg_1)) ReturnStmt:1( NameExpr(z)))))) [case testLambdaSingletonTupleArgListInPython2] lambda (x,): z [out] MypyFile:1( ExpressionStmt:1( LambdaExpr:1( Args( Var(__tuple_arg_1)) Block:1( AssignmentStmt:1( TupleExpr:1( NameExpr(x)) NameExpr(__tuple_arg_1)) ReturnStmt:1( NameExpr(z)))))) [case testLambdaNoTupleArgListInPython2] lambda (x): z [out] MypyFile:1( ExpressionStmt:1( LambdaExpr:1( Args( Var(x)) Block:1( ReturnStmt:1( NameExpr(z)))))) [case testInvalidExprInTupleArgListInPython2_1] def f(x, ()): pass [out] main:1: error: invalid syntax [case testInvalidExprInTupleArgListInPython2_2] def f(x, (y, x[1])): pass [out] main:1: error: invalid syntax [case testListLiteralAsTupleArgInPython2] def f(x, [x]): pass [out] main:1: error: invalid syntax [case testTupleArgAfterStarArgInPython2] def f(*a, (b, c)): pass [out] main:1: error: invalid syntax [case testTupleArgAfterStarStarArgInPython2] def f(*a, (b, c)): pass [out] main:1: error: invalid syntax [case testParenthesizedArgumentInPython2] def f(x, (y)): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x) Var(y)) Block:1( PassStmt:1()))) [case testDuplicateNameInTupleArgList_python2] def f(a, (a, b)): pass def g((x, (x, y))): pass [out] main:1: error: Duplicate argument 'a' in function definition main:3: error: Duplicate argument 'x' in function definition [case testBackquotesInPython2] `1 + 2` [out] MypyFile:1( ExpressionStmt:1( BackquoteExpr:1( OpExpr:1( + IntExpr(1) IntExpr(2))))) [case testBackquoteSpecialCasesInPython2] `1, 2` [out] MypyFile:1( ExpressionStmt:1( BackquoteExpr:1( TupleExpr:1( IntExpr(1) IntExpr(2))))) mypy-0.560/test-data/unit/parse.test0000644€tŠÔÚ€2›s®0000015133213215007206023555 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for parser. Each test case consists of two sections. -- The first section contains [case NAME] followed by the input code, while -- the second section contains [out] followed by the output from the parser. 
-- -- Lines starting with "--" in this file will be ignored, except for lines -- starting with "----" that are not ignored. The first two dashes of these -- lines are interpreted as escapes and removed. [case testEmptyFile] [out] MypyFile:1() [case testExpressionStatement] 1 [out] MypyFile:1( ExpressionStmt:1( IntExpr(1))) [case testAssignment] x = 1 [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(1))) [case testExpressionBasics] x = f(1, None) 123 * (2 + x) "hello".lower() -1.23 [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) CallExpr:1( NameExpr(f) Args( IntExpr(1) NameExpr(None)))) ExpressionStmt:2( OpExpr:2( * IntExpr(123) OpExpr:2( + IntExpr(2) NameExpr(x)))) ExpressionStmt:3( CallExpr:3( MemberExpr:3( StrExpr(hello) lower) Args())) ExpressionStmt:4( UnaryExpr:4( - FloatExpr(1.23)))) [case testSingleQuotedStr] '' 'foo' 'foo\ bar' [out] MypyFile:1( ExpressionStmt:1( StrExpr()) ExpressionStmt:2( StrExpr(foo)) ExpressionStmt:3( StrExpr(foobar))) [case testDoubleQuotedStr] "" "foo" "foo\ bar" [out] MypyFile:1( ExpressionStmt:1( StrExpr()) ExpressionStmt:2( StrExpr(foo)) ExpressionStmt:3( StrExpr(foobar))) [case testTripleQuotedStr] '''''' '''foo''' '''foo\ bar''' '''\nfoo bar''' '''fo''bar''' """""" """foo""" """foo\ bar""" """\nfoo bar""" """fo""bar""" [out] MypyFile:1( ExpressionStmt:1( StrExpr()) ExpressionStmt:2( StrExpr(foo)) ExpressionStmt:3( StrExpr(foobar)) ExpressionStmt:5( StrExpr(\u000afoo\u000abar)) ExpressionStmt:6( StrExpr(fo''bar)) ExpressionStmt:7( StrExpr()) ExpressionStmt:8( StrExpr(foo)) ExpressionStmt:9( StrExpr(foobar)) ExpressionStmt:11( StrExpr(\u000afoo\u000abar)) ExpressionStmt:12( StrExpr(fo""bar))) [case testRawStr] r'x\n\'' r"x\n\"" [out] MypyFile:1( ExpressionStmt:1( StrExpr(x\n\')) ExpressionStmt:2( StrExpr(x\n\"))) --" fix syntax highlight [case testBytes] b'foo' b"foo\ bar" br'x\n\'' [out] MypyFile:1( ExpressionStmt:1( BytesExpr(foo)) ExpressionStmt:2( BytesExpr(foobar)) ExpressionStmt:3( BytesExpr(x\\n\\'))) [case testEscapesInStrings] '\r\n\t\x2f\u123f' b'\r\n\t\x2f\u123f' [out] MypyFile:1( ExpressionStmt:1( StrExpr(\u000d\u000a\u0009/\u123f)) ExpressionStmt:2( BytesExpr(\r\n\t/\\\u123f))) -- Note \\u in the b'...' 
case (\u sequence not translated) [case testEscapedQuote] '\'' [out] MypyFile:1( ExpressionStmt:1( StrExpr('))) --' [case testOctalEscapes] '\0\1\177\1234' b'\1\476' [out] MypyFile:1( ExpressionStmt:1( StrExpr(\u0000\u0001\u007fS4)) ExpressionStmt:2( BytesExpr(\x01>))) [case testUnicodeLiteralInPython3] u'foo' [out] MypyFile:1( ExpressionStmt:1( StrExpr(foo))) [case testArrays] a = [] a = [1, 2] a[[1]] = a[2] [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) ListExpr:1()) AssignmentStmt:2( NameExpr(a) ListExpr:2( IntExpr(1) IntExpr(2))) AssignmentStmt:3( IndexExpr:3( NameExpr(a) ListExpr:3( IntExpr(1))) IndexExpr:3( NameExpr(a) IntExpr(2)))) [case testTuples] () (1,) (1, foo) a, b = 1, (2, 3) [out] MypyFile:1( ExpressionStmt:1( TupleExpr:1()) ExpressionStmt:2( TupleExpr:2( IntExpr(1))) ExpressionStmt:3( TupleExpr:3( IntExpr(1) NameExpr(foo))) AssignmentStmt:4( TupleExpr:4( NameExpr(a) NameExpr(b)) TupleExpr:4( IntExpr(1) TupleExpr:4( IntExpr(2) IntExpr(3))))) [case testSimpleFunction] def main(): 1 [out] MypyFile:1( FuncDef:1( main Block:1( ExpressionStmt:2( IntExpr(1))))) [case testPass] def f(): pass [out] MypyFile:1( FuncDef:1( f Block:1( PassStmt:2()))) [case testIf] if 1: 2 [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( ExpressionStmt:2( IntExpr(2))))) [case testIfElse] if 1: 2 else: 3 [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( ExpressionStmt:2( IntExpr(2))) Else( ExpressionStmt:4( IntExpr(3))))) [case testIfElif] if 1: 2 elif 3: 4 elif 5: 6 else: 7 [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( ExpressionStmt:2( IntExpr(2))) Else( IfStmt:3( If( IntExpr(3)) Then( ExpressionStmt:4( IntExpr(4))) Else( IfStmt:5( If( IntExpr(5)) Then( ExpressionStmt:6( IntExpr(6))) Else( ExpressionStmt:8( IntExpr(7))))))))) [case testWhile] while 1: pass [out] MypyFile:1( WhileStmt:1( IntExpr(1) Block:1( PassStmt:2()))) [case testReturn] def f(): return 1 [out] MypyFile:1( FuncDef:1( f Block:1( ReturnStmt:2( IntExpr(1))))) [case testReturnWithoutValue] def f(): return [out] MypyFile:1( FuncDef:1( f Block:1( ReturnStmt:2()))) [case testBreak] while 1: break [out] MypyFile:1( WhileStmt:1( IntExpr(1) Block:1( BreakStmt:2()))) [case testLargeBlock] if 1: x = 1 while 2: pass y = 2 [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( AssignmentStmt:2( NameExpr(x) IntExpr(1)) WhileStmt:3( IntExpr(2) Block:3( PassStmt:4())) AssignmentStmt:5( NameExpr(y) IntExpr(2))))) [case testSimpleClass] class A: def f(self): pass [out] MypyFile:1( ClassDef:1( A FuncDef:2( f Args( Var(self)) Block:2( PassStmt:3())))) [case testGlobalVarWithType] x = 0 # type: int y = False # type: bool [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(0) int?) AssignmentStmt:2( NameExpr(y) NameExpr(False) bool?)) [case testLocalVarWithType] def f(): x = 0 # type: int y = False # type: bool a = None # type: Any [out] MypyFile:1( FuncDef:1( f Block:1( AssignmentStmt:2( NameExpr(x) IntExpr(0) int?) AssignmentStmt:3( NameExpr(y) NameExpr(False) bool?) AssignmentStmt:4( NameExpr(a) NameExpr(None) Any?)))) [case testFunctionDefWithType] def f(y: str) -> int: return class A: def f(self, a: int, b: Any) -> x: pass def g(self) -> Any: pass [out] MypyFile:1( FuncDef:1( f Args( Var(y)) def (y: str?) -> int? Block:1( ReturnStmt:2())) ClassDef:3( A FuncDef:4( f Args( Var(self) Var(a) Var(b)) def (self: Any, a: int?, b: Any?) -> x? Block:4( PassStmt:5())) FuncDef:6( g Args( Var(self)) def (self: Any) -> Any? Block:6( PassStmt:7())))) [case testFuncWithNoneReturn] def f() -> None: pass [out] MypyFile:1( FuncDef:1( f def () -> None? 
Block:1( PassStmt:2()))) [case testVarDefWithGenericType] x = None # type: List[str] y = None # type: Dict[int, Any] [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) NameExpr(None) List?[str?]) AssignmentStmt:2( NameExpr(y) NameExpr(None) Dict?[int?, Any?])) [case testSignatureWithGenericTypes] def f(y: t[Any, x]) -> a[b[c], d]: pass [out] MypyFile:1( FuncDef:1( f Args( Var(y)) def (y: t?[Any?, x?]) -> a?[b?[c?], d?] Block:1( PassStmt:2()))) [case testParsingExpressionsWithLessAndGreaterThan] # The operators < > can sometimes be confused with generic types. x = a < b > c f(x < b, y > c) a < b > 1 x < b, y > 2 (a < b > c) [out] MypyFile:1( AssignmentStmt:2( NameExpr(x) ComparisonExpr:2( < > NameExpr(a) NameExpr(b) NameExpr(c))) ExpressionStmt:3( CallExpr:3( NameExpr(f) Args( ComparisonExpr:3( < NameExpr(x) NameExpr(b)) ComparisonExpr:3( > NameExpr(y) NameExpr(c))))) ExpressionStmt:4( ComparisonExpr:4( < > NameExpr(a) NameExpr(b) IntExpr(1))) ExpressionStmt:5( TupleExpr:5( ComparisonExpr:5( < NameExpr(x) NameExpr(b)) ComparisonExpr:5( > NameExpr(y) IntExpr(2)))) ExpressionStmt:6( ComparisonExpr:6( < > NameExpr(a) NameExpr(b) NameExpr(c)))) [case testLineContinuation] if (1 + 2): pass [out] MypyFile:1( IfStmt:1( If( OpExpr:1( + IntExpr(1) IntExpr(2))) Then( PassStmt:3()))) [case testMultipleVarDef] x, y = z # type: int, a[c] [out] MypyFile:1( AssignmentStmt:1( TupleExpr:1( NameExpr(x) NameExpr(y)) NameExpr(z) Tuple[int?, a?[c?]])) [case testMultipleVarDef2] (xx, z, i) = 1 # type: (a[c], Any, int) [out] MypyFile:1( AssignmentStmt:1( TupleExpr:1( NameExpr(xx) NameExpr(z) NameExpr(i)) IntExpr(1) Tuple[a?[c?], Any?, int?])) [case testMultipleVarDef3] (xx, (z, i)) = 1 # type: (a[c], (Any, int)) [out] MypyFile:1( AssignmentStmt:1( TupleExpr:1( NameExpr(xx) TupleExpr:1( NameExpr(z) NameExpr(i))) IntExpr(1) Tuple[a?[c?], Tuple[Any?, int?]])) [case testAnnotateAssignmentViaSelf] class A: def __init__(self): self.x = 1 # type: int [out] MypyFile:1( ClassDef:1( A FuncDef:2( __init__ Args( Var(self)) Block:2( AssignmentStmt:3( MemberExpr:3( NameExpr(self) x) IntExpr(1) int?))))) [case testCommentAfterTypeComment] x = 0 # type: int # bar! [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(0) int?)) [case testMultilineAssignmentAndAnnotations] (x, y) = (1, 2) # type: foo, bar [out] MypyFile:1( AssignmentStmt:1( TupleExpr:1( NameExpr(x) NameExpr(y)) TupleExpr:2( IntExpr(1) IntExpr(2)) Tuple[foo?, bar?])) [case testWhitespaceAndCommentAnnotation] x = 1#type:int [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(1) int?)) [case testWhitespaceAndCommentAnnotation2] x = 1# type: int [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(1) int?)) [case testWhitespaceAndCommentAnnotation3] x = 1# type : int # not recognized! 
[out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(1))) [case testInvalidAnnotation] x=1 ##type: int y=1 #.type: int z=1 # Type: int [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) IntExpr(1)) AssignmentStmt:2( NameExpr(y) IntExpr(1)) AssignmentStmt:3( NameExpr(z) IntExpr(1))) [case testEmptyClass] class C: pass [out] MypyFile:1( ClassDef:1( C PassStmt:2())) [case testOperatorPrecedence] a | b ^ c a & b << c [out] MypyFile:1( ExpressionStmt:1( OpExpr:1( | NameExpr(a) OpExpr:1( ^ NameExpr(b) NameExpr(c)))) ExpressionStmt:2( OpExpr:2( & NameExpr(a) OpExpr:2( << NameExpr(b) NameExpr(c))))) [case testOperatorAssociativity] 1 - 2 + 3 1 << 2 << 3 [out] MypyFile:1( ExpressionStmt:1( OpExpr:1( + OpExpr:1( - IntExpr(1) IntExpr(2)) IntExpr(3))) ExpressionStmt:2( OpExpr:2( << OpExpr:2( << IntExpr(1) IntExpr(2)) IntExpr(3)))) [case testUnaryOperators] -2 * +3 * ~3 * 2 ~3**2 [out] MypyFile:1( ExpressionStmt:1( OpExpr:1( * OpExpr:1( * OpExpr:1( * UnaryExpr:1( - IntExpr(2)) UnaryExpr:1( + IntExpr(3))) UnaryExpr:1( ~ IntExpr(3))) IntExpr(2))) ExpressionStmt:2( UnaryExpr:2( ~ OpExpr:2( ** IntExpr(3) IntExpr(2))))) [case testSingleLineBodies] if 1: pass while 1: pass def f(): pass def g() -> int: return 1 [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( PassStmt:1())) WhileStmt:2( IntExpr(1) Block:2( PassStmt:2())) FuncDef:3( f Block:3( PassStmt:3())) FuncDef:4( g def () -> int? Block:4( ReturnStmt:4( IntExpr(1))))) [case testForStatement] for x in y: pass for x, (y, w) in z: 1 for [x, (y, w)] in z: 1 [out] MypyFile:1( ForStmt:1( NameExpr(x) NameExpr(y) Block:1( PassStmt:2())) ForStmt:3( TupleExpr:3( NameExpr(x) TupleExpr:3( NameExpr(y) NameExpr(w))) NameExpr(z) Block:3( ExpressionStmt:4( IntExpr(1)))) ForStmt:5( ListExpr:5( NameExpr(x) TupleExpr:5( NameExpr(y) NameExpr(w))) NameExpr(z) Block:5( ExpressionStmt:6( IntExpr(1))))) [case testGlobalDecl] global x def f(): global x, y [out] MypyFile:1( GlobalDecl:1( x) FuncDef:2( f Block:2( GlobalDecl:3( x y)))) [case testNonlocalDecl] def f(): def g(): nonlocal x, y [out] MypyFile:1( FuncDef:1( f Block:1( FuncDef:2( g Block:2( NonlocalDecl:3( x y)))))) [case testRaiseStatement] raise foo [out] MypyFile:1( RaiseStmt:1( NameExpr(foo))) [case testRaiseWithoutArg] try: pass except: raise [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) Block:3( RaiseStmt:4()))) [case testRaiseFrom] raise e from x [out] MypyFile:1( RaiseStmt:1( NameExpr(e) NameExpr(x))) [case testBaseclasses] class A(B): pass class A(B[T], C[Any, d[x]]): pass [out] MypyFile:1( ClassDef:1( A BaseTypeExpr( NameExpr(B)) PassStmt:2()) ClassDef:3( A BaseTypeExpr( IndexExpr:3( NameExpr(B) NameExpr(T)) IndexExpr:3( NameExpr(C) TupleExpr:3( NameExpr(Any) IndexExpr:3( NameExpr(d) NameExpr(x))))) PassStmt:4())) [case testIsNot] x is not y [out] MypyFile:1( ExpressionStmt:1( ComparisonExpr:1( is not NameExpr(x) NameExpr(y)))) [case testNotIn] x not in y not x not in y x not in y | z [out] MypyFile:1( ExpressionStmt:1( ComparisonExpr:1( not in NameExpr(x) NameExpr(y))) ExpressionStmt:2( UnaryExpr:2( not ComparisonExpr:2( not in NameExpr(x) NameExpr(y)))) ExpressionStmt:3( ComparisonExpr:3( not in NameExpr(x) OpExpr:3( | NameExpr(y) NameExpr(z))))) [case testNotAsBinaryOp] x not y # E: invalid syntax [out] [case testNotIs] x not is y # E: invalid syntax [out] [case testBinaryNegAsBinaryOp] 1 ~ 2 # E: invalid syntax [out] [case testDictionaryExpression] {} {1:x} {1:x, 2 or 1:2 and 3} [out] MypyFile:1( ExpressionStmt:1( DictExpr:1()) ExpressionStmt:2( DictExpr:2( IntExpr(1) NameExpr(x))) ExpressionStmt:3( 
DictExpr:3( IntExpr(1) NameExpr(x) OpExpr:3( or IntExpr(2) IntExpr(1)) OpExpr:3( and IntExpr(2) IntExpr(3))))) [case testImport] import x import y.z.foo, __foo__.bar [out] MypyFile:1( Import:1(x) Import:2(y.z.foo, __foo__.bar)) [case testVariableTypeWithQualifiedName] x = None # type: x.y [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) NameExpr(None) x.y?)) [case testTypeInSignatureWithQualifiedName] def f() -> x.y[a.b.c]: pass [out] MypyFile:1( FuncDef:1( f def () -> x.y?[a.b.c?] Block:1( PassStmt:1()))) [case testImportFrom] from m import x from m.n import x, y, z [out] MypyFile:1( ImportFrom:1(m, [x]) ImportFrom:2(m.n, [x, y, z])) [case testImportFromAs] from m import x as y from x import y, z as a, c as c [out] MypyFile:1( ImportFrom:1(m, [x : y]) ImportFrom:2(x, [y, z : a, c : c])) [case testImportStar] from x import * [out] MypyFile:1( ImportAll:1(x)) [case testImportsInDifferentPlaces] 1 import x def f(): from x import y from z import * [out] MypyFile:1( ExpressionStmt:1( IntExpr(1)) Import:2(x) FuncDef:3( f Block:3( ImportFrom:4(x, [y]) ImportAll:5(z)))) [case testImportWithExtraComma] from x import (y, z,) [out] MypyFile:1( ImportFrom:1(x, [y, z])) [case testDefaultArgs] def f(x=1): pass def g(x, y=1+2, z=(1, 2)): pass [out] MypyFile:1( FuncDef:1( f Args( default( Var(x) IntExpr(1))) Block:1( PassStmt:2())) FuncDef:3( g Args( Var(x) default( Var(y) OpExpr:3( + IntExpr(1) IntExpr(2))) default( Var(z) TupleExpr:3( IntExpr(1) IntExpr(2)))) Block:3( PassStmt:4()))) [case testTryFinally] try: 1 finally: 2 [out] MypyFile:1( TryStmt:1( Block:1( ExpressionStmt:2( IntExpr(1))) Finally( ExpressionStmt:4( IntExpr(2))))) [case testTry] try: 1 except x: 2 [out] MypyFile:1( TryStmt:1( Block:1( ExpressionStmt:2( IntExpr(1))) NameExpr(x) Block:3( ExpressionStmt:4( IntExpr(2))))) [case testComplexTry] try: 1 except x as y: 2 except x.y: 3 [out] MypyFile:1( TryStmt:1( Block:1( ExpressionStmt:2( IntExpr(1))) NameExpr(x) NameExpr(y) Block:3( ExpressionStmt:4( IntExpr(2))) MemberExpr:5( NameExpr(x) y) Block:5( ExpressionStmt:6( IntExpr(3))))) [case testGeneratorExpression] (x for y in z) [out] MypyFile:1( ExpressionStmt:1( GeneratorExpr:1( NameExpr(x) NameExpr(y) NameExpr(z)))) [case testGeneratorExpressionNested] (x for y, (p, q) in z) [out] MypyFile:1( ExpressionStmt:1( GeneratorExpr:1( NameExpr(x) TupleExpr:1( NameExpr(y) TupleExpr:1( NameExpr(p) NameExpr(q))) NameExpr(z)))) [case testListComprehension] x=[x for y in z] [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) ListComprehension:1( GeneratorExpr:1( NameExpr(x) NameExpr(y) NameExpr(z))))) [case testComplexListComprehension] x=[(x, y) for y, z in (1, 2)] [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) ListComprehension:1( GeneratorExpr:1( TupleExpr:1( NameExpr(x) NameExpr(y)) TupleExpr:1( NameExpr(y) NameExpr(z)) TupleExpr:1( IntExpr(1) IntExpr(2)))))) [case testListComprehension2] ([x + 1 for x in a]) [out] MypyFile:1( ExpressionStmt:1( ListComprehension:1( GeneratorExpr:1( OpExpr:1( + NameExpr(x) IntExpr(1)) NameExpr(x) NameExpr(a))))) [case testSlices] x[1:2] x[:1] x[1:] x[:] [out] MypyFile:1( ExpressionStmt:1( IndexExpr:1( NameExpr(x) SliceExpr:-1( IntExpr(1) IntExpr(2)))) ExpressionStmt:2( IndexExpr:2( NameExpr(x) SliceExpr:-1( IntExpr(1)))) ExpressionStmt:3( IndexExpr:3( NameExpr(x) SliceExpr:-1( IntExpr(1) ))) ExpressionStmt:4( IndexExpr:4( NameExpr(x) SliceExpr:-1( )))) [case testSliceWithStride] x[1:2:3] x[1::2] x[:1:2] x[::2] x[1:2:] [out] MypyFile:1( ExpressionStmt:1( IndexExpr:1( NameExpr(x) SliceExpr:-1( IntExpr(1) IntExpr(2) 
IntExpr(3)))) ExpressionStmt:2( IndexExpr:2( NameExpr(x) SliceExpr:-1( IntExpr(1) IntExpr(2)))) ExpressionStmt:3( IndexExpr:3( NameExpr(x) SliceExpr:-1( IntExpr(1) IntExpr(2)))) ExpressionStmt:4( IndexExpr:4( NameExpr(x) SliceExpr:-1( IntExpr(2)))) ExpressionStmt:5( IndexExpr:5( NameExpr(x) SliceExpr:-1( IntExpr(1) IntExpr(2))))) [case testYield] def f(): yield x + 1 [out] MypyFile:1( FuncDef:1( f Block:1( ExpressionStmt:2( YieldExpr:2( OpExpr:2( + NameExpr(x) IntExpr(1))))))) [case testYieldFrom] def f(): yield from h() [out] MypyFile:1( FuncDef:1( f Block:1( ExpressionStmt:2( YieldFromExpr:2( CallExpr:2( NameExpr(h) Args())))))) [case testYieldFromAssignment] def f(): a = yield from h() [out] MypyFile:1( FuncDef:1( f Block:1( AssignmentStmt:2( NameExpr(a) YieldFromExpr:2( CallExpr:2( NameExpr(h) Args())))))) [case testDel] del x del x[0], y[1] [out] MypyFile:1( DelStmt:1( NameExpr(x)) DelStmt:2( TupleExpr:2( IndexExpr:2( NameExpr(x) IntExpr(0)) IndexExpr:2( NameExpr(y) IntExpr(1))))) [case testExtraCommas] 1, 2, +[1, 2,] f(1,) {1:2,} [out] MypyFile:1( ExpressionStmt:1( TupleExpr:1( IntExpr(1) IntExpr(2))) ExpressionStmt:2( UnaryExpr:2( + ListExpr:2( IntExpr(1) IntExpr(2)))) ExpressionStmt:3( CallExpr:3( NameExpr(f) Args( IntExpr(1)))) ExpressionStmt:4( DictExpr:4( IntExpr(1) IntExpr(2)))) [case testExtraCommaInFunc] def f(x,): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) Block:1( PassStmt:2()))) [case testLambda] lambda: 1 lambda x: y + 1 lambda x, y: 1 [out] MypyFile:1( ExpressionStmt:1( LambdaExpr:1( Block:1( ReturnStmt:1( IntExpr(1))))) ExpressionStmt:2( LambdaExpr:2( Args( Var(x)) Block:2( ReturnStmt:2( OpExpr:2( + NameExpr(y) IntExpr(1)))))) ExpressionStmt:3( LambdaExpr:3( Args( Var(x) Var(y)) Block:3( ReturnStmt:3( IntExpr(1)))))) [case testComplexLambda] lambda x=2: x [out] MypyFile:1( ExpressionStmt:1( LambdaExpr:1( Args( default( Var(x) IntExpr(2))) Block:1( ReturnStmt:1( NameExpr(x)))))) [case testLambdaPrecedence] lambda x: 1, 2 [out] MypyFile:1( ExpressionStmt:1( TupleExpr:1( LambdaExpr:1( Args( Var(x)) Block:1( ReturnStmt:1( IntExpr(1)))) IntExpr(2)))) [case testForIndicesInParens] for (i, j) in x: pass [out] MypyFile:1( ForStmt:1( TupleExpr:1( NameExpr(i) NameExpr(j)) NameExpr(x) Block:1( PassStmt:2()))) [case testForAndTrailingCommaAfterIndexVar] for i, in x: pass [out] MypyFile:1( ForStmt:1( TupleExpr:1( NameExpr(i)) NameExpr(x) Block:1( PassStmt:2()))) [case testListComprehensionAndTrailingCommaAfterIndexVar] x = [a for b, in c] [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) ListComprehension:1( GeneratorExpr:1( NameExpr(a) TupleExpr:1( NameExpr(b)) NameExpr(c))))) [case testForAndTrailingCommaAfterIndexVars] for i, j, in x: pass [out] MypyFile:1( ForStmt:1( TupleExpr:1( NameExpr(i) NameExpr(j)) NameExpr(x) Block:1( PassStmt:2()))) [case testGeneratorWithCondition] (x for y in z if 0) [out] MypyFile:1( ExpressionStmt:1( GeneratorExpr:1( NameExpr(x) NameExpr(y) NameExpr(z) IntExpr(0)))) [case testListComprehensionWithCondition] raise [x for y in z if 0] [out] MypyFile:1( RaiseStmt:1( ListComprehension:1( GeneratorExpr:1( NameExpr(x) NameExpr(y) NameExpr(z) IntExpr(0))))) [case testListComprehensionWithConditions] raise [x for y in z if 0 if 1] [out] MypyFile:1( RaiseStmt:1( ListComprehension:1( GeneratorExpr:1( NameExpr(x) NameExpr(y) NameExpr(z) IntExpr(0) IntExpr(1))))) [case testListComprehensionWithCrazyConditions] raise [x for y in z if (1 if 2 else 3) if 1] [out] MypyFile:1( RaiseStmt:1( ListComprehension:1( GeneratorExpr:1( NameExpr(x) NameExpr(y) NameExpr(z) 
ConditionalExpr:1( Condition( IntExpr(2)) IntExpr(1) IntExpr(3)) IntExpr(1))))) [case testDictionaryComprehension] a = {x: y for x, y in xys} [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) DictionaryComprehension:1( NameExpr(x) NameExpr(y) TupleExpr:1( NameExpr(x) NameExpr(y)) NameExpr(xys)))) [case testDictionaryComprehensionComplex] a = {x: y for x, y in xys for p, q in pqs if c} [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) DictionaryComprehension:1( NameExpr(x) NameExpr(y) TupleExpr:1( NameExpr(x) NameExpr(y)) TupleExpr:1( NameExpr(p) NameExpr(q)) NameExpr(xys) NameExpr(pqs) NameExpr(c)))) [case testSetComprehension] a = {i for i in l} [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) SetComprehension:1( GeneratorExpr:1( NameExpr(i) NameExpr(i) NameExpr(l))))) [case testSetComprehensionComplex] a = {x + p for x in xys for p in pqs if c} [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) SetComprehension:1( GeneratorExpr:1( OpExpr:1( + NameExpr(x) NameExpr(p)) NameExpr(x) NameExpr(p) NameExpr(xys) NameExpr(pqs) NameExpr(c))))) [case testWithStatement] with open('foo') as f: pass [out] MypyFile:1( WithStmt:1( Expr( CallExpr:1( NameExpr(open) Args( StrExpr(foo)))) Target( NameExpr(f)) Block:1( PassStmt:2()))) [case testWithStatementWithoutTarget] with foo: pass [out] MypyFile:1( WithStmt:1( Expr( NameExpr(foo)) Block:1( PassStmt:2()))) [case testHexOctBinLiterals] 0xa, 0Xaf, 0o7, 0O12, 0b1, 0B101 [out] MypyFile:1( ExpressionStmt:1( TupleExpr:1( IntExpr(10) IntExpr(175) IntExpr(7) IntExpr(10) IntExpr(1) IntExpr(5)))) [case testImportFromWithParens] from x import (y) from x import (y, z) [out] MypyFile:1( ImportFrom:1(x, [y]) ImportFrom:2(x, [y, z])) [case testContinueStmt] while 1: continue [out] MypyFile:1( WhileStmt:1( IntExpr(1) Block:1( ContinueStmt:2()))) [case testStrLiteralConcatenate] 'f' 'bar' ('x' 'y' 'z') [out] MypyFile:1( ExpressionStmt:1( StrExpr(fbar)) ExpressionStmt:2( StrExpr(xyz))) [case testCatchAllExcept] try: 1 except: pass try: 1 except x: pass except: 2 [out] MypyFile:1( TryStmt:1( Block:1( ExpressionStmt:2( IntExpr(1))) Block:3( PassStmt:4())) TryStmt:5( Block:5( ExpressionStmt:6( IntExpr(1))) NameExpr(x) Block:7( PassStmt:8()) Block:9( ExpressionStmt:10( IntExpr(2))))) [case testTryElse] try: pass except x: 1 else: 2 [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) NameExpr(x) Block:3( ExpressionStmt:4( IntExpr(1))) Else( ExpressionStmt:6( IntExpr(2))))) [case testExceptWithMultipleTypes] try: pass except (x, y): pass except (a, b, c) as e: pass [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) TupleExpr:3( NameExpr(x) NameExpr(y)) Block:3( PassStmt:4()) TupleExpr:5( NameExpr(a) NameExpr(b) NameExpr(c)) NameExpr(e) Block:5( PassStmt:6()))) [case testNestedFunctions] def f(): def g(): pass def h() -> int: def g() -> int: pass [out] MypyFile:1( FuncDef:1( f Block:1( FuncDef:2( g Block:2( PassStmt:3())))) FuncDef:4( h def () -> int? Block:4( FuncDef:5( g def () -> int? 
Block:5( PassStmt:6()))))) [case testStatementsAndDocStringsInClassBody] class A: "doc string" x = y def f(self): pass [out] MypyFile:1( ClassDef:1( A ExpressionStmt:2( StrExpr(doc string)) AssignmentStmt:3( NameExpr(x) NameExpr(y)) FuncDef:4( f Args( Var(self)) Block:4( PassStmt:5())))) [case testSingleLineClass] class a: pass [out] MypyFile:1( ClassDef:1( a PassStmt:1())) [case testDecorator] @property def f(): pass [out] MypyFile:1( Decorator:1( Var(f) NameExpr(property) FuncDef:2( f Block:2( PassStmt:3())))) [case testComplexDecorator] @foo(bar, 1) @zar def f() -> int: pass [out] MypyFile:1( Decorator:1( Var(f) CallExpr:1( NameExpr(foo) Args( NameExpr(bar) IntExpr(1))) NameExpr(zar) FuncDef:3( f def () -> int? Block:3( PassStmt:4())))) [case testKeywordArgInCall] f(x=1) [out] MypyFile:1( ExpressionStmt:1( CallExpr:1( NameExpr(f) Args() KwArgs( x IntExpr(1))))) [case testComplexKeywordArgs] f(x, y=1 or 2, z=y) [out] MypyFile:1( ExpressionStmt:1( CallExpr:1( NameExpr(f) Args( NameExpr(x)) KwArgs( y OpExpr:1( or IntExpr(1) IntExpr(2))) KwArgs( z NameExpr(y))))) [case testChainedAssignment] x = z = 1 [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x) NameExpr(z)) IntExpr(1))) [case testVarArgs] def f(x, *a): pass f(1, *2) [out] MypyFile:1( FuncDef:1( f Args( Var(x)) VarArg( Var(a)) Block:1( PassStmt:1())) ExpressionStmt:2( CallExpr:2( NameExpr(f) Args( IntExpr(1) IntExpr(2)) VarArg))) [case testVarArgWithType] def f(x: str, *a: int): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: str?, *a: int?) -> Any VarArg( Var(a)) Block:1( PassStmt:1()))) [case testDictVarArgs] def f(x, **a): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) DictVarArg( Var(a)) Block:1( PassStmt:1()))) [case testBothVarArgs] def f(x, *a, **b): pass def g(*a, **b): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) VarArg( Var(a)) DictVarArg( Var(b)) Block:1( PassStmt:1())) FuncDef:2( g VarArg( Var(a)) DictVarArg( Var(b)) Block:2( PassStmt:2()))) [case testDictVarArgsWithType] def f(x: X, **a: A) -> None: pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: X?, **a: A?) -> None? 
DictVarArg( Var(a)) Block:1( PassStmt:1()))) [case testCallDictVarArgs] f(**x) f(x, **y) f(*x, **y) f(x, *y, **z) [out] MypyFile:1( ExpressionStmt:1( CallExpr:1( NameExpr(f) Args() DictVarArg( NameExpr(x)))) ExpressionStmt:2( CallExpr:2( NameExpr(f) Args( NameExpr(x)) DictVarArg( NameExpr(y)))) ExpressionStmt:3( CallExpr:3( NameExpr(f) Args( NameExpr(x)) VarArg DictVarArg( NameExpr(y)))) ExpressionStmt:4( CallExpr:4( NameExpr(f) Args( NameExpr(x) NameExpr(y)) VarArg DictVarArg( NameExpr(z))))) [case testAssert] assert x == y [out] MypyFile:1( AssertStmt:1( ComparisonExpr:1( == NameExpr(x) NameExpr(y)))) [case testYieldWithoutExpressions] def f(): yield [out] MypyFile:1( FuncDef:1( f Block:1( ExpressionStmt:2( YieldExpr:2())))) [case testConditionalExpression] x if y else z [out] MypyFile:1( ExpressionStmt:1( ConditionalExpr:1( Condition( NameExpr(y)) NameExpr(x) NameExpr(z)))) [case testConditionalExpressionInListComprehension] a = [x if y else z for a in b] [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) ListComprehension:1( GeneratorExpr:1( ConditionalExpr:1( Condition( NameExpr(y)) NameExpr(x) NameExpr(z)) NameExpr(a) NameExpr(b))))) [case testConditionalExpressionInTuple] 1 if 2 else 3, 4 [out] MypyFile:1( ExpressionStmt:1( TupleExpr:1( ConditionalExpr:1( Condition( IntExpr(2)) IntExpr(1) IntExpr(3)) IntExpr(4)))) [case testSetLiteral] {x or y} {1, 2} [out] MypyFile:1( ExpressionStmt:1( SetExpr:1( OpExpr:1( or NameExpr(x) NameExpr(y)))) ExpressionStmt:2( SetExpr:2( IntExpr(1) IntExpr(2)))) [case testSetLiteralWithExtraComma] {x,} [out] MypyFile:1( ExpressionStmt:1( SetExpr:1( NameExpr(x)))) [case testImportAs] import x as y import x, z as y, a.b as c, d as d [out] MypyFile:1( Import:1(x : y) Import:2(x, z : y, a.b : c, d : d)) [case testForAndElse] for x in y: pass else: x [out] MypyFile:1( ForStmt:1( NameExpr(x) NameExpr(y) Block:1( PassStmt:2()) Else( ExpressionStmt:4( NameExpr(x))))) [case testWhileAndElse] while x: pass else: y [out] MypyFile:1( WhileStmt:1( NameExpr(x) Block:1( PassStmt:2()) Else( ExpressionStmt:4( NameExpr(y))))) [case testWithAndMultipleOperands] with x as y, a as b: pass with x(), y(): pass [out] MypyFile:1( WithStmt:1( Expr( NameExpr(x)) Target( NameExpr(y)) Expr( NameExpr(a)) Target( NameExpr(b)) Block:1( PassStmt:2())) WithStmt:3( Expr( CallExpr:3( NameExpr(x) Args())) Expr( CallExpr:3( NameExpr(y) Args())) Block:3( PassStmt:4()))) [case testOperatorAssignment] x += 1 x -= 1 x *= 1 x /= 1 x //= 1 x %= 1 x **= 1 x |= 1 x &= 1 x ^= 1 x >>= 1 x <<= 1 [out] MypyFile:1( OperatorAssignmentStmt:1( + NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:2( - NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:3( * NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:4( / NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:5( // NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:6( % NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:7( ** NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:8( | NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:9( & NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:10( ^ NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:11( >> NameExpr(x) IntExpr(1)) OperatorAssignmentStmt:12( << NameExpr(x) IntExpr(1))) [case testNestedClasses] class A: class B: pass class C: pass [out] MypyFile:1( ClassDef:1( A ClassDef:2( B PassStmt:3()) ClassDef:4( C PassStmt:5()))) [case testTryWithExceptAndFinally] try: pass except x: x finally: y [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) NameExpr(x) Block:3( ExpressionStmt:4( NameExpr(x))) Finally( ExpressionStmt:6( NameExpr(y))))) [case 
testBareAsteriskInFuncDef] def f(x, *, y=1): pass [out] MypyFile:1( FuncDef:1( f MaxPos(1) Args( Var(x) default( Var(y) IntExpr(1))) Block:1( PassStmt:1()))) [case testBareAsteriskInFuncDefWithSignature] def f(x: A, *, y: B = 1) -> None: pass [out] MypyFile:1( FuncDef:1( f MaxPos(1) Args( Var(x) default( Var(y) IntExpr(1))) def (x: A?, *, y: B? =) -> None? Block:1( PassStmt:1()))) [case testBareAsteriskNamedDefault] def f(*, y: B = 1) -> None: pass [out] MypyFile:1( FuncDef:1( f MaxPos(0) Args( default( Var(y) IntExpr(1))) def (*, y: B? =) -> None? Block:1( PassStmt:1()))) [case testBareAsteriskNamedNoDefault] def f(*, y: B) -> None: pass [out] MypyFile:1( FuncDef:1( f MaxPos(0) Args( Var(y)) def (*, y: B?) -> None? Block:1( PassStmt:1()))) [case testSuperExpr] super().x [out] MypyFile:1( ExpressionStmt:1( SuperExpr:1( x))) [case testKeywordAndDictArgs] f(x = y, **kwargs) [out] MypyFile:1( ExpressionStmt:1( CallExpr:1( NameExpr(f) Args() KwArgs( x NameExpr(y)) DictVarArg( NameExpr(kwargs))))) [case testSimpleFunctionType] f = None # type: Callable[[], None] [out] MypyFile:1( AssignmentStmt:1( NameExpr(f) NameExpr(None) Callable?[, None?])) [case testFunctionTypeWithArgument] f = None # type: Callable[[str], int] [out] MypyFile:1( AssignmentStmt:1( NameExpr(f) NameExpr(None) Callable?[, int?])) [case testFunctionTypeWithTwoArguments] f = None # type: Callable[[a[b], x.y], List[int]] [out] MypyFile:1( AssignmentStmt:1( NameExpr(f) NameExpr(None) Callable?[, List?[int?]])) [case testFunctionTypeWithExtraComma] def f(x: Callable[[str,], int]): pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: Callable?[, int?]) -> Any Block:1( PassStmt:1()))) [case testSimpleStringLiteralType] def f() -> 'A': pass [out] MypyFile:1( FuncDef:1( f def () -> A? Block:1( PassStmt:1()))) [case testGenericStringLiteralType] def f() -> 'A[B, C]': pass [out] MypyFile:1( FuncDef:1( f def () -> A?[B?, C?] Block:1( PassStmt:1()))) [case testPartialStringLiteralType] def f() -> A['B', C]: pass [out] MypyFile:1( FuncDef:1( f def () -> A?[B?, C?] Block:1( PassStmt:1()))) [case testWhitespaceInStringLiteralType] def f() -> ' A [ X ] ': pass [out] MypyFile:1( FuncDef:1( f def () -> A?[X?] Block:1( PassStmt:1()))) [case testEscapeInStringLiteralType] def f() -> '\x41': pass [out] MypyFile:1( FuncDef:1( f def () -> A? 
Block:1( PassStmt:1()))) [case testMetaclass] class Foo(metaclass=Bar): pass [out] MypyFile:1( ClassDef:1( Foo Metaclass(NameExpr(Bar)) PassStmt:1())) [case testQualifiedMetaclass] class Foo(metaclass=foo.Bar): pass [out] MypyFile:1( ClassDef:1( Foo Metaclass(MemberExpr:1( NameExpr(foo) Bar)) PassStmt:1())) [case testBaseAndMetaclass] class Foo(foo.bar[x], metaclass=Bar): pass [out] MypyFile:1( ClassDef:1( Foo Metaclass(NameExpr(Bar)) BaseTypeExpr( IndexExpr:1( MemberExpr:1( NameExpr(foo) bar) NameExpr(x))) PassStmt:1())) [case testClassKeywordArgs] class Foo(_root=None): pass [out] MypyFile:1( ClassDef:1( Foo PassStmt:1())) [case testClassKeywordArgsBeforeMeta] class Foo(_root=None, metaclass=Bar): pass [out] MypyFile:1( ClassDef:1( Foo Metaclass(NameExpr(Bar)) PassStmt:1())) [case testClassKeywordArgsAfterMeta] class Foo(metaclass=Bar, _root=None): pass [out] MypyFile:1( ClassDef:1( Foo Metaclass(NameExpr(Bar)) PassStmt:1())) [case testNamesThatAreNoLongerKeywords] any = interface [out] MypyFile:1( AssignmentStmt:1( NameExpr(any) NameExpr(interface))) [case testFunctionOverload] @overload def f() -> x: pass @overload def f() -> y: pass [out] MypyFile:1( OverloadedFuncDef:1( Decorator:1( Var(f) NameExpr(overload) FuncDef:2( f def () -> x? Block:2( PassStmt:2()))) Decorator:3( Var(f) NameExpr(overload) FuncDef:4( f def () -> y? Block:4( PassStmt:4()))))) [case testFunctionOverloadAndOtherStatements] x @overload def f() -> x: pass @overload def f() -> y: pass x [out] MypyFile:1( ExpressionStmt:1( NameExpr(x)) OverloadedFuncDef:2( Decorator:2( Var(f) NameExpr(overload) FuncDef:3( f def () -> x? Block:3( PassStmt:3()))) Decorator:4( Var(f) NameExpr(overload) FuncDef:5( f def () -> y? Block:5( PassStmt:5())))) ExpressionStmt:6( NameExpr(x))) [case testFunctionOverloadWithThreeVariants] @overload def f() -> x: pass @overload def f() -> y: pass @overload def f(y): pass [out] MypyFile:1( OverloadedFuncDef:1( Decorator:1( Var(f) NameExpr(overload) FuncDef:2( f def () -> x? Block:2( PassStmt:2()))) Decorator:3( Var(f) NameExpr(overload) FuncDef:4( f def () -> y? Block:4( PassStmt:4()))) Decorator:5( Var(f) NameExpr(overload) FuncDef:6( f Args( Var(y)) Block:6( PassStmt:6()))))) [case testDecoratorsThatAreNotOverloads] @foo def f() -> x: pass @foo def g() -> y: pass [out] MypyFile:1( Decorator:1( Var(f) NameExpr(foo) FuncDef:2( f def () -> x? Block:2( PassStmt:2()))) Decorator:3( Var(g) NameExpr(foo) FuncDef:4( g def () -> y? Block:4( PassStmt:4())))) [case testFunctionOverloadWithinFunction] def f(): @overload def g(): pass @overload def g() -> x: pass [out] MypyFile:1( FuncDef:1( f Block:1( OverloadedFuncDef:2( Decorator:2( Var(g) NameExpr(overload) FuncDef:3( g Block:3( PassStmt:3()))) Decorator:4( Var(g) NameExpr(overload) FuncDef:5( g def () -> x? Block:5( PassStmt:5()))))))) [case testCommentFunctionAnnotation] def f(): # type: () -> A pass def g(x): # type: (A) -> B pass [out] MypyFile:1( FuncDef:1( f def () -> A? Block:1( PassStmt:2())) FuncDef:3( g Args( Var(x)) def (x: A?) -> B? Block:3( PassStmt:4()))) [case testCommentMethodAnnotation] class A: def f(self): # type: () -> A pass def g(xself, x): # type: (A) -> B pass [out] MypyFile:1( ClassDef:1( A FuncDef:2( f Args( Var(self)) def (self: Any) -> A? Block:2( PassStmt:3())) FuncDef:4( g Args( Var(xself) Var(x)) def (xself: Any, x: A?) -> B? 
Block:4( PassStmt:5())))) [case testCommentMethodAnnotationAndNestedFunction] class A: def f(self): # type: () -> A def g(x): # type: (A) -> B pass [out] MypyFile:1( ClassDef:1( A FuncDef:2( f Args( Var(self)) def (self: Any) -> A? Block:2( FuncDef:3( g Args( Var(x)) def (x: A?) -> B? Block:3( PassStmt:4())))))) [case testCommentFunctionAnnotationOnSeparateLine] def f(x): # type: (X) -> Y pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: X?) -> Y? Block:1( PassStmt:3()))) [case testCommentFunctionAnnotationOnSeparateLine2] def f(x): # type: (X) -> Y # bar pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: X?) -> Y? Block:1( PassStmt:5()))) [case testCommentFunctionAnnotationAndVarArg] def f(x, *y): # type: (X, *Y) -> Z pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: X?, *y: Y?) -> Z? VarArg( Var(y)) Block:1( PassStmt:2()))) [case testCommentFunctionAnnotationAndAllVarArgs] def f(x, *y, **z): # type: (X, *Y, **Z) -> A pass [out] MypyFile:1( FuncDef:1( f Args( Var(x)) def (x: X?, *y: Y?, **z: Z?) -> A? VarArg( Var(y)) DictVarArg( Var(z)) Block:1( PassStmt:2()))) [case testClassDecorator] @foo class X: pass @foo(bar) @x.y class Z: pass [out] MypyFile:1( ClassDef:1( X Decorators( NameExpr(foo)) PassStmt:2()) ClassDef:3( Z Decorators( CallExpr:3( NameExpr(foo) Args( NameExpr(bar))) MemberExpr:4( NameExpr(x) y)) PassStmt:5())) [case testTrailingSemicolon] def x(): pass; def y(): pass [out] MypyFile:1( FuncDef:1( x Block:1( PassStmt:2())) FuncDef:4( y Block:4( PassStmt:5()))) [case testEmptySuperClass] class A(): pass [out] MypyFile:1( ClassDef:1( A PassStmt:2())) [case testStarExpression] *a *a, b a, *b a, (*x, y) a, (x, *y) [out] MypyFile:1( ExpressionStmt:1( StarExpr:1( NameExpr(a))) ExpressionStmt:2( TupleExpr:2( StarExpr:2( NameExpr(a)) NameExpr(b))) ExpressionStmt:3( TupleExpr:3( NameExpr(a) StarExpr:3( NameExpr(b)))) ExpressionStmt:4( TupleExpr:4( NameExpr(a) TupleExpr:4( StarExpr:4( NameExpr(x)) NameExpr(y)))) ExpressionStmt:5( TupleExpr:5( NameExpr(a) TupleExpr:5( NameExpr(x) StarExpr:5( NameExpr(y)))))) [case testStarExpressionParenthesis] *(a) *(a,b) [out] MypyFile:1( ExpressionStmt:1( StarExpr:1( NameExpr(a))) ExpressionStmt:2( StarExpr:2( TupleExpr:2( NameExpr(a) NameExpr(b))))) [case testStarExpressionInFor] for *a in b: pass for a, *b in c: pass for *a, b in c: pass [out] MypyFile:1( ForStmt:1( StarExpr:1( NameExpr(a)) NameExpr(b) Block:1( PassStmt:2())) ForStmt:4( TupleExpr:4( NameExpr(a) StarExpr:4( NameExpr(b))) NameExpr(c) Block:4( PassStmt:5())) ForStmt:7( TupleExpr:7( StarExpr:7( NameExpr(a)) NameExpr(b)) NameExpr(c) Block:7( PassStmt:8()))) [case testStarExprInGeneratorExpr] (x for y, *p in z) (x for *p, y in z) (x for y, *p, q in z) [out] MypyFile:1( ExpressionStmt:1( GeneratorExpr:1( NameExpr(x) TupleExpr:1( NameExpr(y) StarExpr:1( NameExpr(p))) NameExpr(z))) ExpressionStmt:2( GeneratorExpr:2( NameExpr(x) TupleExpr:2( StarExpr:2( NameExpr(p)) NameExpr(y)) NameExpr(z))) ExpressionStmt:3( GeneratorExpr:3( NameExpr(x) TupleExpr:3( NameExpr(y) StarExpr:3( NameExpr(p)) NameExpr(q)) NameExpr(z)))) [case testParseNamedtupleBaseclass] class A(namedtuple('x', ['y'])): pass [out] MypyFile:1( ClassDef:1( A BaseTypeExpr( CallExpr:1( NameExpr(namedtuple) Args( StrExpr(x) ListExpr:1( StrExpr(y))))) PassStmt:1())) [case testEllipsis] ... 
a[1,...,2] ....__class__ [out] MypyFile:1( ExpressionStmt:1( Ellipsis) ExpressionStmt:2( IndexExpr:2( NameExpr(a) TupleExpr:2( IntExpr(1) Ellipsis IntExpr(2)))) ExpressionStmt:3( MemberExpr:3( Ellipsis __class__))) [case testFunctionWithManyKindsOfArgs] def f(x, *args, y=None, **kw): pass [out] MypyFile:1( FuncDef:1( f MaxPos(1) Args( Var(x) default( Var(y) NameExpr(None))) VarArg( Var(args)) DictVarArg( Var(kw)) Block:1( PassStmt:1()))) [case testIfWithSemicolons] if 1: a; b [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( ExpressionStmt:1( NameExpr(a)) ExpressionStmt:1( NameExpr(b))))) [case testIfWithSemicolonsNested] while 2: if 1: a; b [out] MypyFile:1( WhileStmt:1( IntExpr(2) Block:1( IfStmt:2( If( IntExpr(1)) Then( ExpressionStmt:2( NameExpr(a)) ExpressionStmt:2( NameExpr(b))))))) [case testIfElseWithSemicolons] if 1: global x; y = 1 else: x = 1; return 3 4 [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( GlobalDecl:1( x) AssignmentStmt:1( NameExpr(y) IntExpr(1))) Else( AssignmentStmt:2( NameExpr(x) IntExpr(1)) ReturnStmt:2( IntExpr(3)))) ExpressionStmt:3( IntExpr(4))) [case testIfElseWithSemicolonsNested] while 2: if 1: global x; y = 1 else: x = 1; return 3 4 [out] MypyFile:1( WhileStmt:1( IntExpr(2) Block:1( IfStmt:2( If( IntExpr(1)) Then( GlobalDecl:2( x) AssignmentStmt:2( NameExpr(y) IntExpr(1))) Else( AssignmentStmt:3( NameExpr(x) IntExpr(1)) ReturnStmt:3( IntExpr(3)))))) ExpressionStmt:4( IntExpr(4))) [case testKeywordArgumentAfterStarArgumentInCall] f(x=1, *y) [out] MypyFile:1( ExpressionStmt:1( CallExpr:1( NameExpr(f) Args( NameExpr(y)) VarArg KwArgs( x IntExpr(1))))) [case testConditionalExpressionInSetComprehension] { 1 if x else 2 for x in y } [out] MypyFile:1( ExpressionStmt:1( SetComprehension:1( GeneratorExpr:1( ConditionalExpr:1( Condition( NameExpr(x)) IntExpr(1) IntExpr(2)) NameExpr(x) NameExpr(y))))) [case testConditionalExpressionInListComprehension] a = [ 1 if x else 2 for x in y ] [out] MypyFile:1( AssignmentStmt:1( NameExpr(a) ListComprehension:1( GeneratorExpr:1( ConditionalExpr:1( Condition( NameExpr(x)) IntExpr(1) IntExpr(2)) NameExpr(x) NameExpr(y))))) [case testComplexWithLvalue] with x as y.z: pass [out] MypyFile:1( WithStmt:1( Expr( NameExpr(x)) Target( MemberExpr:1( NameExpr(y) z)) Block:1( PassStmt:1()))) [case testRelativeImportWithEllipsis] from ... import x [out] MypyFile:1( ImportFrom:1(..., [x])) [case testRelativeImportWithEllipsis2] from .... 
import x [out] MypyFile:1( ImportFrom:1(...., [x])) [case testParseExtendedSlicing] a[:, :] [out] MypyFile:1( ExpressionStmt:1( IndexExpr:1( NameExpr(a) TupleExpr:-1( SliceExpr:-1( ) SliceExpr:-1( ))))) [case testParseExtendedSlicing2] a[1:2:, :,] [out] MypyFile:1( ExpressionStmt:1( IndexExpr:1( NameExpr(a) TupleExpr:-1( SliceExpr:-1( IntExpr(1) IntExpr(2)) SliceExpr:-1( ))))) [case testParseExtendedSlicing3] a[1:2:3, ..., 1] [out] MypyFile:1( ExpressionStmt:1( IndexExpr:1( NameExpr(a) TupleExpr:-1( SliceExpr:-1( IntExpr(1) IntExpr(2) IntExpr(3)) Ellipsis IntExpr(1))))) [case testParseIfExprInDictExpr] test = { 'spam': 'eggs' if True else 'bacon' } [out] MypyFile:1( AssignmentStmt:1( NameExpr(test) DictExpr:1( StrExpr(spam) ConditionalExpr:1( Condition( NameExpr(True)) StrExpr(eggs) StrExpr(bacon))))) [case testIgnoreLine] import x # type: ignore [out] MypyFile:1( Import:1(x) IgnoredLines(1)) [case testIgnore2Lines] x y # type: ignore z # type: ignore [out] MypyFile:1( ExpressionStmt:1( NameExpr(x)) ExpressionStmt:2( NameExpr(y)) ExpressionStmt:3( NameExpr(z)) IgnoredLines(2, 3)) [case testCommentedOutIgnoreAnnotation] y ## type: ignore [out] MypyFile:1( ExpressionStmt:1( NameExpr(y))) [case testInvalidIgnoreAnnotations] y # type: ignored y # type: IGNORE y # type : ignore [out] MypyFile:1( ExpressionStmt:1( NameExpr(y)) ExpressionStmt:2( NameExpr(y)) ExpressionStmt:3( NameExpr(y))) [case testSpaceInIgnoreAnnotations] y # type: ignore # foo y #type:ignore [out] MypyFile:1( ExpressionStmt:1( NameExpr(y)) ExpressionStmt:2( NameExpr(y)) IgnoredLines(1, 2)) [case testIgnoreAnnotationAndMultilineStatement] x = { 1: 2 # type: ignore } y = { # type: ignore 1: 2 } # type: ignore [out] MypyFile:1( AssignmentStmt:1( NameExpr(x) DictExpr:1( IntExpr(1) IntExpr(2))) AssignmentStmt:4( NameExpr(y) DictExpr:4( IntExpr(1) IntExpr(2))) IgnoredLines(2, 4, 6)) [case testIgnoreAnnotationAndMultilineStatement2] from m import ( # type: ignore x, y ) [out] MypyFile:1( ImportFrom:1(m, [x, y]) IgnoredLines(1)) [case testYieldExpression] def f(): x = yield f() [out] MypyFile:1( FuncDef:1( f Block:1( AssignmentStmt:2( NameExpr(x) YieldExpr:2( CallExpr:2( NameExpr(f) Args())))))) [case testForWithSingleItemTuple] for x in 1,: pass [out] MypyFile:1( ForStmt:1( NameExpr(x) TupleExpr:1( IntExpr(1)) Block:1( PassStmt:1()))) [case testIsoLatinUnixEncoding] # coding: iso-latin-1-unix [out] MypyFile:1() [case testLatinUnixEncoding] # coding: latin-1-unix [out] MypyFile:1() [case testLatinUnixEncoding] # coding: iso-latin-1 [out] MypyFile:1() [case testYieldExpressionInParens] def f(): (yield) [out] MypyFile:1( FuncDef:1( f Block:1( ExpressionStmt:2( YieldExpr:2())))) mypy-0.560/test-data/unit/plugins/0000755€tŠÔÚ€2›s®0000000000013215007244023220 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/test-data/unit/plugins/attrhook.py0000644€tŠÔÚ€2›s®0000000113713215007206025425 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Callable from mypy.plugin import Plugin, AttributeContext from mypy.types import Type, Instance class AttrPlugin(Plugin): def get_attribute_hook(self, fullname: str) -> Optional[Callable[[AttributeContext], Type]]: if fullname == 'm.Signal.__call__': return signal_call_callback return None def signal_call_callback(ctx: AttributeContext) -> Type: if isinstance(ctx.type, Instance) and ctx.type.type.fullname() == 'm.Signal': return ctx.type.args[0] return ctx.inferred_attr_type def plugin(version): return AttrPlugin 
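The attribute hook above only fires for the fully-qualified name 'm.Signal.__call__'; the test cases that use this plugin define their own module m and enable the plugin through the `plugins` option of a mypy configuration file. The sketch below is a hypothetical stand-in for such a module, showing the kind of code the hook rewrites; Signal and its layout here are assumptions for illustration, not the definitions used by the bundled test data.

# Hypothetical module m (illustration only; the real test cases ship their own).
from typing import Callable, Generic, TypeVar

T = TypeVar('T')

class Signal(Generic[T]):
    # attrhook.py rewrites the type of Signal[...].__call__ to the Signal's
    # type argument, so calling an instance type-checks as calling a value of
    # type T rather than this generic stub.
    def __call__(self, *args, **kwargs):
        pass

s = Signal()  # type: Signal[Callable[[int], None]]
s(5)          # with the plugin active, mypy checks this call against T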
mypy-0.560/test-data/unit/plugins/badreturn.py0000644€tŠÔÚ€2›s®0000000003613215007206025555 0ustar jukkaDROPBOX\Domain Users00000000000000def plugin(version): pass mypy-0.560/test-data/unit/plugins/badreturn2.py0000644€tŠÔÚ€2›s®0000000010313215007206025632 0ustar jukkaDROPBOX\Domain Users00000000000000class MyPlugin: pass def plugin(version): return MyPlugin mypy-0.560/test-data/unit/plugins/fnplugin.py0000644€tŠÔÚ€2›s®0000000052213215007206025411 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.plugin import Plugin class MyPlugin(Plugin): def get_function_hook(self, fullname): if fullname == '__main__.f': return my_hook assert fullname is not None return None def my_hook(ctx): return ctx.api.named_generic_type('builtins.int', []) def plugin(version): return MyPlugin mypy-0.560/test-data/unit/plugins/named_callable.py0000644€tŠÔÚ€2›s®0000000150613215007206026475 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.plugin import Plugin from mypy.types import CallableType class MyPlugin(Plugin): def get_function_hook(self, fullname): if fullname == 'm.decorator1': return decorator_call_hook if fullname == 'm._decorated': # This is a dummy name generated by the plugin return decorate_hook return None def decorator_call_hook(ctx): if isinstance(ctx.default_return_type, CallableType): return ctx.default_return_type.copy_modified(name='m._decorated') return ctx.default_return_type def decorate_hook(ctx): if isinstance(ctx.default_return_type, CallableType): return ctx.default_return_type.copy_modified( ret_type=ctx.api.named_generic_type('builtins.str', [])) return ctx.default_return_type def plugin(version): return MyPlugin mypy-0.560/test-data/unit/plugins/noentry.py0000644€tŠÔÚ€2›s®0000000001713215007206025264 0ustar jukkaDROPBOX\Domain Users00000000000000# empty plugin mypy-0.560/test-data/unit/plugins/plugin2.py0000644€tŠÔÚ€2›s®0000000047613215007206025157 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy.plugin import Plugin class Plugin2(Plugin): def get_function_hook(self, fullname): if fullname in ('__main__.f', '__main__.g'): return str_hook return None def str_hook(ctx): return ctx.api.named_generic_type('builtins.str', []) def plugin(version): return Plugin2 mypy-0.560/test-data/unit/plugins/type_anal_hook.py0000644€tŠÔÚ€2›s®0000000260613215007206026570 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Callable from mypy.plugin import Plugin, AnalyzeTypeContext from mypy.types import Type, UnboundType, TypeList, AnyType, NoneTyp, CallableType, TypeOfAny class TypeAnalyzePlugin(Plugin): def get_type_analyze_hook(self, fullname: str ) -> Optional[Callable[[AnalyzeTypeContext], Type]]: if fullname == 'm.Signal': return signal_type_analyze_callback return None def signal_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type: if (len(ctx.type.args) != 1 or not isinstance(ctx.type.args[0], TypeList)): ctx.api.fail('Invalid "Signal" type (expected "Signal[[t, ...]]")', ctx.context) return AnyType(TypeOfAny.from_error) args = ctx.type.args[0] assert isinstance(args, TypeList) analyzed = ctx.api.analyze_callable_args(args) if analyzed is None: return AnyType(TypeOfAny.from_error) # Error generated elsewhere arg_types, arg_kinds, arg_names = analyzed arg_types = [ctx.api.analyze_type(arg) for arg in arg_types] type_arg = CallableType(arg_types, arg_kinds, arg_names, NoneTyp(), ctx.api.named_type('builtins.function', [])) return ctx.api.named_type('m.Signal', [type_arg]) def plugin(version): return TypeAnalyzePlugin 
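These plugin modules are not run on their own; they are exercised by mypy's custom-plugin test cases. The following is a rough, illustrative sketch of how one of them is used (the config path and the checked program are assumptions for illustration, not taken from the archive): a test enables the plugin through a mypy configuration file and then type-checks a small program whose inferred types the hook rewrites.

# Sketch of a program that fnplugin.py (above) would affect when enabled via
# a configuration file along the lines of:
#     [mypy]
#     plugins = test-data/unit/plugins/fnplugin.py
# The exact programs used by the real test cases may differ.
def f() -> str: ...

x = f()  # with the plugin active, mypy infers 'builtins.int' for this call,
         # because get_function_hook matches the fullname '__main__.f'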
mypy-0.560/test-data/unit/python2eval.test0000644€tŠÔÚ€2›s®0000002365213215007206024721 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for type checking mypy programs using full stubs and running -- using CPython (Python 2 mode). -- -- These are mostly regression tests -- no attempt is made to make these -- complete. [case testAbs2_python2] n = None # type: int f = None # type: float n = abs(1) abs(1) + 'x' # Error f = abs(1.1) abs(1.1) + 'x' # Error [out] _program.py:4: error: Unsupported operand types for + ("int" and "str") _program.py:6: error: Unsupported operand types for + ("float" and "str") [case testUnicode_python2] x = unicode('xyz', 'latin1') print x x = u'foo' print repr(x) [out] xyz u'foo' [case testXrangeAndRange_python2] for i in xrange(2): print i for i in range(3): print i [out] 0 1 0 1 2 [case testIterator_python2] import typing, sys x = iter('bar') print x.next(), x.next() [out] b a [case testEncodeAndDecode_python2] print 'a'.encode('latin1') print 'b'.decode('latin1') print u'c'.encode('latin1') print u'd'.decode('latin1') [out] a b c d [case testHasKey_python2] d = {1: 'x'} print d.has_key(1) print d.has_key(2) [out] True False [case testIntegerDivision_python2] x = 1 / 2 x() [out] _program.py:2: error: "int" not callable [case testFloatDivision_python2] x = 1.0 / 2.0 x = 1.0 / 2 x = 1 / 2.0 x = 1.5 [out] [case testAnyStr_python2] from typing import AnyStr def f(x): # type: (AnyStr) -> AnyStr if isinstance(x, str): return 'foo' else: return u'zar' print f('') print f(u'') [out] foo zar [case testGenericPatterns_python2] from typing import Pattern import re p = None # type: Pattern[unicode] p = re.compile(u'foo*') b = None # type: Pattern[str] b = re.compile('foo*') print(p.match(u'fooo').group(0)) [out] fooo [case testGenericMatch_python2] from typing import Match import re def f(m): # type: (Match[str]) -> None print(m.group(0)) f(re.match('x*', 'xxy')) [out] xx [case testVariableLengthTuple_python2] from typing import Tuple, cast x = cast(Tuple[int, ...], ()) print(x) [out] () [case testFromFuturePrintFunction_python2] from __future__ import print_function print('a', 'b') [out] a b [case testFromFutureImportUnicodeLiterals_python2] from __future__ import unicode_literals print '>', ['a', b'b', u'c'] [out] > [u'a', 'b', u'c'] [case testUnicodeLiteralsKwargs_python2] from __future__ import unicode_literals def f(**kwargs): # type: (...) -> None pass params = {'a': 'b'} f(**params) [out] [case testUnicodeStringKwargs_python2] def f(**kwargs): # type: (...) -> None pass params = {u'a': 'b'} f(**params) [out] [case testStrKwargs_python2] def f(**kwargs): # type: (...) 
-> None pass params = {'a': 'b'} f(**params) [out] [case testFromFutureImportUnicodeLiterals2_python2] from __future__ import unicode_literals def f(x): # type: (str) -> None pass f(b'') f(u'') f('') [out] _program.py:5: error: Argument 1 to "f" has incompatible type "unicode"; expected "str" _program.py:6: error: Argument 1 to "f" has incompatible type "unicode"; expected "str" [case testStrUnicodeCompatibility_python2] import typing def f(s): # type: (unicode) -> None pass f(u'') f('') [out] [case testStrUnicodeCompatibilityInBuiltins_python2] import typing 'x'.count('x') 'x'.count(u'x') [out] [case testTupleAsSubtypeOfSequence_python2] from typing import TypeVar, Sequence T = TypeVar('T') def f(a): # type: (Sequence[T]) -> None print a f(tuple()) [out] () [case testReadOnlyProperty_python2] import typing class A: @property def foo(self): # type: () -> int return 1 print(A().foo + 2) [out] 3 [case testIOTypes_python2] from typing import IO, TextIO, BinaryIO, Any class X(IO[str]): pass class Y(TextIO): pass class Z(BinaryIO): pass [out] [case testOpenReturnType_python2] import typing f = open('/tmp/xyz', 'w') f.write(u'foo') f.write('bar') f.close() [out] _program.py:3: error: Argument 1 to "write" of "IO" has incompatible type "unicode"; expected "str" [case testPrintFunctionWithFileArg_python2] from __future__ import print_function import typing if 1 == 2: # Don't want to run the code below, since it would create a file. f = open('/tmp/xyz', 'w') print('foo', file=f) f.close() print('ok') [out] ok [case testStringIO_python2] import typing import io c = io.StringIO() c.write(u'\x89') print(repr(c.getvalue())) [out] u'\x89' [case testBytesIO_python2] import typing import io c = io.BytesIO() c.write('\x89') print(repr(c.getvalue())) [out] '\x89' [case testTextIOWrapper_python2] import typing import io b = io.BytesIO(u'\xab'.encode('utf8')) w = io.TextIOWrapper(b, encoding='utf8') print(repr(w.read())) [out] u'\xab' [case testIoOpen_python2] import typing import io if 1 == 2: # Only type check, do not execute f = io.open('/tmp/xyz', 'w', encoding='utf8') f.write(u'\xab') f.close() print 'ok' [out] ok [case testUnionType_python2] from typing import Union y = None # type: Union[int, str] def f(x): # type: (Union[int, str]) -> str if isinstance(x, int): x = str(x) return x print f(12) print f('ab') [out] 12 ab [case testStrAdd_python2] import typing s = '' u = u'' n = 0 n = s + '' # E s = s + u'' # E [out] _program.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") _program.py:6: error: Incompatible types in assignment (expression has type "unicode", variable has type "str") [case testStrJoin_python2] import typing s = '' u = u'' n = 0 n = ''.join(['']) # Error s = ''.join([u'']) # Error [out] _program.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") _program.py:6: error: Incompatible types in assignment (expression has type "unicode", variable has type "str") [case testNamedTuple_python2] import typing from collections import namedtuple X = namedtuple('X', ['a', 'b']) x = X(a=1, b='s') print x.a, x.b [out] 1 s [case testNamedTupleError_python2] import typing from collections import namedtuple X = namedtuple('X', ['a', 'b']) x = X(a=1, b='s') x.c [out] _program.py:5: error: "X" has no attribute "c" [case testAssignToComplexReal_python2] import typing x = 4j y = x.real y = x # Error x.imag = 2.0 # Error [out] _program.py:4: error: Incompatible types in assignment (expression has type "complex", 
variable has type "float") _program.py:5: error: Property "imag" defined in "complex" is read-only [case testComplexArithmetic_python2] import typing print 5 + 8j print 3j * 2.0 print 4j / 2.0 [out] (5+8j) 6j 2j [case testNamedTupleWithTypes_python2] from typing import NamedTuple N = NamedTuple('N', [('a', int), ('b', str)]) n = N(1, 'x') print n a, b = n print a, b print n[0] [out] N(a=1, b='x') 1 x 1 [case testUnionTypeAlias_python2] from typing import Union U = Union[int, str] u = 1 # type: U u = 1.1 [out] _program.py:4: error: Incompatible types in assignment (expression has type "float", variable has type "Union[int, str]") [case testSuperNew_python2] from typing import Dict, Any class MyType(type): def __new__(cls, name, bases, namespace): # type: (str, tuple, Dict[str, Any]) -> type return super(MyType, cls).__new__(cls, name + 'x', bases, namespace) class A(object): __metaclass__ = MyType print(type(A()).__name__) [out] Ax [case testSequenceIndexAndCount_python2] from typing import Sequence def f(x): # type: (Sequence[int]) -> None print(x.index(1)) print(x.count(1)) f([0, 0, 1, 1, 1]) [out] 2 3 [case testOptional_python2] from typing import Optional def f(): # type: () -> Optional[int] pass x = f() y = 1 y = x [case testUnicodeAndOverloading_python2] from m import f f(1) f('') f(u'') f(b'') [file m.pyi] from typing import overload @overload def f(x): # type: (unicode) -> int pass @overload def f(x): # type: (bytearray) -> int pass [out] _program.py:2: error: No overload variant of "f" matches argument types [builtins.int] [case testByteArrayStrCompatibility_python2] def f(x): # type: (str) -> None pass f(bytearray('foo')) [case testAbstractProperty_python2] from abc import abstractproperty, ABCMeta class A: __metaclass__ = ABCMeta @abstractproperty def x(self): # type: () -> int pass class B(A): @property def x(self): # type: () -> int return 3 b = B() print b.x + 1 [out] 4 [case testReModuleBytesPython2] # Regression tests for various overloads in the re module -- bytes version import re if False: bre = b'a+' bpat = re.compile(bre) bpat = re.compile(bpat) re.search(bre, b'').groups() re.search(bre, u'') re.search(bpat, b'').groups() re.search(bpat, u'') # match(), split(), findall(), finditer() are much the same, so skip those. # sub(), subn() have more overloads and we are checking these: re.sub(bre, b'', b'') + b'' re.sub(bpat, b'', b'') + b'' re.sub(bre, lambda m: b'', b'') + b'' re.sub(bpat, lambda m: b'', b'') + b'' re.subn(bre, b'', b'')[0] + b'' re.subn(bpat, b'', b'')[0] + b'' re.subn(bre, lambda m: b'', b'')[0] + b'' re.subn(bpat, lambda m: b'', b'')[0] + b'' [out] [case testReModuleStringPython2] # Regression tests for various overloads in the re module -- string version import re ure = u'a+' upat = re.compile(ure) upat = re.compile(upat) re.search(ure, u'a').groups() re.search(ure, b'') # This ought to be an error, but isn't because of bytes->unicode equivalence re.search(upat, u'a').groups() re.search(upat, b'') # This ought to be an error, but isn't because of bytes->unicode equivalence # match(), split(), findall(), finditer() are much the same, so skip those. 
# sus(), susn() have more overloads and we are checking these: re.sub(ure, u'', u'') + u'' re.sub(upat, u'', u'') + u'' re.sub(ure, lambda m: u'', u'') + u'' re.sub(upat, lambda m: u'', u'') + u'' re.subn(ure, u'', u'')[0] + u'' re.subn(upat, u'', u'')[0] + u'' re.subn(ure, lambda m: u'', u'')[0] + u'' re.subn(upat, lambda m: u'', u'')[0] + u'' [out] [case testYieldRegressionTypingAwaitable_python2] # Make sure we don't reference typing.Awaitable in Python 2 mode. def g(): # type: () -> int yield [out] _program.py:2: error: The return type of a generator function should be "Generator" or one of its supertypes mypy-0.560/test-data/unit/pythoneval-asyncio.test0000644€tŠÔÚ€2›s®0000003021213215007206026270 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for type checking mypy programs using full stubs and running -- using CPython. -- -- These are mostly regression tests -- no attempt is made to make these -- complete. -- -- This test file check Asyncio and yield from interaction [case testImportAsyncio] import asyncio print('Imported') [out] Imported [case testSimpleCoroutineSleep] from typing import Any, Generator import asyncio from asyncio import Future @asyncio.coroutine def greet_every_two_seconds() -> 'Generator[Any, None, None]': n = 0 while n < 5: print('Prev', n) yield from asyncio.sleep(0.1) print('After', n) n += 1 loop = asyncio.get_event_loop() try: loop.run_until_complete(greet_every_two_seconds()) finally: loop.close() [out] Prev 0 After 0 Prev 1 After 1 Prev 2 After 2 Prev 3 After 3 Prev 4 After 4 [case testCoroutineCallingOtherCoroutine] from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def compute(x: int, y: int) -> 'Generator[Any, None, int]': print("Compute %s + %s ..." % (x, y)) yield from asyncio.sleep(0.1) return x + y # Here the int is wrapped in Future[int] @asyncio.coroutine def print_sum(x: int, y: int) -> 'Generator[Any, None, None]': result = yield from compute(x, y) # The type of result will be int (is extracted from Future[int] print("%s + %s = %s" % (x, y, result)) loop = asyncio.get_event_loop() loop.run_until_complete(print_sum(1, 2)) loop.close() [out] Compute 1 + 2 ... 1 + 2 = 3 [case testCoroutineChangingFuture] from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]': yield from asyncio.sleep(0.1) future.set_result('Future is done!') loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[str] asyncio.Task(slow_operation(future)) loop.run_until_complete(future) print(future.result()) loop.close() [out] Future is done! [case testFunctionAssignedAsCallback] import typing from typing import Generator, Any import asyncio from asyncio import Future, AbstractEventLoop @asyncio.coroutine def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]': yield from asyncio.sleep(1) future.set_result('Callback works!') def got_result(future: 'Future[str]') -> None: print(future.result()) loop.stop() loop = asyncio.get_event_loop() # type: AbstractEventLoop future = asyncio.Future() # type: Future[str] asyncio.Task(slow_operation(future)) # Here create a task with the function. (The Task need a Future[T] as first argument) future.add_done_callback(got_result) # and assign the callback to the future try: loop.run_forever() finally: loop.close() [out] Callback works! 
[case testMultipleTasks] import typing from typing import Generator, Any import asyncio from asyncio import Task, Future @asyncio.coroutine def factorial(name, number) -> 'Generator[Any, None, None]': f = 1 for i in range(2, number+1): print("Task %s: Compute factorial(%s)..." % (name, i)) yield from asyncio.sleep(0.1) f *= i print("Task %s: factorial(%s) = %s" % (name, number, f)) loop = asyncio.get_event_loop() tasks = [ asyncio.Task(factorial("A", 2)), asyncio.Task(factorial("B", 3)), asyncio.Task(factorial("C", 4))] loop.run_until_complete(asyncio.wait(tasks)) loop.close() [out] Task A: Compute factorial(2)... Task B: Compute factorial(2)... Task C: Compute factorial(2)... Task A: factorial(2) = 2 Task B: Compute factorial(3)... Task C: Compute factorial(3)... Task B: factorial(3) = 6 Task C: Compute factorial(4)... Task C: factorial(4) = 24 [case testConcatenatedCoroutines] import typing from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def h4() -> 'Generator[Any, None, int]': x = yield from future return x @asyncio.coroutine def h3() -> 'Generator[Any, None, int]': x = yield from h4() print("h3: %s" % x) return x @asyncio.coroutine def h2() -> 'Generator[Any, None, int]': x = yield from h3() print("h2: %s" % x) return x @asyncio.coroutine def h() -> 'Generator[Any, None, None]': x = yield from h2() print("h: %s" % x) loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[int] future.set_result(42) loop.run_until_complete(h()) print("Outside %s" % future.result()) loop.close() [out] h3: 42 h2: 42 h: 42 Outside 42 [case testConcatenatedCoroutinesReturningFutures] import typing from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def h4() -> 'Generator[Any, None, Future[int]]': yield from asyncio.sleep(0.1) f = asyncio.Future() #type: Future[int] return f @asyncio.coroutine def h3() -> 'Generator[Any, None, Future[Future[int]]]': x = yield from h4() x.set_result(42) f = asyncio.Future() #type: Future[Future[int]] f.set_result(x) return f @asyncio.coroutine def h() -> 'Generator[Any, None, None]': print("Before") x = yield from h3() y = yield from x z = yield from y print(z) def normalize(future): # The str conversion seems inconsistent; not sure exactly why. Normalize # the result. 
return str(future).replace(' Future> [case testCoroutineWithOwnClass] import typing from typing import Generator, Any import asyncio from asyncio import Future class A: def __init__(self, x: int) -> None: self.x = x @asyncio.coroutine def h() -> 'Generator[Any, None, None]': x = yield from future print("h: %s" % x.x) loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[A] future.set_result(A(42)) loop.run_until_complete(h()) print("Outside %s" % future.result().x) loop.close() [out] h: 42 Outside 42 -- Errors [case testErrorAssigningCoroutineThatDontReturn] from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def greet() -> 'Generator[Any, None, None]': yield from asyncio.sleep(0.2) print('Hello World') @asyncio.coroutine def test() -> 'Generator[Any, None, None]': yield from greet() x = yield from greet() # Error loop = asyncio.get_event_loop() try: loop.run_until_complete(test()) finally: loop.close() [out] _program.py:13: error: Function does not return a value [case testErrorReturnIsNotTheSameType] from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def compute(x: int, y: int) -> 'Generator[Any, None, int]': print("Compute %s + %s ..." % (x, y)) yield from asyncio.sleep(0.1) return str(x + y) # Error @asyncio.coroutine def print_sum(x: int, y: int) -> 'Generator[Any, None, None]': result = yield from compute(x, y) print("%s + %s = %s" % (x, y, result)) loop = asyncio.get_event_loop() loop.run_until_complete(print_sum(1, 2)) loop.close() [out] _program.py:9: error: Incompatible return value type (got "str", expected "int") [case testErrorSetFutureDifferentInternalType] from typing import Generator, Any import asyncio from asyncio import Future @asyncio.coroutine def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]': yield from asyncio.sleep(1) future.set_result(42) # Error loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[str] asyncio.Task(slow_operation(future)) loop.run_until_complete(future) print(future.result()) loop.close() [out] _program.py:8: error: Argument 1 to "set_result" of "Future" has incompatible type "int"; expected "str" [case testErrorUsingDifferentFutureType] from typing import Any, Generator import asyncio from asyncio import Future @asyncio.coroutine def slow_operation(future: 'Future[int]') -> 'Generator[Any, None, None]': yield from asyncio.sleep(1) future.set_result(42) loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[str] asyncio.Task(slow_operation(future)) # Error loop.run_until_complete(future) print(future.result()) loop.close() [out] _program.py:12: error: Argument 1 to "slow_operation" has incompatible type "Future[str]"; expected "Future[int]" [case testErrorUsingDifferentFutureTypeAndSetFutureDifferentInternalType] from typing import Generator, Any import asyncio from asyncio import Future asyncio.coroutine def slow_operation(future: 'Future[int]') -> 'Generator[Any, None, None]': yield from asyncio.sleep(1) future.set_result('42') #Try to set an str as result to a Future[int] loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[str] asyncio.Task(slow_operation(future)) # Error loop.run_until_complete(future) print(future.result()) loop.close() [out] _program.py:8: error: Argument 1 to "set_result" of "Future" has incompatible type "str"; expected "int" _program.py:12: error: Argument 1 to "slow_operation" has incompatible type "Future[str]"; expected "Future[int]" 
[case testErrorSettingCallbackWithDifferentFutureType] import typing from typing import Generator, Any import asyncio from asyncio import Future, AbstractEventLoop @asyncio.coroutine def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]': yield from asyncio.sleep(1) future.set_result('Future is done!') def got_result(future: 'Future[int]') -> None: print(future.result()) loop.stop() loop = asyncio.get_event_loop() # type: AbstractEventLoop future = asyncio.Future() # type: Future[str] asyncio.Task(slow_operation(future)) future.add_done_callback(got_result) # Error try: loop.run_forever() finally: loop.close() [out] _program.py:18: error: Argument 1 to "add_done_callback" of "Future" has incompatible type "Callable[[Future[int]], None]"; expected "Callable[[Future[str]], Any]" [case testErrorOneMoreFutureInReturnType] import typing from typing import Any, Generator import asyncio from asyncio import Future @asyncio.coroutine def h4() -> 'Generator[Any, None, Future[int]]': yield from asyncio.sleep(1) f = asyncio.Future() #type: Future[int] return f @asyncio.coroutine def h3() -> 'Generator[Any, None, Future[Future[Future[int]]]]': x = yield from h4() x.set_result(42) f = asyncio.Future() #type: Future[Future[int]] f.set_result(x) return f @asyncio.coroutine def h() -> 'Generator[Any, None, None]': print("Before") x = yield from h3() y = yield from x z = yield from y print(z) print(y) print(x) loop = asyncio.get_event_loop() loop.run_until_complete(h()) loop.close() [out] _program.py:18: error: Incompatible return value type (got "Future[Future[int]]", expected "Future[Future[Future[int]]]") [case testErrorOneLessFutureInReturnType] import typing from typing import Any, Generator import asyncio from asyncio import Future @asyncio.coroutine def h4() -> 'Generator[Any, None, Future[int]]': yield from asyncio.sleep(1) f = asyncio.Future() #type: Future[int] return f @asyncio.coroutine def h3() -> 'Generator[Any, None, Future[int]]': x = yield from h4() x.set_result(42) f = asyncio.Future() #type: Future[Future[int]] f.set_result(x) return f @asyncio.coroutine def h() -> 'Generator[Any, None, None]': print("Before") x = yield from h3() y = yield from x print(y) print(x) loop = asyncio.get_event_loop() loop.run_until_complete(h()) loop.close() [out] _program.py:18: error: Incompatible return value type (got "Future[Future[int]]", expected "Future[int]") [case testErrorAssignmentDifferentType] import typing from typing import Generator, Any import asyncio from asyncio import Future class A: def __init__(self, x: int) -> None: self.x = x class B: def __init__(self, x: int) -> None: self.x = x @asyncio.coroutine def h() -> 'Generator[Any, None, None]': x = yield from future # type: B # Error print("h: %s" % x.x) loop = asyncio.get_event_loop() future = asyncio.Future() # type: Future[A] future.set_result(A(42)) loop.run_until_complete(h()) loop.close() [out] _program.py:16: error: Incompatible types in assignment (expression has type "A", variable has type "B") mypy-0.560/test-data/unit/pythoneval.test0000644€tŠÔÚ€2›s®0000010004713215007206024631 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for type checking mypy programs using full stubs and running -- using CPython. -- -- These are mostly regression tests -- no attempt is made to make these -- complete. [case testHello] import typing print('hello, world') [out] hello, world -- Skipped because different typing package versions have different repr()s. 
[case testAbstractBaseClasses-skip] import re from typing import Sized, Sequence, Iterator, Iterable, Mapping, AbstractSet def check(o, t): rep = re.sub('0x[0-9a-fA-F]+', '0x...', repr(o)) rep = rep.replace('sequenceiterator', 'str_iterator') trep = str(t).replace('_abcoll.Sized', 'collections.abc.Sized') print(rep, trep, isinstance(o, t)) def f(): check('x', Sized) check([1], Sequence) check({1:3}, Sequence) check(iter('x'), Iterator) check('x', Iterable) check({}, Mapping) check(set([1]), AbstractSet) f() [out] 'x' True [1] typing.Sequence True {1: 3} typing.Sequence False typing.Iterator True 'x' typing.Iterable True {} typing.Mapping True {1} typing.AbstractSet True [case testSized] from typing import Sized class A(Sized): def __len__(self): return 5 print(len(A())) [out] 5 [case testReversed] from typing import Reversible class A(Reversible): def __iter__(self): return iter('oof') def __reversed__(self): return iter('foo') print(list(reversed(range(5)))) print(list(reversed([1,2,3]))) print(list(reversed('abc'))) print(list(reversed(A()))) [out] -- Duplicate [ at line beginning. [[4, 3, 2, 1, 0] [[3, 2, 1] [['c', 'b', 'a'] [['f', 'o', 'o'] [case testIntAndFloatConversion] from typing import SupportsInt, SupportsFloat class A(SupportsInt): def __int__(self): return 5 class B(SupportsFloat): def __float__(self): return 1.2 print(int(1)) print(int(6.2)) print(int('3')) print(int(b'4')) print(int(A())) print(float(-9)) print(float(B())) [out] 1 6 3 4 5 -9.0 1.2 [case testAbs] from typing import SupportsAbs class A(SupportsAbs[float]): def __abs__(self) -> float: return 5.5 print(abs(-1)) print(abs(-1.2)) print(abs(A())) [out] 1 1.2 5.5 [case testAbs2] n = None # type: int f = None # type: float n = abs(1) abs(1) + 'x' # Error f = abs(1.1) abs(1.1) + 'x' # Error [out] _program.py:5: error: Unsupported operand types for + ("int" and "str") _program.py:7: error: Unsupported operand types for + ("float" and "str") [case testRound] from typing import SupportsRound class A(SupportsRound): def __round__(self, ndigits=0): return 'x%d' % ndigits print(round(1.6)) print(round(A())) print(round(A(), 2)) [out] 2 x0 x2 [case testCallMethodViaTypeObject] import typing print(list.__add__([1, 2], [3, 4])) [out] [[1, 2, 3, 4] [case testClassDataAttribute] import typing class A: x = 0 print(A.x) A.x += 1 print(A.x) [out] 0 1 [case testInheritedClassAttribute] import typing class A: x = 1 def f(self) -> None: print('f') class B(A): pass B.f(None) print(B.x) [out] f 1 [case testFunctionDecorator] from typing import TypeVar, cast ftype = TypeVar('ftype') def logged(f: ftype) -> ftype: def g(*args, **kwargs): print('enter', f.__name__) r = f(*args, **kwargs) print('exit', f.__name__) return r return cast(ftype, g) @logged def foo(s: str) -> str: print('foo', s) return s + '!' print(foo('y')) print(foo('x')) [out] enter foo foo y exit foo y! enter foo foo x exit foo x! 
[case testModuleAttributes] import math import typing print(math.__name__) print(type(math.__dict__)) print(type(math.__doc__ or '')) print(math.__class__) [out] math [case testSpecialAttributes] import typing class A: pass print(object().__doc__) print(A().__class__) [out] The most base type [case testFunctionAttributes] import typing ord.__class__ print(type(ord.__doc__ + '')) print(ord.__name__) print(ord.__module__) [out] ord builtins [case testTypeAttributes] import typing print(str.__class__) print(type(str.__doc__)) print(str.__name__) print(str.__module__) print(str.__dict__ is not None) [out] str builtins True [case testBoolCompatibilityWithInt] import typing x = 0 x = True print(bool('x')) print(bool('')) [out] True False [case testCallBuiltinTypeObjectsWithoutArguments] import typing print(int()) print(repr(str())) print(repr(bytes())) print(float()) print(bool()) [out] 0 '' b'' 0.0 False [case testIntegerDivision] import typing x = 1 / 2 x = 1.5 [out] [case testStaticmethod] import typing class A: @staticmethod def f(x: str) -> int: return int(x) print(A.f('12')) print(A().f('34')) [out] 12 34 [case testClassmethod] import typing class A: @classmethod def f(cls, x: str) -> int: return int(x) print(A.f('12')) print(A().f('34')) [out] 12 34 [case testIntMethods] import typing print(int.from_bytes(b'ab', 'big')) n = 0 print(n.from_bytes(b'ac', 'big')) print(n.from_bytes([2, 3], 'big')) print(n.to_bytes(2, 'big')) [out] 24930 24931 515 b'\x00\x00' [case testFloatMethods] import typing print(1.5.as_integer_ratio()) print(1.5.hex()) print(2.0.is_integer()) print(float.fromhex('0x1.8')) [out] (3, 2) 0x1.8000000000000p+0 True 1.5 [case testArray] import typing import array array.array('b', [1, 2]) [out] [case testDictFromkeys] import typing d = dict.fromkeys('foo') d['x'] = 2 d2 = dict.fromkeys([1, 2], b'') d2[2] = b'foo' [out] [case testReadOnlyProperty] class A: x = 2 @property def f(self) -> int: return self.x + 1 print(A().f) [out] 3 [case testIsinstanceWithTuple] from typing import cast, Any x = cast(Any, (1, 'x')) if isinstance(x, tuple): print(x[0], x[1]) [out] 1 x [case testTypevarValues] from typing import TypeVar T = TypeVar('T', str, bytes) def f(x: T) -> T: if isinstance(x, str): return 'foo' else: return b'bar' print(f('')) print(f(b'')) [out] foo b'bar' [case testAnyStr] from typing import AnyStr def f(x: AnyStr) -> AnyStr: if isinstance(x, str): return 'foo' else: return b'zar' print(f('')) print(f(b'')) [out] foo b'zar' [case testNameNotImportedFromTyping] import typing cast(int, 2) [out] _program.py:2: error: Name 'cast' is not defined [case testBinaryIOType] from typing import BinaryIO def f(f: BinaryIO) -> None: f.write(b'foo') f.write(bytearray(b'foo')) [out] [case testIOTypes] from typing import IO import sys def txt(f: IO[str]) -> None: f.write('foo') f.write(b'foo') def bin(f: IO[bytes]) -> None: f.write(b'foo') f.write(bytearray(b'foo')) txt(sys.stdout) bin(sys.stdout) [out] _program.py:5: error: Argument 1 to "write" of "IO" has incompatible type "bytes"; expected "str" _program.py:10: error: Argument 1 to "bin" has incompatible type "TextIO"; expected "IO[bytes]" [case testBuiltinOpen] f = open('x') f.write('x') f.write(b'x') f.foobar() [out] _program.py:3: error: Argument 1 to "write" of "IO" has incompatible type "bytes"; expected "str" _program.py:4: error: "TextIO" has no attribute "foobar" [case testOpenReturnTypeInference] reveal_type(open('x')) reveal_type(open('x', 'r')) reveal_type(open('x', 'rb')) mode = 'rb' reveal_type(open('x', mode)) [out] 
_program.py:1: error: Revealed type is 'typing.TextIO' _program.py:2: error: Revealed type is 'typing.TextIO' _program.py:3: error: Revealed type is 'typing.BinaryIO' _program.py:5: error: Revealed type is 'typing.IO[Any]' [case testOpenReturnTypeInferenceSpecialCases] reveal_type(open()) reveal_type(open(mode='rb', file='x')) reveal_type(open(file='x', mode='rb')) mode = 'rb' reveal_type(open(mode=mode, file='r')) [out] _testOpenReturnTypeInferenceSpecialCases.py:1: error: Revealed type is 'typing.TextIO' _testOpenReturnTypeInferenceSpecialCases.py:1: error: Too few arguments for "open" _testOpenReturnTypeInferenceSpecialCases.py:2: error: Revealed type is 'typing.BinaryIO' _testOpenReturnTypeInferenceSpecialCases.py:3: error: Revealed type is 'typing.BinaryIO' _testOpenReturnTypeInferenceSpecialCases.py:5: error: Revealed type is 'typing.IO[Any]' [case testGenericPatterns] from typing import Pattern import re p = None # type: Pattern[str] p = re.compile('foo*') b = None # type: Pattern[bytes] b = re.compile(b'foo*') print(p.match('fooo').group(0)) [out] fooo [case testGenericMatch] from typing import Match import re def f(m: Match[bytes]) -> None: print(m.group(0)) f(re.match(b'x*', b'xxy')) [out] b'xx' [case testMultipleTypevarsWithValues] from typing import TypeVar T = TypeVar('T', int, str) S = TypeVar('S', int, str) def f(t: T, s: S) -> None: t + s [out] _program.py:7: error: Unsupported operand types for + ("int" and "str") _program.py:7: error: Unsupported operand types for + ("str" and "int") [case testSystemExitCode] import typing print(SystemExit(5).code) [out] 5 [case testIntFloatDucktyping] x = None # type: float x = 2.2 x = 2 def f(x: float) -> None: pass f(1.1) f(1) [out] [case testsFloatOperations] import typing print(1.5 + 1.5) print(1.5 + 1) [out] 3.0 2.5 [case testMathFunctionWithIntArgument] import typing import math math.sin(2) math.sin(2.2) [case testAbsReturnType] f = None # type: float n = None # type: int n = abs(2) f = abs(2.2) abs(2.2) + 'x' [out] _program.py:6: error: Unsupported operand types for + ("float" and "str") [case testROperatorMethods] b = None # type: bytes s = None # type: str s = b'foo' * 5 # Error b = 5 * b'foo' b = b'foo' * 5 s = 5 * 'foo' s = 'foo' * 5 [out] _program.py:4: error: Incompatible types in assignment (expression has type "bytes", variable has type "str") [case testROperatorMethods2] import typing print(2 / 0.5) print(' ', 2 * [3, 4]) [out] 4.0 [3, 4, 3, 4] [case testNotImplemented] import typing class A: def __add__(self, x: int) -> int: if isinstance(x, int): return x + 1 return NotImplemented class B: def __radd__(self, x: A) -> str: return 'x' print(A() + 1) print(A() + B()) [out] 2 x [case testMappingMethods] # Regression test from typing import Mapping x = {'x': 'y'} # type: Mapping[str, str] print('x' in x) print('y' in x) [out] True False [case testOverlappingOperatorMethods] class X: pass class A: def __add__(self, x) -> int: if isinstance(x, X): return 1 return NotImplemented class B: def __radd__(self, x: A) -> str: return 'x' class C(X, B): pass b = None # type: B b = C() print(A() + b) [out] _program.py:9: error: Signatures of "__radd__" of "B" and "__add__" of "A" are unsafely overlapping [case testBytesAndBytearrayComparisons] import typing print(b'ab' < bytearray(b'b')) print(bytearray(b'ab') < b'a') [out] True False [case testBytesAndBytearrayComparisons2] import typing '' < b'' b'' < '' '' < bytearray() bytearray() < '' [out] _program.py:2: error: Unsupported operand types for > ("bytes" and "str") _program.py:3: 
error: Unsupported operand types for > ("str" and "bytes") _program.py:4: error: Unsupported operand types for > ("bytearray" and "str") _program.py:5: error: Unsupported operand types for > ("str" and "bytearray") [case testInplaceOperatorMethod] import typing a = [1] print('', a.__iadd__([2])) print('', a) [out] [1, 2] [1, 2] [case testListInplaceAdd] import typing a = [1] a += iter([2, 3]) print(tuple(a)) [out] (1, 2, 3) [case testListConcatenateWithIterable] import typing [1] + iter([2, 3]) [out] _program.py:2: error: Unsupported operand types for + ("List[int]" and "Iterator[int]") [case testInferHeterogeneousListOfIterables] from typing import Sequence s = ['x', 'y'] # type: Sequence[str] a = [['x', 'x'], 'fo', s, iter('foo'), {'aa'}] for i, x in enumerate(a): print(i, next(iter(x))) [out] 0 x 1 f 2 x 3 f 4 aa [case testTextIOProperties] import typing import sys print(type(sys.stdin.encoding)) print(type(sys.stdin.errors)) sys.stdin.line_buffering sys.stdin.buffer sys.stdin.newlines [out] [case testIOProperties] import typing import sys print(sys.stdin.name) print(sys.stdin.buffer.mode) [out] rb [case testSetUnion] import typing s = {'x', 'y'} print('>', sorted(s.union('foo'))) [out] > ['f', 'o', 'x', 'y'] [case testFromFuturePrintFunction] from __future__ import print_function print('a', 'b') [out] a b [case testLenOfTuple] import typing print(len((1, 'x'))) [out] 2 [case testListMethods] import typing import sys l = [0, 1, 2, 3, 4] if sys.version >= '3.3': l.clear() else: l = [] l.append(0) print('>', l) if sys.version >= '3.3': m = l.copy() else: m = l[:] m.extend([1, 2, 3, 4]) print('>', m) print(l.index(0)) print(l.index(0, 0)) print(l.index(0, 0, 1)) try: print(l.index(1)) print('expected ValueError') except ValueError: pass l.insert(0, 1) print('>', l) print(l.pop(0)) print(l.pop()) m.remove(0) try: m.remove(0) print('expected ValueError') except ValueError: pass m.reverse() m.sort() m.sort(key=lambda x: -x) m.sort(reverse=False) m.sort(key=lambda x: -x, reverse=True) print('>', m) [out] > [0] > [0, 1, 2, 3, 4] 0 0 0 > [1, 0] 1 0 > [1, 2, 3, 4] [case testListOperators] import typing l = [0, 1] print('+', l + [2]) print('*', l * 2) print('*', 2 * l) print('in', 1 in l) print('==', l == [1, 2]) print('!=', l != [1, 2]) print('>', l > [1, 2, 3]) print('>=', l >= [1, 2, 3]) print('<', l < [1, 2, 3]) print('<=', l <= [1, 2, 3]) print('>[0]', l[0]) l += [2] print('+=', l) l *= 2 print('*=', l) print('iter', list(iter(l))) print('len', len(l)) print('repr', repr(l)) l[:3] = [] print('setslice', l) print('reversed', list(reversed(l))) [out] + [0, 1, 2] * [0, 1, 0, 1] * [0, 1, 0, 1] in True == False != True > False >= False < True <= True >[0] 0 += [0, 1, 2] *= [0, 1, 2, 0, 1, 2] iter [0, 1, 2, 0, 1, 2] len 6 repr [0, 1, 2, 0, 1, 2] setslice [0, 1, 2] reversed [2, 1, 0] [case testTupleAsSubtypeOfSequence] from typing import TypeVar, Sequence T = TypeVar('T') def f(a: Sequence[T]) -> None: print(a) f(tuple()) [out] () [case testMapWithLambdaSpecialCase-skip] # TODO: Fix this; this was broken at some point but not sure why. 
from typing import List, Iterator a = [[1], [3]] b = map(lambda y: y[0], a) print('>', list(b)) [out] > [1, 3] [case testInternalBuiltinDefinition] import typing def f(x: _T) -> None: pass s: FrozenSet [out] _program.py:2: error: Name '_T' is not defined _program.py:3: error: Name 'FrozenSet' is not defined [case testVarArgsFunctionSubtyping] import typing def f(*args: str) -> str: return args[0] map(f, ['x']) map(f, [1]) [out] _program.py:4: error: Argument 1 to "map" has incompatible type "Callable[[VarArg(str)], str]"; expected "Callable[[int], str]" [case testMapStr] import typing x = range(3) a = list(map(str, x)) a + 1 [out] _program.py:4: error: Unsupported operand types for + ("List[str]" and "int") [case testNamedTuple] import typing from collections import namedtuple X = namedtuple('X', ['a', 'b']) x = X(a=1, b='s') print(x.a, x.b) [out] 1 s [case testNamedTupleShortSyntax] import typing from collections import namedtuple X = namedtuple('X', ' a b ') x = X(a=1, b='s') print(x.a, x.b) [out] 1 s [case testNamedTupleError] import typing from collections import namedtuple X = namedtuple('X', ['a', 'b']) x = X(a=1, b='s') x.c [out] _program.py:5: error: "X" has no attribute "c" [case testNamedTupleTupleOperations] from typing import Iterable from collections import namedtuple X = namedtuple('X', ['a', 'b']) def f(x: Iterable[int]) -> None: pass x = X(a=1, b='s') f(x) print(len(x)) print(x.index(1)) print(x.count(1)) print(x + x) [out] 2 0 1 (1, 's', 1, 's') [case testNamedTupleWithTypes] from typing import NamedTuple N = NamedTuple('N', [('a', int), ('b', str)]) n = N(1, 'x') print(n) a, b = n print(a, b) print(n[0]) [out] N(a=1, b='x') 1 x 1 [case testRelativeImport] import typing from m import x print(x) [file m/__init__.py] from .n import x [file m/n.py] x = 1 [out] 1 [case testRelativeImport2] import typing from m.n import x print(x) [file m/__init__.py] [file m/n.py] from .nn import x [file m/nn.py] x = 2 [out] 2 [case testPyiTakesPrecedenceOverPy] import m m.f(1) [file m.py] def f(x): print(x) [file m.pyi] import typing def f(x: str) -> None: pass [out] _program.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" [case testAssignToComplexReal] import typing x = 4j y = x.real y = x # Error x.real = 2.0 # Error [out] _program.py:4: error: Incompatible types in assignment (expression has type "complex", variable has type "float") _program.py:5: error: Property "real" defined in "complex" is read-only [case testComplexArithmetic] import typing print(5 + 8j) print(3j * 2.0) print(4J / 2.0) [out] (5+8j) 6j 2j [case testComplexArithmetic2] import typing x = 5 + 8j x = '' y = 3j * 2.0 y = '' [out] _program.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "complex") _program.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "complex") [case testUnionTypeAlias] from typing import Union U = Union[int, str] u = 1 # type: U u = 1.1 [out] _program.py:4: error: Incompatible types in assignment (expression has type "float", variable has type "Union[int, str]") [case testTupleTypeAlias] from typing import Tuple A = Tuple[int, str] u = 1, 'x' # type: A u = 1 [out] _program.py:4: error: Incompatible types in assignment (expression has type "int", variable has type "Tuple[int, str]") [case testCallableTypeAlias] from typing import Callable A = Callable[[int], None] def f(x: A) -> None: x(1) x('') [out] _program.py:5: error: Argument 1 has incompatible type "str"; expected "int" [case 
testSuperNew] from typing import Dict, Any class MyType(type): def __new__(cls, name: str, bases: tuple, namespace: Dict[str, Any]) -> type: return super().__new__(cls, name + 'x', bases, namespace) class A(metaclass=MyType): pass print(type(A()).__name__) [out] Ax [case testSequenceIndexAndCount] from typing import Sequence def f(x: Sequence[int]) -> None: print(x.index(1)) print(x.count(1)) f([0, 0, 1, 1, 1]) [out] 2 3 [case testEscapeInTripleQuotedStrLiteral] print('''\'''') print(r"""\"""$""") [out] ' \"""$ [case testSubclassBothGenericAndNonGenericABC] from typing import Generic, TypeVar from abc import ABCMeta T = TypeVar('T') class A(metaclass=ABCMeta): pass class B(Generic[T]): pass class C(A, B): pass class D(B, A): pass class E(A, B[T], Generic[T]): pass class F(B[T], A, Generic[T]): pass def f(e: E[int], f: F[int]) -> None: pass [out] [case testOptional] from typing import Optional def f() -> Optional[int]: pass x = f() y = 1 y = x [case testAppendToStarArg] import typing def f(*x: int) -> None: x.append(1) f(1) [out] _program.py:3: error: "Tuple[int, ...]" has no attribute "append" [case testExit] print('a') exit(2) print('b') [out] a [case testTypeVariableTypeComparability] from typing import TypeVar T = TypeVar('T') def eq(x: T, y: T, z: T) -> T: if x == y: return y else: return z print(eq(1, 2, 3)) print(eq('x', 'x', 'z')) [out] 3 x [case testIntDecimalCompatibility] import typing from decimal import Decimal print(Decimal(1) + 2) print(Decimal(1) - 2) print(1 + Decimal('2.34')) print(1 - Decimal('2.34')) print(2 * Decimal('2.34')) [out] 3 -1 3.34 -1.34 4.68 [case testInstantiateBuiltinTypes] from typing import Dict, Set, List d = dict() # type: Dict[int, str] s = set() # type: Set[int] l = list() # type: List[int] str() bytes() bytearray() int() float() complex() slice(1) bool() [case testVariableLengthTuple] from typing import Tuple def p(t: Tuple[int, ...]) -> None: for n in t: print(n) p((1, 3, 2)) [out] 1 3 2 [case testVariableLengthTupleError] from typing import Tuple def p(t: Tuple[str, ...]) -> None: n = 5 print(t[n]) for s in t: s() ''.startswith(('x', 'y')) ''.startswith(('x', b'y')) [out] _program.py:6: error: "str" not callable _program.py:8: error: Argument 1 to "startswith" of "str" has incompatible type "Tuple[str, bytes]"; expected "Union[str, Tuple[str, ...]]" [case testMultiplyTupleByInteger] n = 4 t = ('',) * n t + 1 [out] _program.py:3: error: Unsupported operand types for + ("Tuple[str, ...]" and "int") [case testMultiplyTupleByIntegerReverse] n = 4 t = n * ('',) t + 1 [out] _program.py:3: error: Unsupported operand types for + ("Tuple[str, ...]" and "int") [case testDictWithKeywordArgs] from typing import Dict, Any, List d1 = dict(a=1, b=2) # type: Dict[str, int] d2 = dict(a=1, b='') # type: Dict[str, int] # E d3 = dict(a=1, b=1) d3.xyz # E d4 = dict(a=1, b='') # type: Dict[str, Any] result = dict(x=[], y=[]) # type: Dict[str, List[str]] [out] _program.py:3: error: Dict entry 1 has incompatible type "str": "str"; expected "str": "int" _program.py:5: error: "Dict[str, int]" has no attribute "xyz" [case testDefaultDict] import typing as t from collections import defaultdict T = t.TypeVar('T') d1 = defaultdict(list) # type: t.DefaultDict[int, str] d2 = defaultdict() # type: t.DefaultDict[int, str] d2[0] = '0' d2['0'] = 0 def tst(dct: t.DefaultDict[int, T]) -> T: return dct[0] collections = ['coins', 'stamps', 'comics'] # type: t.List[str] d3 = defaultdict(str) # type: t.DefaultDict[int, str] collections[2] tst(defaultdict(list, {0: []})) 
tst(defaultdict(list, {'0': []})) class MyDDict(t.DefaultDict[int,T], t.Generic[T]): pass MyDDict(dict)['0'] MyDDict(dict)[0] [out] _program.py:6: error: Argument 1 to "defaultdict" has incompatible type "Type[List[Any]]"; expected "Callable[[], str]" _program.py:9: error: Invalid index type "str" for "defaultdict[int, str]"; expected type "int" _program.py:9: error: Incompatible types in assignment (expression has type "int", target has type "str") _program.py:19: error: Dict entry 0 has incompatible type "str": "List[]"; expected "int": "List[]" _program.py:23: error: Invalid index type "str" for "MyDDict[Dict[_KT, _VT]]"; expected type "int" [case testNoSubcriptionOfStdlibCollections] import collections from collections import Counter from typing import TypeVar collections.defaultdict[int, str]() Counter[int]() T = TypeVar('T') DDint = collections.defaultdict[T, int] d = DDint[str]() d[0] = 1 def f(d: collections.defaultdict[int, str]) -> None: ... [out] _program.py:5: error: "defaultdict" is not subscriptable _program.py:6: error: "Counter" is not subscriptable _program.py:9: error: "defaultdict" is not subscriptable _program.py:12: error: Invalid index type "int" for "defaultdict[str, int]"; expected type "str" _program.py:14: error: "defaultdict" is not subscriptable, use "typing.DefaultDict" instead [case testCollectionsAliases] import typing as t import collections as c o1 = c.Counter() # type: t.Counter[int] reveal_type(o1) o1['string'] o2 = c.ChainMap() # type: t.ChainMap[int, str] reveal_type(o2) o3 = c.deque() # type: t.Deque[int] reveal_type(o3) o4 = t.Counter[int]() reveal_type(o4) o5 = t.ChainMap[int, str]() reveal_type(o5) o6 = t.Deque[int]() reveal_type(o6) [out] _testCollectionsAliases.py:5: error: Revealed type is 'collections.Counter[builtins.int]' _testCollectionsAliases.py:6: error: Invalid index type "str" for "Counter[int]"; expected type "int" _testCollectionsAliases.py:9: error: Revealed type is 'collections.ChainMap[builtins.int, builtins.str]' _testCollectionsAliases.py:12: error: Revealed type is 'collections.deque[builtins.int]' _testCollectionsAliases.py:15: error: Revealed type is 'collections.Counter[builtins.int*]' _testCollectionsAliases.py:18: error: Revealed type is 'collections.ChainMap[builtins.int*, builtins.str*]' _testCollectionsAliases.py:21: error: Revealed type is 'collections.deque[builtins.int*]' [case testChainMapUnimported] ChainMap[int, str]() [out] _testChainMapUnimported.py:1: error: Name 'ChainMap' is not defined [case testDequeWrongCase] import collections import typing collections.Deque() typing.deque() [out] _testDequeWrongCase.py:4: error: Module has no attribute "Deque" _testDequeWrongCase.py:5: error: Module has no attribute "deque" [case testDictUpdateInference] from typing import Dict, Optional d = {} # type: Dict[str, Optional[int]] d.update({str(i): None for i in range(4)}) [case testSuperAndSetattr] class A: def __init__(self) -> None: super().__setattr__('a', 1) super().__setattr__(1, 'a') [out] _program.py:4: error: Argument 1 to "__setattr__" of "object" has incompatible type "int"; expected "str" [case testMetaclassAndSuper] class A(type): def __new__(cls, name, bases, namespace) -> 'type': return super().__new__(cls, '', (object,), {'x': 7}) class B(metaclass=A): pass print(getattr(B(), 'x')) [out] 7 [case testSortedNoError] from typing import Iterable, Callable, TypeVar, List, Dict T = TypeVar('T') def sorted(x: Iterable[T], *, key: Callable[[T], object] = None) -> None: ... 
a = None # type: List[Dict[str, str]] sorted(a, key=lambda y: y['']) [case testAbstractProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty def x(self) -> int: pass class B(A): @property def x(self) -> int: return 3 b = B() print(b.x + 1) [out] 4 [case testInferenceWithLambda] from typing import TypeVar, Iterable, Iterator, List import itertools _T = TypeVar('_T') def f(iterable): # type: (Iterable[_T]) -> Iterator[List[_T]] grouped = itertools.groupby(enumerate(iterable), lambda pair: pair[0] // 2) return ([elem for _, elem in group] for _, group in grouped) [case testReModuleBytes] # Regression tests for various overloads in the re module -- bytes version import re bre = b'a+' bpat = re.compile(bre) bpat = re.compile(bpat) re.search(bre, b'').groups() re.search(bre, u'') # Error re.search(bpat, b'').groups() re.search(bpat, u'') # Error # match(), split(), findall(), finditer() are much the same, so skip those. # sub(), subn() have more overloads and we are checking these: re.sub(bre, b'', b'') + b'' re.sub(bpat, b'', b'') + b'' re.sub(bre, lambda m: b'', b'') + b'' re.sub(bpat, lambda m: b'', b'') + b'' re.subn(bre, b'', b'')[0] + b'' re.subn(bpat, b'', b'')[0] + b'' re.subn(bre, lambda m: b'', b'')[0] + b'' re.subn(bpat, lambda m: b'', b'')[0] + b'' [out] _program.py:7: error: Value of type variable "AnyStr" of "search" cannot be "object" _program.py:9: error: Cannot infer type argument 1 of "search" [case testReModuleString] # Regression tests for various overloads in the re module -- string version import re sre = 'a+' spat = re.compile(sre) spat = re.compile(spat) re.search(sre, '').groups() re.search(sre, b'') # Error re.search(spat, '').groups() re.search(spat, b'') # Error # match(), split(), findall(), finditer() are much the same, so skip those. 
# sus(), susn() have more overloads and we are checking these: re.sub(sre, '', '') + '' re.sub(spat, '', '') + '' re.sub(sre, lambda m: '', '') + '' re.sub(spat, lambda m: '', '') + '' re.subn(sre, '', '')[0] + '' re.subn(spat, '', '')[0] + '' re.subn(sre, lambda m: '', '')[0] + '' re.subn(spat, lambda m: '', '')[0] + '' [out] _program.py:7: error: Value of type variable "AnyStr" of "search" cannot be "object" _program.py:9: error: Cannot infer type argument 1 of "search" [case testListSetitemTuple] from typing import List, Tuple a = [] # type: List[Tuple[str, int]] a[0] = 'x', 1 a[1] = 2, 'y' a[:] = [('z', 3)] [out] _program.py:4: error: Incompatible types in assignment (expression has type "Tuple[int, str]", target has type "Tuple[str, int]") [case testContextManager] import contextlib from contextlib import contextmanager from typing import Iterator @contextmanager def f(x: int) -> Iterator[str]: yield 'foo' @contextlib.contextmanager def g(*x: str) -> Iterator[int]: yield 1 reveal_type(f) reveal_type(g) with f('') as s: reveal_type(s) [out] _program.py:13: error: Revealed type is 'def (x: builtins.int) -> contextlib.GeneratorContextManager[builtins.str*]' _program.py:14: error: Revealed type is 'def (*x: builtins.str) -> contextlib.GeneratorContextManager[builtins.int*]' _program.py:16: error: Argument 1 to "f" has incompatible type "str"; expected "int" _program.py:17: error: Revealed type is 'builtins.str*' [case testTypedDictGet] # Test that TypedDict get plugin works with typeshed stubs # TODO: Make it possible to use strict optional here from mypy_extensions import TypedDict class A: pass D = TypedDict('D', {'x': int, 'y': str}) d: D reveal_type(d.get('x')) reveal_type(d.get('y')) d.get('z') d.get() s = '' reveal_type(d.get(s)) [out] _testTypedDictGet.py:7: error: Revealed type is 'builtins.int' _testTypedDictGet.py:8: error: Revealed type is 'builtins.str' _testTypedDictGet.py:9: error: TypedDict "D" has no key 'z' _testTypedDictGet.py:10: error: No overload variant of "get" of "Mapping" matches argument types [] _testTypedDictGet.py:12: error: Revealed type is 'builtins.object*' [case testTypedDictMappingMethods] from mypy_extensions import TypedDict Cell = TypedDict('Cell', {'value': int}) c = Cell(value=42) for x in c: reveal_type(x) reveal_type(iter(c)) reveal_type(len(c)) reveal_type('value' in c) reveal_type(c.keys()) reveal_type(c.items()) reveal_type(c.values()) c == c c != c [out] _testTypedDictMappingMethods.py:5: error: Revealed type is 'builtins.str*' _testTypedDictMappingMethods.py:6: error: Revealed type is 'typing.Iterator[builtins.str*]' _testTypedDictMappingMethods.py:7: error: Revealed type is 'builtins.int' _testTypedDictMappingMethods.py:8: error: Revealed type is 'builtins.bool' _testTypedDictMappingMethods.py:9: error: Revealed type is 'typing.AbstractSet[builtins.str*]' _testTypedDictMappingMethods.py:10: error: Revealed type is 'typing.AbstractSet[Tuple[builtins.str*, builtins.int*]]' _testTypedDictMappingMethods.py:11: error: Revealed type is 'typing.ValuesView[builtins.int*]' [case testCrashOnComplexCheckWithNamedTupleNext] from typing import NamedTuple MyNamedTuple = NamedTuple('MyNamedTuple', [('parent', 'MyNamedTuple')]) # type: ignore def foo(mymap) -> MyNamedTuple: return next((mymap[key] for key in mymap), None) [out] [case testCanConvertTypedDictToAnySuperclassOfMapping] from mypy_extensions import TypedDict from typing import Sized, Iterable, Container Point = TypedDict('Point', {'x': int, 'y': int}) p: Point s: Sized = p it: Iterable[str] = p c: 
Container[str] = p o: object = p it2: Iterable[int] = p [out] _testCanConvertTypedDictToAnySuperclassOfMapping.py:11: error: Incompatible types in assignment (expression has type "Point", variable has type "Iterable[int]") _testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note: Following member(s) of "Point" have conflicts: _testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note: Expected: _testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note: def __iter__(self) -> Iterator[int] _testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note: Got: _testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note: def __iter__(self) -> Iterator[str] [case testAsyncioGatherPreciseType] import asyncio from typing import Tuple async def get_location(arg: str) -> Tuple[str, str]: return arg, arg async def main() -> None: ((a_x, a_y),) = await asyncio.gather(get_location('start')) reveal_type(a_x) reveal_type(a_y) [out] _testAsyncioGatherPreciseType.py:9: error: Revealed type is 'builtins.str' _testAsyncioGatherPreciseType.py:10: error: Revealed type is 'builtins.str' [case testMultipleInheritanceWorksWithTupleTypeGeneric] from typing import SupportsAbs, NamedTuple class Point(NamedTuple('Point', [('x', int), ('y', int)]), SupportsAbs[int]): def __abs__(p) -> int: return abs(p.x) + abs(p.y) def test(a: Point) -> bool: return abs(a) == 2 [out] [case testNoCrashOnGenericUnionUnpacking] from typing import Union, Dict TEST = {'key': ('a', 'b')} def test() -> None: a, b = TEST.get('foo', ('x', 'y')) reveal_type(a) reveal_type(b) def test2() -> None: a, b = TEST.get('foo', (1, 2)) reveal_type(a) reveal_type(b) x: Union[Dict[int, int], Dict[str, str]] = dict(a='b') for a, b in x.items(): reveal_type(a) reveal_type(b) [out] _testNoCrashOnGenericUnionUnpacking.py:6: error: Revealed type is 'builtins.str' _testNoCrashOnGenericUnionUnpacking.py:7: error: Revealed type is 'builtins.str' _testNoCrashOnGenericUnionUnpacking.py:10: error: Revealed type is 'Union[builtins.str, builtins.int]' _testNoCrashOnGenericUnionUnpacking.py:11: error: Revealed type is 'Union[builtins.str, builtins.int]' _testNoCrashOnGenericUnionUnpacking.py:15: error: Revealed type is 'Union[builtins.int*, builtins.str*]' _testNoCrashOnGenericUnionUnpacking.py:16: error: Revealed type is 'Union[builtins.int*, builtins.str*]' mypy-0.560/test-data/unit/README.md0000644€tŠÔÚ€2›s®0000001564313215007205023024 0ustar jukkaDROPBOX\Domain Users00000000000000Tests ===== Quick Start ----------- To add a simple unit test for a new feature you developed, open or create a `test-data/unit/check-*.test` file with a name that roughly relates to the feature you added. Add the test in this format anywhere in the file: [case testNewSyntaxBasics] # flags: --python-version 3.6 x: int x = 5 y: int = 5 a: str a = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str") b: str = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str") zzz: int zzz: str # E: Name 'zzz' already defined - no code here is executed, just type checked - optional `# flags: ` indicates which flags to use for this unit test - `# E: abc...` indicates that this line should result in type check error with text "abc..." 
- note a space after `E:` and `flags:` - `# E:12` adds column number to the expected error - repeating `# E: ` several times in one line indicates multiple expected errors in one line - `W: ...` and `N: ...` works exactly like `E:`, but report a warning and a note respectively - lines that don't contain the above should cause no type check errors - optional `[builtins fixtures/...]` tells the type checker to use stubs from the indicated file (see Fixtures section below) - optional `[out]` is an alternative to the "# E:" notation: it indicates that any text after it contains the expected type checking error messages. usually, "E: " is preferred because it makes it easier to associate the errors with the code generating them at a glance, and to change the code of the test without having to change line numbers in `[out]` - an empty `[out]` section has no effect - to run just this test, use `pytest -k testNewSyntaxBasics -n0` Fixtures -------- The unit tests use minimal stubs for builtins, so a lot of operations are not possible. You should generally define any needed classes within the test case instead of relying on builtins, though clearly this is not always an option (see below for more about stubs in test cases). This way tests run much faster and don't break if the stubs change. If your test crashes mysteriously even though the code works when run manually, you should make sure you have all the stubs you need for your test case, including built-in classes such as `list` or `dict`, as these are not included by default. Where the stubs for builtins come from for a given test: - The builtins used by default in unit tests live in `test-data/unit/lib-stub`. - Individual test cases can override the builtins stubs by using `[builtins fixtures/foo.pyi]`; this targets files in `test-data/unit/fixtures`. Feel free to modify existing files there or create new ones as you deem fit. - Test cases can also use `[typing fixtures/typing-full.pyi]` to use a more complete stub for `typing` that contains the async types, among other things. - Feel free to add additional stubs to that `fixtures` directory, but generally don't expand files in `lib-stub` without first discussing the addition with other mypy developers, as additions could slow down the test suite. Running tests and linting ------------------------- First install any additional dependencies needed for testing: $ python3 -m pip install -U -r test-requirements.txt You must also have a Python 2.7 binary installed that can import the `typing` module: $ python2 -m pip install -U typing To run all tests, run the script `runtests.py` in the mypy repository: $ ./runtests.py Note that some tests will be disabled for older python versions. This will run all tests, including integration and regression tests, and will type check mypy and verify that all stubs are valid. This may take several minutes to run, so you don't want to use this all the time while doing development. You can run a subset of test suites by passing positive or negative filters: $ ./runtests.py lex parse -x lint -x stub For example, to run unit tests only, which run pretty quickly: $ ./runtests.py unit-test You can get a list of available test suites through the `-l` option (though this doesn't show all available subtasks): $ ./runtests.py -l The unit test suites are driven by a mixture of test frameworks: `pytest` and mypy's own `myunit` framework, which we're in the process of migrating away from. Test suites for individual components are in the files `mypy/test/test*.py`. 
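To make the preceding sections concrete, here is a minimal sketch of a data-driven case that relies on a builtins fixture; the case name and the exact error text are illustrative rather than copied from the real suite. A case like this would live in a `test-data/unit/check-*.test` file and be picked up by the driver modules in `mypy/test/test*.py`:

    [case testListFixtureSketch]
    from typing import List
    x = [1]  # type: List[int]
    x.append('a')  # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
    [builtins fixtures/list.pyi]

The final line matters: the default `lib-stub` builtins are deliberately minimal, so without the `fixtures/list.pyi` fixture the `list` operations above would likely fail for the wrong reason.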
You can run many of these individually by doing `runtests.py testfoobar`. For finer control over which unit tests are run and how, you can run `pytest` directly: $ py.test mypy/test/testcheck.py -v -k MethodCall You can pass inferior arguments to pytest via `-a` when using `runtests.py`: $ ./runtests.py pytest -a -v -a -k -a MethodCall You can also run the type checker for manual testing without installing it by setting up the Python module search path suitably: $ export PYTHONPATH=$PWD $ python -m mypy PROGRAM.py You will have to manually install the `typing` module if you're running Python 3.4 or earlier. You can add the entry scripts to PATH for a single python3 version: $ export PATH=$PWD/scripts $ mypy PROGRAM.py You can check a module or string instead of a file: $ mypy PROGRAM.py $ mypy -m MODULE $ mypy -c 'import MODULE' To run the linter: $ ./runtests.py lint Many test suites store test case descriptions in text files (`test-data/unit/*.test`). The module `mypy.test.data` parses these descriptions. The package `mypy.myunit` contains the test framework used for the non-checker test cases. Python evaluation test cases are a little different from unit tests (`mypy/test/testpythoneval.py`, `test-data/unit/pythoneval.test`). These type check programs and run them. Unlike the unit tests, these use the full builtins and library stubs instead of minimal ones. Run them using `runtests.py testpythoneval`. `runtests.py` by default runs tests in parallel using as many processes as there are logical cores the `runtests.py` process is allowed to use (on some platforms this information isn't available, so 2 processes are used by default). You can change the number of workers using `-j` option. All pytest tests run as a single test from the perspective of `runtests.py`, and so `-j` option has no effect on them. Instead, `pytest` itself determines the number of processes to use. The default (set in `./pytest.ini`) is the number of logical cores; this can be overridden using `-n` option. Note that running more processes than logical cores is likely to significantly decrease performance. Coverage reports ---------------- There is an experimental feature to generate coverage reports. To use this feature, you need to `pip install -U lxml`. This is an extension module and requires various library headers to install; on a Debian-derived system the command `apt-get install python3-dev libxml2-dev libxslt1-dev` may provide the necessary dependencies. To use the feature, pass e.g. `--txt-report "$(mktemp -d)"`. mypy-0.560/test-data/unit/reports.test0000644€tŠÔÚ€2›s®0000002347013215007206024142 0ustar jukkaDROPBOX\Domain Users00000000000000-- Tests for reports -- ------------------------------ -- -- This file follows syntax of cmdline.test -- ---------------------------------------- [case testConfigErrorUnknownReport] # cmd: mypy -c pass [file mypy.ini] [[mypy] bad_report = . 
[out] mypy.ini: [mypy]: Unrecognized report type: bad_report [case testCoberturaParser] # cmd: mypy --cobertura-xml-report build pkg [file pkg/__init__.py] [file pkg/a.py] from typing import Dict def foo() -> Dict: z = {'hello': 'world'} return z [file pkg/subpkg/__init__.py] [file pkg/subpkg/a.py] def bar() -> str: return 'world' def untyped_function(): return 42 [outfile build/cobertura.xml] $PWD [case testAnyExprReportDivisionByZero] # cmd: mypy --any-exprs-report=out -c 'pass' [case testClassDefIsTreatedAsEmpty] # cmd: mypy --html-report report n.py [file n.py] class A(object): pass [file report/mypy-html.css] [file report/index.html] [outfile report/html/n.py.html]

n.py
1 | class A(object):
2 |     pass
[case testTypeVarTreatedAsEmptyLine] # cmd: mypy --html-report report n.py [file n.py] from typing import TypeVar T = TypeVar('T') [file report/mypy-html.css] [file report/index.html] [outfile report/html/n.py.html]

n.py
1 | from typing import TypeVar
2 |
3 | T = TypeVar('T')
[case testUnreachableCodeMarkedAsAny] # cmd: mypy --html-report report n.py [file n.py] def bar(x): # type: (str) -> None print(x) assert False print(x) [file report/mypy-html.css] [file report/index.html] [outfile report/html/n.py.html]

n.py
1 | def bar(x):
2 |     # type: (str) -> None
3 |     print(x)
4 |     assert False
5 |     print(x)
[case testHtmlReportMemberExprNoUnanalyzed] # cmd: mypy --html-report report n.py [file n.py] import sys old_stdout = sys.stdout [file report/mypy-html.css] [file report/index.html] [outfile report/html/n.py.html]

n.py
1 | import sys
2 |
3 | old_stdout = sys.stdout
[case testAnyExprReportIncludesDeadCode] # cmd: mypy --any-exprs-report report i.py j.py [file i.py] def bar(x): # type: (str) -> None print(x) assert False print(x) # dead code! [file j.py] def bar(x): # type: (str) -> None print(x) assert False [file report/types-of-anys.txt] [outfile report/any-exprs.txt] Name Anys Exprs Coverage --------------------------------- i 1 7 85.71% j 0 6 100.00% --------------------------------- Total 1 13 92.31% [case testAnyExprReportHigherKindedTypesAreNotAny] # cmd: mypy --any-exprs-report report i.py [file i.py] from enum import Enum from mypy_extensions import TypedDict from typing import NewType, NamedTuple, TypeVar from typing import TypeVar T = TypeVar('T') # no error def f(t: T) -> T: return t Point = NamedTuple('Point', [('x', int), ('y', int)]) # no error def origin() -> Point: return Point(x=0, y=0) NT = NewType('NT', int) # no error def nt() -> NT: return NT(1) E = Enum('E', '1, 2, 3') # no error def k(s: E) -> None: pass Movie = TypedDict('Movie', {'name': str, 'year': int}) def g(m: Movie) -> Movie: return m [file report/types-of-anys.txt] [outfile report/any-exprs.txt] Name Anys Exprs Coverage --------------------------------- i 0 16 100.00% --------------------------------- Total 0 16 100.00% [case testAnyExpressionsReportTypesOfAny] # cmd: mypy --any-exprs-report report n.py [file n.py] from typing import Any, List from nonexistent import C # type: ignore def a(x) -> None: # Unannotated print(x) x: Any = 2 # Explicit y: C = None # Unimported def b() -> List: # Omitted Generics return [1, 2, 3] g = 1 z = g.does_not_exist() # type: ignore # Error [file report/any-exprs.txt] [outfile report/types-of-anys.txt] Name Unannotated Explicit Unimported Omitted Generics Error Special Form --------------------------------------------------------------------------------------- n 2 3 2 1 3 0 --------------------------------------------------------------------------------------- Total 2 3 2 1 3 0 [case testAnyExpressionsReportUnqualifiedError] # cmd: mypy --any-exprs-report report n.py [file n.py] z = does_not_exist() # type: ignore # Error [file report/any-exprs.txt] [outfile report/types-of-anys.txt] Name Unannotated Explicit Unimported Omitted Generics Error Special Form --------------------------------------------------------------------------------------- n 0 0 0 0 3 0 --------------------------------------------------------------------------------------- Total 0 0 0 0 3 0 [case testAnyExpressionsReportUntypedDef] # cmd: mypy --any-exprs-report report n.py [file n.py] def foo(): x = 0 f = 0 [file report/any-exprs.txt] [outfile report/types-of-anys.txt] Name Unannotated Explicit Unimported Omitted Generics Error Special Form --------------------------------------------------------------------------------------- n 0 0 0 0 0 0 --------------------------------------------------------------------------------------- Total 0 0 0 0 0 0 mypy-0.560/test-data/unit/semanal-abstractclasses.test0000644€tŠÔÚ€2›s®0000000461613215007206027244 0ustar jukkaDROPBOX\Domain Users00000000000000[case testAbstractMethods] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): @abstractmethod def g(self) -> 'A': pass @abstractmethod def f(self) -> 'A': return self [out] MypyFile:1( ImportFrom:1(abc, [abstractmethod, ABCMeta]) Import:2(typing) ClassDef:4( A Metaclass(NameExpr(ABCMeta [abc.ABCMeta])) Decorator:5( Var(g) FuncDef:6( g Args( Var(self)) def (self: __main__.A) -> __main__.A Abstract Block:6( PassStmt:6()))) Decorator:7( Var(f) FuncDef:8( f 
Args( Var(self)) def (self: __main__.A) -> __main__.A Abstract Block:8( ReturnStmt:8( NameExpr(self [l]))))))) [case testClassInheritingTwoAbstractClasses] from abc import abstractmethod, ABCMeta import typing class A(metaclass=ABCMeta): pass class B(metaclass=ABCMeta): pass class C(A, B): pass [out] MypyFile:1( ImportFrom:1(abc, [abstractmethod, ABCMeta]) Import:2(typing) ClassDef:4( A Metaclass(NameExpr(ABCMeta [abc.ABCMeta])) PassStmt:4()) ClassDef:5( B Metaclass(NameExpr(ABCMeta [abc.ABCMeta])) PassStmt:5()) ClassDef:6( C BaseType( __main__.A __main__.B) PassStmt:6())) [case testAbstractGenericClass] from abc import abstractmethod from typing import Generic, TypeVar T = TypeVar('T') class A(Generic[T]): @abstractmethod def f(self) -> 'A[T]': pass [out] MypyFile:1( ImportFrom:1(abc, [abstractmethod]) ImportFrom:2(typing, [Generic, TypeVar]) AssignmentStmt:3( NameExpr(T* [__main__.T]) TypeVarExpr:3()) ClassDef:4( A TypeVars( T) Decorator:5( Var(f) FuncDef:6( f Args( Var(self)) def (self: __main__.A[T`1]) -> __main__.A[T`1] Abstract Block:6( PassStmt:6()))))) [case testFullyQualifiedAbstractMethodDecl] import abc from abc import ABCMeta import typing class A(metaclass=ABCMeta): @abc.abstractmethod def g(self) -> 'A': pass [out] MypyFile:1( Import:1(abc) ImportFrom:2(abc, [ABCMeta]) Import:3(typing) ClassDef:5( A Metaclass(NameExpr(ABCMeta [abc.ABCMeta])) Decorator:6( Var(g) FuncDef:7( g Args( Var(self)) def (self: __main__.A) -> __main__.A Abstract Block:7( PassStmt:7()))))) mypy-0.560/test-data/unit/semanal-basic.test0000644€tŠÔÚ€2›s®0000001714413215007206025144 0ustar jukkaDROPBOX\Domain Users00000000000000[case testEmptyFile] [out] MypyFile:1() [case testGlobalVariable] x = 1 x [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) ExpressionStmt:2( NameExpr(x [__main__.x]))) [case testMultipleGlobals] x = y = 2 z = 3 (x, y, z) [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(2)) AssignmentStmt:2( NameExpr(z* [__main__.z]) IntExpr(3)) ExpressionStmt:3( TupleExpr:3( NameExpr(x [__main__.x]) NameExpr(y [__main__.y]) NameExpr(z [__main__.z])))) [case testEmptyFunction] def f(): pass f() [out] MypyFile:1( FuncDef:1( f Block:1( PassStmt:1())) ExpressionStmt:2( CallExpr:2( NameExpr(f [__main__.f]) Args()))) [case testAccessingGlobalNameBeforeDefinition] x f() x = 1 def f(): pass [out] MypyFile:1( ExpressionStmt:1( NameExpr(x [__main__.x])) ExpressionStmt:2( CallExpr:2( NameExpr(f [__main__.f]) Args())) AssignmentStmt:3( NameExpr(x* [__main__.x]) IntExpr(1)) FuncDef:4( f Block:4( PassStmt:4()))) [case testFunctionArgs] def f(x, y): (x, y) [out] MypyFile:1( FuncDef:1( f Args( Var(x) Var(y)) Block:1( ExpressionStmt:2( TupleExpr:2( NameExpr(x [l]) NameExpr(y [l])))))) [case testLocalVar] def f(): x = 1 x [out] MypyFile:1( FuncDef:1( f Block:1( AssignmentStmt:2( NameExpr(x* [l]) IntExpr(1)) ExpressionStmt:3( NameExpr(x [l]))))) [case testAccessGlobalInFn] def f(): x g() x = 1 def g(): pass [out] MypyFile:1( FuncDef:1( f Block:1( ExpressionStmt:2( NameExpr(x [__main__.x])) ExpressionStmt:3( CallExpr:3( NameExpr(g [__main__.g]) Args())))) AssignmentStmt:4( NameExpr(x* [__main__.x]) IntExpr(1)) FuncDef:5( g Block:5( PassStmt:5()))) [case testAssignmentAfterInit] x = 1 x = 2 def f(y): y = 1 z = 1 z = 2 [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) AssignmentStmt:2( NameExpr(x [__main__.x]) IntExpr(2)) FuncDef:3( f Args( Var(y)) Block:3( AssignmentStmt:4( NameExpr(y [l]) IntExpr(1)) 
AssignmentStmt:5( NameExpr(z* [l]) IntExpr(1)) AssignmentStmt:6( NameExpr(z [l]) IntExpr(2))))) [case testLocalAndGlobalAliasing] x = 1 def f(): x = 2 x x [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) FuncDef:2( f Block:2( AssignmentStmt:3( NameExpr(x* [l]) IntExpr(2)) ExpressionStmt:4( NameExpr(x [l])))) ExpressionStmt:5( NameExpr(x [__main__.x]))) [case testArgumentInitializers] def f(x = f, y = object): x, y [out] MypyFile:1( FuncDef:1( f Args( default( Var(x) NameExpr(f [__main__.f])) default( Var(y) NameExpr(object [builtins.object]))) Block:1( ExpressionStmt:2( TupleExpr:2( NameExpr(x [l]) NameExpr(y [l])))))) [case testVarArgs] def f(x, *y): x, y [out] MypyFile:1( FuncDef:1( f Args( Var(x)) VarArg( Var(y)) Block:1( ExpressionStmt:2( TupleExpr:2( NameExpr(x [l]) NameExpr(y [l])))))) [case testGlobalDecl] x = None def f(): global x x = None x class A: pass [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) FuncDef:2( f Block:2( GlobalDecl:3( x) AssignmentStmt:4( NameExpr(x [__main__.x]) NameExpr(None [builtins.None])) ExpressionStmt:5( NameExpr(x [__main__.x])))) ClassDef:6( A PassStmt:6())) [case testMultipleNamesInGlobalDecl] x, y = None, None def f(): global x, y x = y [out] MypyFile:1( AssignmentStmt:1( TupleExpr:1( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) TupleExpr:1( NameExpr(None [builtins.None]) NameExpr(None [builtins.None]))) FuncDef:2( f Block:2( GlobalDecl:3( x y) AssignmentStmt:4( NameExpr(x [__main__.x]) NameExpr(y [__main__.y]))))) [case testGlobalDeclScope] x = None def f(): global x def g(): x = None [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) FuncDef:2( f Block:2( GlobalDecl:3( x))) FuncDef:4( g Block:4( AssignmentStmt:5( NameExpr(x* [l]) NameExpr(None [builtins.None]))))) [case testGlobalDeclScope] x = None def f(): global x def g(): x = None [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) FuncDef:2( f Block:2( GlobalDecl:3( x))) FuncDef:4( g Block:4( AssignmentStmt:5( NameExpr(x* [l]) NameExpr(None [builtins.None]))))) [case testGlobaWithinMethod] x = None class A: def f(self): global x x = self [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) ClassDef:2( A FuncDef:3( f Args( Var(self)) Block:3( GlobalDecl:4( x) AssignmentStmt:5( NameExpr(x [__main__.x]) NameExpr(self [l])))))) [case testGlobalDefinedInBlock] if object: x = object() x = x x [out] MypyFile:1( IfStmt:1( If( NameExpr(object [builtins.object])) Then( AssignmentStmt:2( NameExpr(x* [__main__.x]) CallExpr:2( NameExpr(object [builtins.object]) Args())) AssignmentStmt:3( NameExpr(x [__main__.x]) NameExpr(x [__main__.x])))) ExpressionStmt:4( NameExpr(x [__main__.x]))) [case testNonlocalDecl] def g(): x = None def f(): nonlocal x x = None x [out] MypyFile:1( FuncDef:1( g Block:1( AssignmentStmt:2( NameExpr(x* [l]) NameExpr(None [builtins.None])) FuncDef:3( f Block:3( NonlocalDecl:4( x) AssignmentStmt:5( NameExpr(x [l]) NameExpr(None [builtins.None])) ExpressionStmt:6( NameExpr(x [l]))))))) [case testMultipleNamesInNonlocalDecl] def g(): x, y = None, None def f(z): nonlocal x, y x = y [out] MypyFile:1( FuncDef:1( g Block:1( AssignmentStmt:2( TupleExpr:2( NameExpr(x* [l]) NameExpr(y* [l])) TupleExpr:2( NameExpr(None [builtins.None]) NameExpr(None [builtins.None]))) FuncDef:3( f Args( Var(z)) Block:3( NonlocalDecl:4( x y) AssignmentStmt:5( NameExpr(x [l]) NameExpr(y [l]))))))) [case 
testNestedFunctions] def f(x): def g(y): z = y + x return g [out] MypyFile:1( FuncDef:1( f Args( Var(x)) Block:1( FuncDef:2( g Args( Var(y)) Block:2( AssignmentStmt:3( NameExpr(z* [l]) OpExpr:3( + NameExpr(y [l]) NameExpr(x [l]))))) ReturnStmt:4( NameExpr(g [l]))))) [case testNestedFunctionWithOverlappingName] def f(x): def g(): x = 1 [out] MypyFile:1( FuncDef:1( f Args( Var(x)) Block:1( FuncDef:2( g Block:2( AssignmentStmt:3( NameExpr(x* [l]) IntExpr(1))))))) mypy-0.560/test-data/unit/semanal-classes.test0000644€tŠÔÚ€2›s®0000002603413215007206025516 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases related to classes for the semantic analyzer. [case testSimpleClass] class A: pass x = A [out] MypyFile:1( ClassDef:1( A PassStmt:1()) AssignmentStmt:2( NameExpr(x* [__main__.x]) NameExpr(A [__main__.A]))) [case testMethods] class A: def __init__(self, x): y = x def f(self): y = self [out] MypyFile:1( ClassDef:1( A FuncDef:2( __init__ Args( Var(self) Var(x)) Block:2( AssignmentStmt:3( NameExpr(y* [l]) NameExpr(x [l])))) FuncDef:4( f Args( Var(self)) Block:4( AssignmentStmt:5( NameExpr(y* [l]) NameExpr(self [l])))))) [case testMemberDefinitionInInit] class A: def __init__(self): self.x = 1 self.y = 2 [out] MypyFile:1( ClassDef:1( A FuncDef:2( __init__ Args( Var(self)) Block:2( AssignmentStmt:3( MemberExpr:3( NameExpr(self [l]) x*) IntExpr(1)) AssignmentStmt:4( MemberExpr:4( NameExpr(self [l]) y*) IntExpr(2)))))) [case testMemberAssignmentViaSelfOutsideInit] class A: def f(self): self.x = 1 def __init__(self): self.y = 1 [out] MypyFile:1( ClassDef:1( A FuncDef:2( f Args( Var(self)) Block:2( AssignmentStmt:3( MemberExpr:3( NameExpr(self [l]) x*) IntExpr(1))))) FuncDef:4( __init__ Args( Var(self)) Block:4( AssignmentStmt:5( MemberExpr:5( NameExpr(self [l]) y) IntExpr(1))))) [case testMemberAssignmentNotViaSelf] class A: def __init__(x, self): self.y = 1 # not really self class B: def __init__(x): self = x self.z = 1 [out] MypyFile:1( ClassDef:1( A FuncDef:2( __init__ Args( Var(x) Var(self)) Block:2( AssignmentStmt:3( MemberExpr:3( NameExpr(self [l]) y) IntExpr(1))))) ClassDef:4( B FuncDef:5( __init__ Args( Var(x)) Block:5( AssignmentStmt:6( NameExpr(self* [l]) NameExpr(x [l])) AssignmentStmt:7( MemberExpr:7( NameExpr(self [l]) z) IntExpr(1)))))) [case testNonStandardNameForSelfAndInit] class A: def __init__(x): x.y = 1 [out] MypyFile:1( ClassDef:1( A FuncDef:2( __init__ Args( Var(x)) Block:2( AssignmentStmt:3( MemberExpr:3( NameExpr(x [l]) y*) IntExpr(1)))))) [case testAssignmentAfterAttributeInit] class A: def __init__(self): self.x = 1 self.x = 2 [out] MypyFile:1( ClassDef:1( A FuncDef:2( __init__ Args( Var(self)) Block:2( AssignmentStmt:3( MemberExpr:3( NameExpr(self [l]) x*) IntExpr(1)) AssignmentStmt:4( MemberExpr:4( NameExpr(self [l]) x) IntExpr(2)))))) [case testOverloadedMethod] from typing import overload class A: @overload def f(self) -> None: self @overload def f(self, x: 'A') -> None: self def f(self, *args): self [out] MypyFile:1( ImportFrom:1(typing, [overload]) ClassDef:2( A OverloadedFuncDef:3( FuncDef:7( f Args( Var(self)) VarArg( Var(args)) Block:7( ExpressionStmt:7( NameExpr(self [l])))) Overload(def (self: __main__.A), \ def (self: __main__.A, x: __main__.A)) Decorator:3( Var(f) NameExpr(overload [typing.overload]) FuncDef:4( f Args( Var(self)) def (self: __main__.A) Block:4( ExpressionStmt:4( NameExpr(self [l]))))) Decorator:5( Var(f) NameExpr(overload [typing.overload]) FuncDef:6( f Args( Var(self) Var(x)) def (self: __main__.A, x: __main__.A) Block:6( 
ExpressionStmt:6( NameExpr(self [l])))))))) [case testAttributeWithoutType] class A: a = object [out] MypyFile:1( ClassDef:1( A AssignmentStmt:2( NameExpr(a* [m]) NameExpr(object [builtins.object])))) [case testDataAttributeRefInClassBody] class A: x = 1 y = x [out] MypyFile:1( ClassDef:1( A AssignmentStmt:2( NameExpr(x* [m]) IntExpr(1)) AssignmentStmt:3( NameExpr(y* [m]) NameExpr(x [__main__.A.x])))) [case testMethodRefInClassBody] class A: def f(self): pass g = f [out] MypyFile:1( ClassDef:1( A FuncDef:2( f Args( Var(self)) Block:2( PassStmt:2())) AssignmentStmt:3( NameExpr(g* [m]) NameExpr(f [__main__.A.f])))) [case testIfStatementInClassBody] class A: if A: x = 1 else: x = 2 [out] MypyFile:1( ClassDef:1( A IfStmt:2( If( NameExpr(A [__main__.A])) Then( AssignmentStmt:3( NameExpr(x* [m]) IntExpr(1))) Else( AssignmentStmt:5( NameExpr(x [__main__.A.x]) IntExpr(2)))))) [case testForStatementInClassBody] class A: for x in [1, 2]: y = x [out] MypyFile:1( ClassDef:1( A ForStmt:2( NameExpr(x* [m]) ListExpr:2( IntExpr(1) IntExpr(2)) Block:2( AssignmentStmt:3( NameExpr(y* [m]) NameExpr(x [__main__.A.x])))))) [case testReferenceToClassWithinFunction] def f(): class A: pass A [out] MypyFile:1( FuncDef:1( f Block:1( ClassDef:2( A PassStmt:2()) ExpressionStmt:3( NameExpr(A [__main__.A@2]))))) [case testReferenceToClassWithinClass] class A: class B: pass B [out] MypyFile:1( ClassDef:1( A ClassDef:2( B PassStmt:2()) ExpressionStmt:3( NameExpr(B [__main__.A.B])))) [case testClassWithBaseClassWithinClass] class A: class B: pass class C(B): pass [out] MypyFile:1( ClassDef:1( A ClassDef:2( B PassStmt:2()) ClassDef:3( C BaseType( __main__.A.B) PassStmt:3()))) [case testDeclarationReferenceToNestedClass] def f() -> None: class A: pass x = None # type: A [out] MypyFile:1( FuncDef:1( f def () Block:1( ClassDef:2( A PassStmt:2()) AssignmentStmt:3( NameExpr(x [l]) NameExpr(None [builtins.None]) __main__.A@2)))) [case testAccessToLocalInOuterScopeWithinNestedClass] def f(x): class A: y = x def g(self): z = x [out] MypyFile:1( FuncDef:1( f Args( Var(x)) Block:1( ClassDef:2( A AssignmentStmt:3( NameExpr(y* [m]) NameExpr(x [l])) FuncDef:4( g Args( Var(self)) Block:4( AssignmentStmt:5( NameExpr(z* [l]) NameExpr(x [l])))))))) [case testQualifiedMetaclass] import abc class A(metaclass=abc.ABCMeta): pass [out] MypyFile:1( Import:1(abc) ClassDef:2( A Metaclass(MemberExpr:2( NameExpr(abc) ABCMeta [abc.ABCMeta])) PassStmt:2())) [case testStaticMethod] class A: @staticmethod def f(z: int) -> str: pass [builtins fixtures/staticmethod.pyi] [out] MypyFile:1( ClassDef:1( A Decorator:2( Var(f) FuncDef:3( f Args( Var(z)) def (z: builtins.int) -> builtins.str Static Block:3( PassStmt:3()))))) [case testStaticMethodWithNoArgs] class A: @staticmethod def f() -> str: pass [builtins fixtures/staticmethod.pyi] [out] MypyFile:1( ClassDef:1( A Decorator:2( Var(f) FuncDef:3( f def () -> builtins.str Static Block:3( PassStmt:3()))))) [case testClassMethod] class A: @classmethod def f(cls, z: int) -> str: pass [builtins fixtures/classmethod.pyi] [out] MypyFile:1( ClassDef:1( A Decorator:2( Var(f) FuncDef:3( f Args( Var(cls) Var(z)) def (cls: def () -> __main__.A, z: builtins.int) -> builtins.str Class Block:3( PassStmt:3()))))) [case testClassMethodWithNoArgs] class A: @classmethod def f(cls) -> str: pass [builtins fixtures/classmethod.pyi] [out] MypyFile:1( ClassDef:1( A Decorator:2( Var(f) FuncDef:3( f Args( Var(cls)) def (cls: def () -> __main__.A) -> builtins.str Class Block:3( PassStmt:3()))))) [case testProperty] import typing class 
A: @property def f(self) -> str: pass [builtins fixtures/property.pyi] [out] MypyFile:1( Import:1(typing) ClassDef:2( A Decorator:3( Var(f) FuncDef:4( f Args( Var(self)) def (self: __main__.A) -> builtins.str Property Block:4( PassStmt:4()))))) [case testClassDecorator] import typing @object class A: pass [out] MypyFile:1( Import:1(typing) ClassDef:2( A Decorators( NameExpr(object [builtins.object])) PassStmt:3())) [case testClassAttributeAsMethodDefaultArgumentValue] import typing class A: X = 1 def f(self, x : int = X) -> None: pass [out] MypyFile:1( Import:1(typing) ClassDef:2( A AssignmentStmt:3( NameExpr(X* [m]) IntExpr(1)) FuncDef:4( f Args( Var(self) default( Var(x) NameExpr(X [__main__.A.X]))) def (self: __main__.A, x: builtins.int =) Block:4( PassStmt:4())))) [case testInvalidBaseClass] from typing import Any, Callable class A(None): pass class B(Any): pass class C(Callable[[], int]): pass [out] main: error: Invalid base class main:4: error: Invalid base class [case testTupleAsBaseClass] import m [file m.pyi] from typing import Tuple class A(Tuple[int, str]): pass [builtins fixtures/tuple.pyi] [out] MypyFile:1( Import:1(m)) MypyFile:1( tmp/m.pyi ImportFrom:1(typing, [Tuple]) ClassDef:2( A TupleType( Tuple[builtins.int, builtins.str]) BaseType( builtins.tuple[Any]) PassStmt:2())) [case testBaseClassFromIgnoredModule] import m # type: ignore class B(m.A): pass [out] MypyFile:1( Import:1(m) ClassDef:2( B FallbackToAny BaseType( builtins.object) PassStmt:3()) IgnoredLines(1)) [case testBaseClassFromIgnoredModuleUsingImportFrom] from m import A # type: ignore class B(A, int): pass [out] MypyFile:1( ImportFrom:1(m, [A]) ClassDef:2( B FallbackToAny BaseType( builtins.int) PassStmt:3()) IgnoredLines(1)) [case testBaseClassWithExplicitAnyType] from typing import Any A = 1 # type: Any class B(A): pass [out] MypyFile:1( ImportFrom:1(typing, [Any]) AssignmentStmt:2( NameExpr(A [__main__.A]) IntExpr(1) Any) ClassDef:3( B FallbackToAny BaseType( builtins.object) PassStmt:4())) mypy-0.560/test-data/unit/semanal-classvar.test0000644€tŠÔÚ€2›s®0000001323313215007206025674 0ustar jukkaDROPBOX\Domain Users00000000000000[case testClassVarDef] from typing import ClassVar class A: x = 1 # type: ClassVar[int] [out] MypyFile:1( ImportFrom:1(typing, [ClassVar]) ClassDef:2( A AssignmentStmt:3( NameExpr(x [m]) IntExpr(1) builtins.int))) [case testClassVarDefInModuleScope] from typing import ClassVar x = None # type: ClassVar[int] [out] main:2: error: ClassVar can only be used for assignments in class body [case testClassVarDefInFuncScope] from typing import ClassVar def f() -> None: x = None # type: ClassVar[int] [out] main:3: error: ClassVar can only be used for assignments in class body [case testClassVarDefInMethod] from typing import ClassVar class A: def f(self) -> None: x = None # type: ClassVar [out] main:4: error: ClassVar can only be used for assignments in class body [case testClassVarTooManyArguments] from typing import ClassVar class A: x = 1 # type: ClassVar[int, str] [out] main:3: error: ClassVar[...] 
must have at most one type argument [case testClassVarWithoutArguments] from typing import ClassVar class A: x = 1 # type: ClassVar [out] MypyFile:1( ImportFrom:1(typing, [ClassVar]) ClassDef:2( A AssignmentStmt:3( NameExpr(x [m]) IntExpr(1) Any))) [case testClassVarWithTypeVar] from typing import ClassVar, TypeVar T = TypeVar('T') class A: x = None # type: ClassVar[T] [out] main:4: error: Invalid type "__main__.T" [case testClassVarInFunctionArgs] from typing import ClassVar def f(x: str, y: ClassVar) -> None: pass [out] main:2: error: ClassVar can only be used for assignments in class body [case testClassVarInMethodArgs] from typing import ClassVar class A: def f(x: str, y: ClassVar) -> None: pass [out] main:3: error: ClassVar can only be used for assignments in class body [case testClassVarFunctionRetType] from typing import ClassVar def f() -> ClassVar: pass [out] main:2: error: ClassVar can only be used for assignments in class body [case testClassVarMethodRetType] from typing import ClassVar class A: def f(self) -> ClassVar: pass [out] main:3: error: ClassVar can only be used for assignments in class body [case testMultipleClassVarInFunctionSig] from typing import ClassVar def f(x: ClassVar, y: ClassVar) -> ClassVar: pass [out] main:2: error: ClassVar can only be used for assignments in class body [case testClassVarInCallableArgs] from typing import Callable, ClassVar, Any f = None # type: Callable[[int, ClassVar], Any] [out] main:2: error: Invalid type: ClassVar nested inside other type [case testClassVarInCallableRet] from typing import Callable, ClassVar f = None # type: Callable[..., ClassVar] [out] main:2: error: Invalid type: ClassVar nested inside other type [case testClassVarInUnion] from typing import ClassVar, Union x = None # type: Union[ClassVar, str] [out] main:2: error: Invalid type: ClassVar nested inside other type [case testClassVarInUnionAsAttribute] from typing import ClassVar, Union class A: x = None # type: Union[ClassVar, str] [out] main:3: error: Invalid type: ClassVar nested inside other type [case testListWithClassVars] from typing import ClassVar, List x = [] # type: List[ClassVar] [builtins fixtures/list.pyi] [out] main:2: error: Invalid type: ClassVar nested inside other type [case testTupleClassVar] from typing import ClassVar, Tuple x = None # type: Tuple[ClassVar, int] [out] main:2: error: Invalid type: ClassVar nested inside other type [case testMultipleLvaluesWithList] from typing import ClassVar, List class A: [x, y] = None, None # type: List[ClassVar] [builtins fixtures/list.pyi] [out] main:3: error: Invalid type: ClassVar nested inside other type [case testDeeplyNested] from typing import Callable, ClassVar, Union class A: pass class B: x = None # type: Union[str, Callable[[A, ClassVar], int]] [out] main:4: error: Invalid type: ClassVar nested inside other type [case testClassVarInClassVar] from typing import ClassVar class A: x = None # type: ClassVar[ClassVar[int]] [out] main:3: error: Invalid type: ClassVar nested inside other type [case testInsideGeneric] from typing import ClassVar, Generic, TypeVar T = TypeVar('T') class A(Generic[T]): pass class B: x = None # type: A[ClassVar] [out] main:5: error: Invalid type: ClassVar nested inside other type [case testDefineOnSelf] from typing import ClassVar class A: def __init__(self) -> None: self.x = None # type: ClassVar [out] main:4: error: ClassVar can only be used for assignments in class body [case testForIndex] from typing import ClassVar for i in []: # type: ClassVar pass [out] main:2: error: 
ClassVar can only be used for assignments in class body [case testForIndexInClassBody] from typing import ClassVar class A: for i in []: # type: ClassVar pass [out] main:3: error: ClassVar can only be used for assignments in class body [case testWithStmt] from typing import ClassVar class A: pass with A() as x: # type: ClassVar pass [out] main:3: error: ClassVar can only be used for assignments in class body [case testWithStmtInClassBody] from typing import ClassVar class A: pass class B: with A() as x: # type: ClassVar pass [out] main:4: error: ClassVar can only be used for assignments in class body [case testClassVarWithGeneric] from typing import ClassVar, Generic, TypeVar T = TypeVar('T') class A(Generic[T]): x = None # type: ClassVar[T] [out] main:4: error: Invalid type: ClassVar cannot be generic [case testClassVarWithNestedGeneric] from typing import ClassVar, Generic, List, TypeVar, Union T = TypeVar('T') U = TypeVar('U') class A(Generic[T, U]): x = None # type: ClassVar[Union[T, List[U]]] [builtins fixtures/list.pyi] [out] main:5: error: Invalid type: ClassVar cannot be generic mypy-0.560/test-data/unit/semanal-errors.test0000644€tŠÔÚ€2›s®0000010000713215007206025366 0ustar jukkaDROPBOX\Domain Users00000000000000[case testUndefinedVariableInGlobalStatement] import typing x y [out] main:2: error: Name 'x' is not defined main:3: error: Name 'y' is not defined [case testUndefinedVariableWithinFunctionContext] import typing def f() -> None: x y [out] main:3: error: Name 'x' is not defined main:4: error: Name 'y' is not defined [case testMethodScope] import typing class A: def f(self): pass f [out] main:4: error: Name 'f' is not defined [case testMethodScope2] import typing class A: def f(self): pass class B: def g(self) -> None: f # error g # error [out] main:6: error: Name 'f' is not defined main:7: error: Name 'g' is not defined [case testInvalidType] import typing x = None # type: X [out] main:2: error: Name 'X' is not defined [case testInvalidGenericArg] from typing import TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass x = 0 # type: A[y] [out] main:4: error: Name 'y' is not defined [case testInvalidNumberOfGenericArgsInTypeDecl] from typing import TypeVar, Generic t = TypeVar('t') class A: pass class B(Generic[t]): pass x = 0 # type: B[A, A] y = 0 # type: A[A] [out] main:5: error: "B" expects 1 type argument, but 2 given main:6: error: "A" expects no type arguments, but 1 given [case testInvalidNumberOfGenericArgsInUndefinedArg] class A: pass x = None # type: A[int] # E: "A" expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInNestedBlock] class A: pass class B: def f(self) -> None: while 1: x = None # type: A[int] \ # E: "A" expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInSignature] import typing class A: pass def f() -> A[int]: pass # E: "A" expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInOverloadedSignature] from typing import overload class A: pass @overload def f(): pass @overload def f(x: A[int]) -> None: pass # E: "A" expects no type arguments, but 1 given def f(*args): pass [out] [case testInvalidNumberOfGenericArgsInBaseType] import typing class A: pass class B(A[int]): pass # E: "A" expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInCast] from typing import cast class A: pass x = cast(A[int], 1) # E: "A" expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInNestedGenericType] from typing import 
TypeVar, Generic T = TypeVar('T') class A(Generic[T]): pass class B: pass def f() -> A[B[int]]: pass # E: "B" expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInTupleType] from typing import Tuple class A: pass x = None # type: Tuple[A[int]] # E: "A" expects no type arguments, but 1 given [out] [case testInvalidNumberOfGenericArgsInFunctionType] from typing import Callable class A: pass x = None # type: Callable[[A[int]], int] # E: "A" expects no type arguments, but 1 given y = None # type: Callable[[], A[int]] # E: "A" expects no type arguments, but 1 given [out] [case testVarOrFuncAsType] import typing def f(): pass x = 1 y = 0 # type: f z = 0 # type: x [out] main:4: error: Invalid type "__main__.f" main:5: error: Invalid type "__main__.x" [case testGlobalVarRedefinition] import typing class A: pass x = 0 # type: A x = 0 # type: A [out] main:4: error: Name 'x' already defined [case testLocalVarRedefinition] import typing class A: pass def f() -> None: x = 0 # type: A x = 0 # type: A [out] main:5: error: Name 'x' already defined [case testClassVarRedefinition] import typing class A: x = 0 # type: object x = 0 # type: object [out] main:4: error: Name 'x' already defined [case testMultipleClassDefinitions] import typing class A: pass class A: pass [out] main:3: error: Name 'A' already defined on line 2 [case testMultipleMixedDefinitions] import typing x = 1 def x(): pass class x: pass [out] main:3: error: Name 'x' already defined on line 2 main:4: error: Name 'x' already defined on line 2 [case testNameNotImported] import typing from m import y x [file m.py] x = y = 1 [out] main:3: error: Name 'x' is not defined [case testMissingNameInImportFrom] import typing from m import y [file m.py] x = 1 [out] main:2: error: Module 'm' has no attribute 'y' [case testMissingModule] import typing import m [out] main:2: error: Cannot find module named 'm' main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testMissingModule2] import typing from m import x [out] main:2: error: Cannot find module named 'm' main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testMissingModule3] import typing from m import * [out] main:2: error: Cannot find module named 'm' main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testMissingModuleRelativeImport] import typing import m [file m/__init__.py] from .x import y [out] tmp/m/__init__.py:1: error: Cannot find module named 'm.x' tmp/m/__init__.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testMissingModuleRelativeImport2] import typing import m.a [file m/__init__.py] [file m/a.py] from .x import y [out] tmp/m/a.py:1: error: Cannot find module named 'm.x' tmp/m/a.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) [case testModuleNotImported] import typing import _m _n.x [file _m.py] import _n [file _n.py] x = 1 [out] main:3: error: Name '_n' is not defined [case testImportAsteriskPlusUnderscore] import typing from _m import * _x __x__ [file _m.py] _x = __x__ = 1 [out] main:3: error: Name '_x' is not defined main:4: error: Name '__x__' is not defined [case testRelativeImportAtTopLevelModule] from . import m [out] main:1: error: No parent module -- cannot perform relative import [case testRelativeImportAtTopLevelModule2] from .. 
import m [out] main:1: error: No parent module -- cannot perform relative import [case testUndefinedTypeWithQualifiedName] import typing import m def f() -> m.c: pass def g() -> n.c: pass [file m.py] [out] main:3: error: Name 'm.c' is not defined main:4: error: Name 'n' is not defined [case testMissingPackage] import typing import m.n [out] main:2: error: Cannot find module named 'm' main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: Cannot find module named 'm.n' [case testMissingPackage] import typing from m.n import x from a.b import * [out] main:2: error: Cannot find module named 'm.n' main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:3: error: Cannot find module named 'a.b' [case testErrorInImportedModule] import m [file m.py] import typing x = y [out] tmp/m.py:2: error: Name 'y' is not defined [case testErrorInImportedModule2] import m.n [file m/__init__.py] [file m/n.py] import k [file k.py] import typing x = y [out] tmp/k.py:2: error: Name 'y' is not defined [case testPackageWithoutInitFile] import typing import m.n m.n.x [file m/n.py] x = 1 [out] main:2: error: Cannot find module named 'm' main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) main:2: error: Cannot find module named 'm.n' [case testBreakOutsideLoop] break def f(): break [out] main:1: error: 'break' outside loop main:3: error: 'break' outside loop [case testContinueOutsideLoop] continue def f(): continue [out] main:1: error: 'continue' outside loop main:3: error: 'continue' outside loop [case testReturnOutsideFunction] def f(): pass return return 1 [out] main:2: error: 'return' outside function main:3: error: 'return' outside function [case testYieldOutsideFunction] yield 1 yield [out] main:1: error: 'yield' outside function main:2: error: 'yield' outside function [case testInvalidLvalues1] 1 = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues2] (1) = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues3] (1, 1) = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues4] [1, 1] = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues5] () = 1 [out] main:1: error: can't assign to () [case testInvalidLvalues6] x = y = z = 1 # ok x, (y, 1) = 1 [out] main:2: error: can't assign to literal [case testInvalidLvalues7] x, [y, 1] = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues8] x, [y, [z, 1]] = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues9] x, (y) = 1 # ok x, (y, (z, z)) = 1 # ok x, (y, (z, 1)) = 1 [out] main:3: error: can't assign to literal [case testInvalidLvalues10] x + x = 1 [out] main:1: error: can't assign to operator [case testInvalidLvalues11] -x = 1 [out] main:1: error: can't assign to operator [case testInvalidLvalues12] 1.1 = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues13] 'x' = 1 [out] main:1: error: can't assign to literal [case testInvalidLvalues14] x() = 1 [out] main:1: error: can't assign to function call [case testTwoStarExpressions] a, *b, *c = 1 *a, (*b, c) = 1 a, (*b, *c) = 1 [*a, *b] = 1 [out] main:1: error: Two starred expressions in assignment main:3: error: Two starred expressions in assignment main:4: error: Two starred expressions in assignment [case testTwoStarExpressionsInForStmt] z = 1 for a, *b, *c in z: pass for *a, (*b, c) in z: pass for a, (*b, *c) in z: pass for [*a, *b] in z: pass 
[out] main:2: error: Two starred expressions in assignment main:6: error: Two starred expressions in assignment main:8: error: Two starred expressions in assignment [case testTwoStarExpressionsInGeneratorExpr] (a for a, *b, *c in []) (a for *a, (*b, c) in []) (a for a, (*b, *c) in []) [out] main:1: error: Name 'a' is not defined main:1: error: Two starred expressions in assignment main:3: error: Two starred expressions in assignment [case testStarExpressionRhs] b = 1 c = 1 d = 1 a = *b [out] main:4: error: Can use starred expression only as assignment target [case testStarExpressionInExp] a = 1 *a + 1 [out] main:2: error: Can use starred expression only as assignment target [case testInvalidDel1] x = 1 del x(1) # E: can't delete function call [out] [case testInvalidDel2] x = 1 del x + 1 # E: can't delete operator [out] [case testInvalidDel3] del z # E: Name 'z' is not defined [out] [case testFunctionTvarScope] from typing import TypeVar t = TypeVar('t') def f(x: t) -> t: pass x = 0 # type: t [out] main:4: error: Invalid type "__main__.t" [case testClassTvarScope] from typing import Generic, TypeVar t = TypeVar('t') class c(Generic[t]): pass x = 0 # type: t [out] main:4: error: Invalid type "__main__.t" [case testExpressionRefersToTypeVariable] from typing import TypeVar, Generic t = TypeVar('t') class c(Generic[t]): def f(self) -> None: x = t def f(y: t): x = t [out] main:4: error: 't' is a type variable and only valid in type context main:5: error: 't' is a type variable and only valid in type context [case testMissingSelf] import typing class A: def f(): pass [out] main:3: error: Method must have at least one argument [case testInvalidBaseClass] import typing class A(B): pass [out] main:2: error: Name 'B' is not defined [case testSuperOutsideClass] class A: pass super().x def f() -> None: super().y [out] main:2: error: "super" used outside class main:3: error: "super" used outside class [case testMissingSelfInMethod] import typing class A: def f() -> None: pass def g(): pass [out] main:3: error: Method must have at least one argument main:4: error: Method must have at least one argument [case testMultipleMethodDefinition] import typing class A: def f(self) -> None: pass def g(self) -> None: pass def f(self, x: object) -> None: pass [out] main:5: error: Name 'f' already defined [case testInvalidGlobalDecl] import typing def f() -> None: global x x = None [out] main:4: error: Name 'x' is not defined [case testInvalidNonlocalDecl] import typing def f(): def g() -> None: nonlocal x x = None [out] main:4: error: No binding for nonlocal 'x' found main:5: error: Name 'x' is not defined [case testNonlocalDeclNotMatchingGlobal] import typing x = None def f() -> None: nonlocal x x = None [out] main:4: error: No binding for nonlocal 'x' found main:5: error: Name 'x' is not defined [case testNonlocalDeclConflictingWithParameter] import typing def g(): x = None def f(x) -> None: nonlocal x x = None [out] main:5: error: Name 'x' is already defined in local scope before nonlocal declaration [case testNonlocalDeclOutsideFunction] x = 2 nonlocal x [out] main:2: error: nonlocal declaration not allowed at module level [case testGlobalAndNonlocalDecl] import typing x = 1 def f(): x = 1 def g() -> None: global x nonlocal x x = None [out] main:7: error: Name 'x' is nonlocal and global [case testNonlocalAndGlobalDecl] import typing x = 1 def f(): x = 1 def g() -> None: nonlocal x global x x = None [out] main:7: error: Name 'x' is nonlocal and global [case testNestedFunctionAndScoping] import typing def f(x) -> 
None: def g(y): z = x z y x [out] main:5: error: Name 'z' is not defined main:6: error: Name 'y' is not defined [case testMultipleNestedFunctionDef] import typing def f(x) -> None: def g(): pass x = 1 def g(): pass [out] main:5: error: Name 'g' already defined [case testRedefinedOverloadedFunction] from typing import overload, Any def f() -> None: @overload def p(o: object) -> None: pass # no error @overload def p(o: Any) -> None: pass # no error x = 1 def p(): pass # fail [out] main:3: error: An overloaded function outside a stub file must have an implementation main:8: error: Name 'p' already defined [case testNestedFunctionInMethod] import typing class A: def f(self) -> None: def g() -> None: x y [out] main:5: error: Name 'x' is not defined main:6: error: Name 'y' is not defined [case testImportScope] import typing def f() -> None: import x x.y # E: Name 'x' is not defined [file x.py] y = 1 [out] [case testImportScope2] import typing def f() -> None: from x import y y y # E: Name 'y' is not defined [file x.py] y = 1 [out] [case testImportScope3] import typing def f() -> None: from x import * y y # E: Name 'y' is not defined [file x.py] y = 1 [out] [case testImportScope4] import typing class A: from x import * y y # E: Name 'y' is not defined [file x.py] y = 1 [out] [case testScopeOfNestedClass] import typing def f(): class A: pass A A # E: Name 'A' is not defined [out] [case testScopeOfNestedClass2] import typing class A: class B: pass B # E: Name 'B' is not defined [out] [case testScopeOfNestedClass3] import typing class A: def f(self): class B: pass B # E: Name 'B' is not defined B # E: Name 'B' is not defined [out] [case testInvalidNestedClassReferenceInDecl] import typing class A: pass foo = 0 # type: A.x # E: Name 'A.x' is not defined [out] [case testTvarScopingWithNestedClass] from typing import TypeVar, Generic t = TypeVar('t') s = TypeVar('s') class A(Generic[t]): class B(Generic[s]): x = 0 # type: A[s] y = 0 # type: A[t] # E: Invalid type "__main__.t" z = 0 # type: A[s] # E: Invalid type "__main__.s" a = 0 # type: A[t] [out] [case testTestExtendPrimitives] class C(bool): pass # E: 'bool' is not a valid base class class A(int): pass # ok class B(float): pass # ok class D(str): pass # ok [builtins fixtures/primitives.pyi] [out] [case testCyclicInheritance] class A(A): pass # E: Cycle in inheritance hierarchy [out] [case testAssignToTypeDef] import typing class A: pass A = None # E: Cannot assign to a type [out] [case testInvalidCastTargetSyntax] from typing import cast, TypeVar, Generic t = TypeVar('t') class C(Generic[t]): pass cast(str + str, None) # E: Cast target is not a type cast(C[str][str], None) # E: Cast target is not a type cast(C[str + str], None) # E: Cast target is not a type cast([int, str], None) # E: Invalid type [out] [case testInvalidCastTargetType] from typing import cast x = 0 cast(x, None) # E: Invalid type "__main__.x" cast(t, None) # E: Name 't' is not defined cast(__builtins__.x, None) # E: Name '__builtins__.x' is not defined [out] [case testInvalidCastTargetType2] from typing import cast x = 0 cast(str[str], None) # E: "str" expects no type arguments, but 1 given [out] [case testInvalidNumberOfArgsToCast] from typing import cast cast(str) # E: 'cast' expects 2 arguments cast(str, None, None) # E: 'cast' expects 2 arguments [out] [case testInvalidKindsOfArgsToCast] from typing import cast cast(str, *None) # E: 'cast' must be called with 2 positional arguments cast(str, target=None) # E: 'cast' must be called with 2 positional arguments [out] [case 
testInvalidAnyCall] from typing import Any Any(str, None) # E: Any(...) is no longer supported. Use cast(Any, ...) instead Any(arg=str) # E: Any(...) is no longer supported. Use cast(Any, ...) instead [out] [case testTypeListAsType] def f(x:[int, str]) -> None: # E: Invalid type pass [out] [case testInvalidFunctionType] from typing import Callable x = None # type: Callable[int, str] y = None # type: Callable[int] z = None # type: Callable[int, int, int] [out] main:2: error: The first argument to Callable must be a list of types or "..." main:3: error: Please use "Callable[[], ]" or "Callable" main:4: error: Please use "Callable[[], ]" or "Callable" [case testAbstractGlobalFunction] import typing from abc import abstractmethod @abstractmethod def foo(): pass [out] main:3: error: 'abstractmethod' used with a non-method [case testAbstractNestedFunction] import typing from abc import abstractmethod def g() -> None: @abstractmethod def foo(): pass [out] main:4: error: 'abstractmethod' used with a non-method [case testInvalidTypeDeclaration] import typing def f(): pass f() = 1 # type: int [out] main:3: error: can't assign to function call [case testIndexedAssignmentWithTypeDeclaration] import typing None[1] = 1 # type: int [out] main:2: error: Unexpected type declaration [case testNonSelfMemberAssignmentWithTypeDeclaration] import typing None.x = 1 # type: int [out] main:2: error: Type cannot be declared in assignment to non-self attribute [case testNonSelfMemberAssignmentWithTypeDeclarationInMethod] import typing class A: def f(self, x) -> None: x.y = 1 # type: int [out] main:4: error: Type cannot be declared in assignment to non-self attribute [case testInvalidTypeInTypeApplication] from typing import TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass A[TypeVar] # E: Invalid type "typing.TypeVar" [out] [case testInvalidTypeInTypeApplication2] from typing import TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass A[1] # E: Type expected within [...] 
[out] [case testVariableDeclWithInvalidNumberOfTypes] x, y = 1, 2 # type: int, str, int # E: Incompatible number of tuple items [out] [case testVariableDeclWithInvalidNumberOfTypesNested] x, (y, z) = 1, (2, 3) # type: int, (str, int, int) # E: Incompatible number of tuple items [out] [case testVariableDeclWithInvalidNumberOfTypesNested2] x, (y, z) = 1, (2, 3) # type: int, (str, ) # E: Incompatible number of tuple items [out] [case testVariableDeclWithInvalidNumberOfTypesNested3] x, (y, z) = 1, (2, 3) # type: int, str # E: Tuple type expected for multiple variables [out] [case testVariableDeclWithInvalidNumberOfTypesNested4] x, (y, z) = 1, (2, 3) # type: int, str, int # E: Incompatible number of tuple items [out] [case testVariableDeclWithInvalidNumberOfTypesNested5] x, (y, ) = 1, (2, ) # type: int, str # E: Tuple type expected for multiple variables [out] [case testVariableDeclWithInvalidType] x, y = 1, 2 # type: int # E: Tuple type expected for multiple variables [out] [case testInvalidLvalueWithExplicitType] a = 1 a() = None # type: int # E: can't assign to function call [out] [case testInvalidLvalueWithExplicitType2] a = 1 a[1] = None # type: int # E: Unexpected type declaration a.x = None # type: int \ # E: Type cannot be declared in assignment to non-self attribute [out] [case testInvalidLvalueWithExplicitType3] a = 1 a.y, a.x = None, None # type: int, int \ # E: Type cannot be declared in assignment to non-self attribute a[1], a[2] = None, None # type: int, int \ # E: Unexpected type declaration [out] [case testMissingGenericImport] from typing import TypeVar T = TypeVar('T') class A(Generic[T]): pass [out] main:3: error: Name 'Generic' is not defined [case testInvalidTypeWithinGeneric] from typing import Generic class A(Generic[int]): pass # E: Free type variable expected in Generic[...] [out] [case testInvalidTypeWithinNestedGenericClass] from typing import Generic, TypeVar T = TypeVar('T') class A(Generic[T]): class B(Generic[T]): pass \ # E: Free type variable expected in Generic[...] [out] [case testIncludingGenericTwiceInBaseClassList] from typing import Generic, TypeVar T = TypeVar('T') S = TypeVar('S') class A(Generic[T], Generic[S]): pass \ # E: Only single Generic[...] or Protocol[...] can be in bases [out] [case testInvalidMetaclass] class A(metaclass=x): pass # E: Name 'x' is not defined [out] [case testInvalidQualifiedMetaclass] import abc class A(metaclass=abc.Foo): pass # E: Name 'abc.Foo' is not defined [out] [case testNonClassMetaclass] def f(): pass class A(metaclass=f): pass # E: Invalid metaclass 'f' [out] [case testInvalidTypevarArguments] from typing import TypeVar a = TypeVar() # E: Too few arguments for TypeVar() b = TypeVar(x='b') # E: TypeVar() expects a string literal as first argument c = TypeVar(1) # E: TypeVar() expects a string literal as first argument d = TypeVar('D') # E: String argument 1 'D' to TypeVar(...) 
does not match variable name 'd' e = TypeVar('e', int, str, x=1) # E: Unexpected argument to TypeVar(): x f = TypeVar('f', (int, str), int) # E: Type expected g = TypeVar('g', int) # E: TypeVar cannot have only a single constraint h = TypeVar('h', x=(int, str)) # E: Unexpected argument to TypeVar(): x i = TypeVar('i', bound=1) # E: TypeVar 'bound' must be a type [out] [case testMoreInvalidTypevarArguments] from typing import TypeVar T = TypeVar('T', int, str, bound=bool) # E: TypeVar cannot have both values and an upper bound S = TypeVar('S', covariant=True, contravariant=True) \ # E: TypeVar cannot be both covariant and contravariant [builtins fixtures/bool.pyi] [case testInvalidTypevarValues] from typing import TypeVar b = TypeVar('b', *[int]) # E: Unexpected argument to TypeVar() c = TypeVar('c', int, 2) # E: Type expected [out] [case testObsoleteTypevarValuesSyntax] from typing import TypeVar a = TypeVar('a', values=(int, str)) [out] main:2: error: TypeVar 'values' argument not supported main:2: error: Use TypeVar('T', t, ...) instead of TypeVar('T', values=(t, ...)) [case testLocalTypevarScope] from typing import TypeVar def f() -> None: T = TypeVar('T') def g(x: T) -> None: pass # E: Name 'T' is not defined [out] [case testClassTypevarScope] from typing import TypeVar class A: T = TypeVar('T') def g(x: T) -> None: pass # E: Name 'T' is not defined [out] [case testRedefineVariableAsTypevar] from typing import TypeVar x = 0 x = TypeVar('x') # E: Cannot redefine 'x' as a type variable [out] [case testTypevarWithType] from typing import TypeVar x = TypeVar('x') # type: int # E: Cannot declare the type of a type variable [out] [case testRedefineTypevar] from typing import TypeVar t = TypeVar('t') t = 1 # E: Invalid assignment target [out] [case testRedefineTypevar2] from typing import TypeVar t = TypeVar('t') def t(): pass # E: Name 't' already defined on line 2 [out] [case testRedefineTypevar3] from typing import TypeVar t = TypeVar('t') class t: pass # E: Name 't' already defined on line 2 [out] [case testRedefineTypevar4] from typing import TypeVar t = TypeVar('t') from typing import Generic as t # E: Name 't' already defined [out] [case testInvalidStrLiteralType] def f(x: 'foo'): pass # E: Name 'foo' is not defined [out] [case testInvalidStrLiteralType2] def f(x: 'int['): pass # E: syntax error in type comment [out] [case testInconsistentOverload] from typing import overload def dec(x): pass @dec # E: The implementation for an overloaded function must come last def f(): pass @overload def f(): pass [out] [case testInconsistentOverload2] from typing import overload def dec(x): pass @dec # E: The implementation for an overloaded function must come last def f(): pass @overload def f(): pass [out] [case testMissingOverloadDecorator] from typing import overload def dec(x): pass @dec def f(): pass @dec # E: Name 'f' already defined def f(): pass [out] [case testIncompatibleSignatureInComment] import typing def f(): # type: (int) -> int pass def g(x): # type: () -> int pass [out] main:2: error: Type signature has too many arguments main:4: error: Type signature has too few arguments [case testStaticmethodAndNonMethod] import typing @staticmethod def f(): pass class A: def g(self) -> None: @staticmethod def h(): pass [builtins fixtures/staticmethod.pyi] [out] main:2: error: 'staticmethod' used with a non-method main:6: error: 'staticmethod' used with a non-method [case testClassmethodAndNonMethod] import typing @classmethod def f(): pass class A: def g(self) -> None: @classmethod def h(): 
pass [builtins fixtures/classmethod.pyi] [out] main:2: error: 'classmethod' used with a non-method main:6: error: 'classmethod' used with a non-method [case testNonMethodProperty] import typing @property # E: 'property' used with a non-method def f() -> int: pass [builtins fixtures/property.pyi] [out] [case testInvalidArgCountForProperty] import typing class A: @property def f(self, x) -> int: pass # E: Too many arguments @property def g() -> int: pass # E: Method must have at least one argument [builtins fixtures/property.pyi] [out] [case testOverloadedProperty] from typing import overload class A: @overload # E: Decorated property not supported @property def f(self) -> int: pass @property # E: Decorated property not supported @overload def f(self) -> int: pass [builtins fixtures/property.pyi] [out] [case testOverloadedProperty2] from typing import overload class A: @overload # E: An overloaded function outside a stub file must have an implementation def f(self) -> int: pass @property # E: Decorated property not supported @overload def f(self) -> int: pass [builtins fixtures/property.pyi] [out] [case testDecoratedProperty] import typing def dec(f): pass class A: @dec # E: Decorated property not supported @property def f(self) -> int: pass @property # E: Decorated property not supported @dec def g(self) -> int: pass [builtins fixtures/property.pyi] [out] [case testImportTwoModulesWithSameNameInFunction] import typing def f() -> None: import x import y as x # E: Name 'x' already defined x.y [file x.py] y = 1 [file y.py] [out] [case testImportTwoModulesWithSameNameInGlobalContext] import typing import x import y as x # E: Name 'x' already defined x.y [file x.py] y = 1 [file y.py] [out] [case testListTypeAliasWithoutImport] import typing def f() -> List[int]: pass [builtins fixtures/list.pyi] [out] main:2: error: Name 'List' is not defined [case testImportObsoleteTypingFunction] from typing import Function # E: Module 'typing' has no attribute 'Function' (it's now called 'typing.Callable') from _m import Function # E: Module '_m' has no attribute 'Function' [file _m.py] [out] [case testTypeRefresToObsoleteTypingFunction] import typing import _m def f(x: typing.Function[[], None]) -> None: pass def g(x: _m.Function[[], None]) -> None: pass [file _m.py] [out] main:3: error: Name 'typing.Function' is not defined (it's now called 'typing.Callable') --' main:4: error: Name '_m.Function' is not defined [case testUnqualifiedNameRefersToObsoleteTypingFunction] x = None # type: Function[[], None] [out] main:1: error: Name 'Function' is not defined main:1: note: (Did you mean 'typing.Callable'?) [case testInvalidWithTarget] def f(): pass with f() as 1: pass # E: can't assign to literal [out] [case testUseObsoleteNameForTypeVar] from typing import typevar t = typevar('t') [out] main:1: error: Module 'typing' has no attribute 'typevar' (it's now called 'typing.TypeVar') --' (this fixes syntax highlighting) [case testUseObsoleteNameForTypeVar2] t = typevar('t') [out] main:1: error: Name 'typevar' is not defined main:1: note: (Did you mean 'typing.TypeVar'?) 
[case testUseObsoleteNameForTypeVar3] import typing t = typing.typevar('t') [out] main:2: error: Module 'typing' has no attribute 'typevar' (it's now called 'typing.TypeVar') --' (work around syntax highlighting :-/) [case testInvalidTypeAnnotation] import typing def f() -> None: 1[2] = 1 # type: int [out] main:3: error: Unexpected type declaration [case testInvalidTypeAnnotation2] import typing def f() -> None: f() = 1 # type: int [out] main:3: error: can't assign to function call [case testInvalidReferenceToAttributeOfOuterClass] class A: class X: pass class B: y = X # E: Name 'X' is not defined [out] [case testStubPackage] from m import x from m import y # E: Module 'm' has no attribute 'y' [file m/__init__.pyi] x = 1 [out] [case testStubPackageSubModule] from m import x from m import y # E: Module 'm' has no attribute 'y' from m.m2 import y from m.m2 import z # E: Module 'm.m2' has no attribute 'z' [file m/__init__.pyi] x = 1 [file m/m2.pyi] y = 1 [out] [case testMissingStubForThirdPartyModule] import __dummy_third_party1 [out] main:1: error: No library stub file for module '__dummy_third_party1' main:1: note: (Stub files are from https://github.com/python/typeshed) [case testMissingStubForStdLibModule] import __dummy_stdlib1 [out] main:1: error: No library stub file for standard library module '__dummy_stdlib1' main:1: note: (Stub files are from https://github.com/python/typeshed) [case testMissingStubForTwoModules] import __dummy_stdlib1 import __dummy_stdlib2 [out] main:1: error: No library stub file for standard library module '__dummy_stdlib1' main:1: note: (Stub files are from https://github.com/python/typeshed) main:2: error: No library stub file for standard library module '__dummy_stdlib2' [case testListComprehensionSpecialScoping] class A: x = 1 y = 1 z = 1 [x for i in z if y] [out] main:5: error: Name 'x' is not defined main:5: error: Name 'y' is not defined [case testTypeRedeclarationNoSpuriousWarnings] from typing import Tuple a = 1 # type: int a = 's' # type: str a = ('spam', 'spam', 'eggs', 'spam') # type: Tuple[str] [out] main:3: error: Name 'a' already defined main:4: error: Name 'a' already defined [case testDuplicateDefFromImport] from m import A class A: # E: Name 'A' already defined (possibly by an import) pass [file m.py] class A: pass [out] [case testDuplicateDefDec] from typing import Any def dec(x: Any) -> Any: return x @dec def f() -> None: pass @dec # E: Name 'f' already defined def f() -> None: pass [out] [case testDuplicateDefOverload] from typing import overload, Any if 1: @overload def f(x: int) -> None: pass @overload def f(x: str) -> None: pass def f(x: Any) -> None: pass else: def f(x: str) -> None: # E: Name 'f' already defined on line 3 pass [out] [case testDuplicateDefNT] from typing import NamedTuple N = NamedTuple('N', [('a', int), ('b', str)]) class N: # E: Name 'N' already defined on line 2 pass [out] [case testDuplicateDefTypedDict] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) class Point: # E: Name 'Point' already defined on line 2 pass [builtins fixtures/dict.pyi] [out] [case testTypeVarClassDup] from typing import TypeVar T = TypeVar('T') class T: ... # E: Name 'T' already defined on line 2 [out] [case testAliasDup] from typing import List A = List[int] class A: ... 
# E: Name 'A' already defined on line 2 [builtins fixtures/list.pyi] [out] [case testNoInvalidTypeInDynamicFunctions] from typing import Dict, TypeVar T = TypeVar('T') def f(): # Note no annotation x: Dict[str, T] = {} y: T z: x def nested(): pass t: nested def g() -> None: x: Dict[str, T] = {} # E: Invalid type "__main__.T" [builtins fixtures/dict.pyi] [out] mypy-0.560/test-data/unit/semanal-expressions.test0000644€tŠÔÚ€2›s®0000001656513215007206026453 0ustar jukkaDROPBOX\Domain Users00000000000000[case testLiterals] (1, 'x', 1.1, 1.1j) [out] MypyFile:1( ExpressionStmt:1( TupleExpr:1( IntExpr(1) StrExpr(x) FloatExpr(1.1) ComplexExpr(1.1j)))) [case testMemberExpr] x = 1 x.y [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) ExpressionStmt:2( MemberExpr:2( NameExpr(x [__main__.x]) y))) [case testIndexExpr] x = y = 1 x[y] [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) ExpressionStmt:2( IndexExpr:2( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])))) [case testBinaryOperations] x = y = 1 x + y x | y x is not y x == y [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) ExpressionStmt:2( OpExpr:2( + NameExpr(x [__main__.x]) NameExpr(y [__main__.y]))) ExpressionStmt:3( OpExpr:3( | NameExpr(x [__main__.x]) NameExpr(y [__main__.y]))) ExpressionStmt:4( ComparisonExpr:4( is not NameExpr(x [__main__.x]) NameExpr(y [__main__.y]))) ExpressionStmt:5( ComparisonExpr:5( == NameExpr(x [__main__.x]) NameExpr(y [__main__.y])))) [case testUnaryOperations] x = 1 -x ~x +x not x [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) ExpressionStmt:2( UnaryExpr:2( - NameExpr(x [__main__.x]))) ExpressionStmt:3( UnaryExpr:3( ~ NameExpr(x [__main__.x]))) ExpressionStmt:4( UnaryExpr:4( + NameExpr(x [__main__.x]))) ExpressionStmt:5( UnaryExpr:5( not NameExpr(x [__main__.x])))) [case testSlices] x = y = z = 1 x[y:z:x] x[:] x[:y] [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y]) NameExpr(z* [__main__.z])) IntExpr(1)) ExpressionStmt:2( IndexExpr:2( NameExpr(x [__main__.x]) SliceExpr:-1( NameExpr(y [__main__.y]) NameExpr(z [__main__.z]) NameExpr(x [__main__.x])))) ExpressionStmt:3( IndexExpr:3( NameExpr(x [__main__.x]) SliceExpr:-1( ))) ExpressionStmt:4( IndexExpr:4( NameExpr(x [__main__.x]) SliceExpr:-1( NameExpr(y [__main__.y]))))) [case testTupleLiteral] x = y = 1 x, y [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) ExpressionStmt:2( TupleExpr:2( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])))) [case testListLiteral] x = y = 1 ([], [x, y]) [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) ExpressionStmt:2( TupleExpr:2( ListExpr:2() ListExpr:2( NameExpr(x [__main__.x]) NameExpr(y [__main__.y]))))) [case testDictLiterals] x = y = 1 { x : y, y : x } [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) ExpressionStmt:2( DictExpr:2( NameExpr(x [__main__.x]) NameExpr(y [__main__.y]) NameExpr(y [__main__.y]) NameExpr(x [__main__.x])))) [case testListComprehension] a = 0 ([x + 1 for x in a]) [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) ExpressionStmt:2( ListComprehension:2( GeneratorExpr:2( OpExpr:2( + NameExpr(x [l]) IntExpr(1)) NameExpr(x* [l]) NameExpr(a [__main__.a]))))) [case 
testListComprehensionInFunction] def f(a) -> None: [x for x in a] [out] MypyFile:1( FuncDef:1( f Args( Var(a)) def (a: Any) Block:1( ExpressionStmt:2( ListComprehension:2( GeneratorExpr:2( NameExpr(x [l]) NameExpr(x* [l]) NameExpr(a [l]))))))) [case testListComprehensionWithCondition] a = 0 a = [x for x in a if x] [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) AssignmentStmt:2( NameExpr(a [__main__.a]) ListComprehension:2( GeneratorExpr:2( NameExpr(x [l]) NameExpr(x* [l]) NameExpr(a [__main__.a]) NameExpr(x [l]))))) [case testSetComprehension] a = 0 ({x + 1 for x in a}) [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) ExpressionStmt:2( SetComprehension:2( GeneratorExpr:2( OpExpr:2( + NameExpr(x [l]) IntExpr(1)) NameExpr(x* [l]) NameExpr(a [__main__.a]))))) [case testSetComprehensionWithCondition] a = 0 a = {x for x in a if x} [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) AssignmentStmt:2( NameExpr(a [__main__.a]) SetComprehension:2( GeneratorExpr:2( NameExpr(x [l]) NameExpr(x* [l]) NameExpr(a [__main__.a]) NameExpr(x [l]))))) [case testDictionaryComprehension] a = 0 ({x: x + 1 for x in a}) [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) ExpressionStmt:2( DictionaryComprehension:2( NameExpr(x [l]) OpExpr:2( + NameExpr(x [l]) IntExpr(1)) NameExpr(x* [l]) NameExpr(a [__main__.a])))) [case testDictionaryComprehensionWithCondition] a = 0 a = {x: x + 1 for x in a if x} [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) AssignmentStmt:2( NameExpr(a [__main__.a]) DictionaryComprehension:2( NameExpr(x [l]) OpExpr:2( + NameExpr(x [l]) IntExpr(1)) NameExpr(x* [l]) NameExpr(a [__main__.a]) NameExpr(x [l])))) [case testGeneratorExpression] a = 0 (x for x in a) [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) ExpressionStmt:2( GeneratorExpr:2( NameExpr(x [l]) NameExpr(x* [l]) NameExpr(a [__main__.a])))) [case testGeneratorExpressionNestedIndex] a = 0 (x for x, (y, z) in a) [out] MypyFile:1( AssignmentStmt:1( NameExpr(a* [__main__.a]) IntExpr(0)) ExpressionStmt:2( GeneratorExpr:2( NameExpr(x [l]) TupleExpr:2( NameExpr(x* [l]) TupleExpr:2( NameExpr(y* [l]) NameExpr(z* [l]))) NameExpr(a [__main__.a])))) [case testLambda] x = 0 lambda: x [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(0)) ExpressionStmt:2( LambdaExpr:2( Block:2( ReturnStmt:2( NameExpr(x [__main__.x])))))) [case testLambdaWithArguments] lambda x, y: x + y [out] MypyFile:1( ExpressionStmt:1( LambdaExpr:1( Args( Var(x) Var(y)) Block:1( ReturnStmt:1( OpExpr:1( + NameExpr(x [l]) NameExpr(y [l]))))))) [case testConditionalExpression] int if None else str [out] MypyFile:1( ExpressionStmt:1( ConditionalExpr:1( Condition( NameExpr(None [builtins.None])) NameExpr(int [builtins.int]) NameExpr(str [builtins.str])))) [case testDictWithKeywordArgs] dict(a=1, b=str()) [builtins fixtures/dict.pyi] [out] MypyFile:1( ExpressionStmt:1( DictExpr:1( StrExpr(a) IntExpr(1) StrExpr(b) CallExpr:1( NameExpr(str [builtins.str]) Args())))) mypy-0.560/test-data/unit/semanal-modules.test0000644€tŠÔÚ€2›s®0000003362513215007206025535 0ustar jukkaDROPBOX\Domain Users00000000000000-- NOTE: If a module has a name starting or ending with _, it is skipped in -- output. 
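-- Illustrative sketch (hypothetical, not one of this suite's own cases; the module
-- name '_helper' and the case name are invented for illustration): per the note
-- above, a case such as
--
--   [case testImportUnderscoreModuleSketch]
--   import _helper
--   _helper.x
--   [file _helper.py]
--   x = 1
--   [out]
--   MypyFile:1(
--     Import:1(_helper)
--     ExpressionStmt:2(
--       MemberExpr:2(
--         NameExpr(_helper)
--         x [_helper.x])))
--
-- would list only the __main__ MypyFile in [out]; the tmp/_helper.py tree is omitted
-- because the module name starts with an underscore. testImportMultiple below shows
-- the same behavior with the real fixtures _m.py and _n.py.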
[case testImport] import x x.y [file x.py] y = 1 [out] MypyFile:1( Import:1(x) ExpressionStmt:2( MemberExpr:2( NameExpr(x) y [x.y]))) MypyFile:1( tmp/x.py AssignmentStmt:1( NameExpr(y* [x.y]) IntExpr(1))) [case testImportedNameInType] import m x = None # type: m.c [file m.py] class c: pass [out] MypyFile:1( Import:1(m) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) m.c)) MypyFile:1( tmp/m.py ClassDef:1( c PassStmt:1())) [case testImportFrom] from m import y x = y [file m.py] y = 1 [out] MypyFile:1( ImportFrom:1(m, [y]) AssignmentStmt:2( NameExpr(x* [__main__.x]) NameExpr(y [m.y]))) MypyFile:1( tmp/m.py AssignmentStmt:1( NameExpr(y* [m.y]) IntExpr(1))) [case testImportFromType] from m import c x = None # type: c [file m.py] class c: pass [out] MypyFile:1( ImportFrom:1(m, [c]) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) m.c)) MypyFile:1( tmp/m.py ClassDef:1( c PassStmt:1())) [case testImportMultiple] import _m, _n _m.x, _n.y [file _m.py] x = 1 [file _n.py] y = 2 [out] MypyFile:1( Import:1(_m, _n) ExpressionStmt:2( TupleExpr:2( MemberExpr:2( NameExpr(_m) x [_m.x]) MemberExpr:2( NameExpr(_n) y [_n.y])))) [case testImportAs] import _m as n n.x [file _m.py] x = 1 [out] MypyFile:1( Import:1(_m : n) ExpressionStmt:2( MemberExpr:2( NameExpr(n [_m]) x [_m.x]))) [case testImportFromMultiple] from _m import x, y x, y [file _m.py] x = y = 1 [out] MypyFile:1( ImportFrom:1(_m, [x, y]) ExpressionStmt:2( TupleExpr:2( NameExpr(x [_m.x]) NameExpr(y [_m.y])))) [case testImportFromAs] from _m import y as z z [file _m.py] y = 1 [out] MypyFile:1( ImportFrom:1(_m, [y : z]) ExpressionStmt:2( NameExpr(z [_m.y]))) [case testAccessImportedName] from m import x y = x [file m.py] from _n import x [file _n.py] x = 1 [out] MypyFile:1( ImportFrom:1(m, [x]) AssignmentStmt:2( NameExpr(y* [__main__.y]) NameExpr(x [_n.x]))) MypyFile:1( tmp/m.py ImportFrom:1(_n, [x])) [case testAccessImportedName2] import _m y = _m.x [file _m.py] from _n import x [file _n.py] x = 1 [out] MypyFile:1( Import:1(_m) AssignmentStmt:2( NameExpr(y* [__main__.y]) MemberExpr:2( NameExpr(_m) x [_n.x]))) [case testAccessingImportedNameInType] from _m import c x = None # type: c [file _m.py] from _n import c [file _n.py] class c: pass [out] MypyFile:1( ImportFrom:1(_m, [c]) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) _n.c)) [case testAccessingImportedNameInType2] import _m x = None # type: _m.c [file _m.py] from _n import c [file _n.py] class c: pass [out] MypyFile:1( Import:1(_m) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) _n.c)) [case testAccessingImportedModule] from _m import _n _n.x [file _m.py] import _n [file _n.py] x = 1 [out] MypyFile:1( ImportFrom:1(_m, [_n]) ExpressionStmt:2( MemberExpr:2( NameExpr(_n) x [_n.x]))) [case testAccessingImportedModule] import _m _m._n.x [file _m.py] import _n [file _n.py] x = 1 [out] MypyFile:1( Import:1(_m) ExpressionStmt:2( MemberExpr:2( MemberExpr:2( NameExpr(_m) _n) x [_n.x]))) [case testAccessTypeViaDoubleIndirection] from _m import c a = None # type: c [file _m.py] from _n import c [file _n.py] class c: pass [out] MypyFile:1( ImportFrom:1(_m, [c]) AssignmentStmt:2( NameExpr(a [__main__.a]) NameExpr(None [builtins.None]) _n.c)) [case testAccessTypeViaDoubleIndirection2] import _m a = None # type: _m.c [file _m.py] from _n import c [file _n.py] class c: pass [out] MypyFile:1( Import:1(_m) AssignmentStmt:2( NameExpr(a [__main__.a]) NameExpr(None [builtins.None]) _n.c)) [case testImportAsterisk] 
from _m import * x, y [file _m.py] x = y = 1 [out] MypyFile:1( ImportAll:1(_m) ExpressionStmt:2( TupleExpr:2( NameExpr(x [_m.x]) NameExpr(y [_m.y])))) [case testImportAsteriskAndImportedNames] from _m import * n_.x, y [file _m.py] import n_ from n_ import y [file n_.py] x = y = 1 [out] MypyFile:1( ImportAll:1(_m) ExpressionStmt:2( TupleExpr:2( MemberExpr:2( NameExpr(n_) x [n_.x]) NameExpr(y [n_.y])))) [case testImportAsteriskAndImportedNamesInTypes] from _m import * x = None # type: n_.c y = None # type: d [file _m.py] import n_ from n_ import d [file n_.py] class c: pass class d: pass [out] MypyFile:1( ImportAll:1(_m) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) n_.c) AssignmentStmt:3( NameExpr(y [__main__.y]) NameExpr(None [builtins.None]) n_.d)) [case testModuleInSubdir] import _m _m.x [file _m/__init__.py] x = 1 [out] MypyFile:1( Import:1(_m) ExpressionStmt:2( MemberExpr:2( NameExpr(_m) x [_m.x]))) [case testNestedModules] import m.n m.n.x, m.y [file m/__init__.py] y = 1 [file m/n.py] x = 1 [out] MypyFile:1( Import:1(m.n) ExpressionStmt:2( TupleExpr:2( MemberExpr:2( MemberExpr:2( NameExpr(m) n [m.n]) x [m.n.x]) MemberExpr:2( NameExpr(m) y [m.y])))) MypyFile:1( tmp/m/n.py AssignmentStmt:1( NameExpr(x* [m.n.x]) IntExpr(1))) [case testImportFromSubmodule] from m._n import x x [file m/__init__.py] [file m/_n.py] x = 1 [out] MypyFile:1( ImportFrom:1(m._n, [x]) ExpressionStmt:2( NameExpr(x [m._n.x]))) [case testImportAllFromSubmodule] from m._n import * x, y [file m/__init__.py] [file m/_n.py] x = y = 1 [out] MypyFile:1( ImportAll:1(m._n) ExpressionStmt:2( TupleExpr:2( NameExpr(x [m._n.x]) NameExpr(y [m._n.y])))) [case testSubmodulesAndTypes] import m._n x = None # type: m._n.c [file m/__init__.py] [file m/_n.py] class c: pass [out] MypyFile:1( Import:1(m._n) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) m._n.c)) [case testSubmodulesAndTypes] from m._n import c x = None # type: c [file m/__init__.py] [file m/_n.py] class c: pass [out] MypyFile:1( ImportFrom:1(m._n, [c]) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) m._n.c)) [case testFromPackageImportModule] from m import _n _n.x [file m/__init__.py] [file m/_n.py] x = 1 [out] MypyFile:1( ImportFrom:1(m, [_n]) ExpressionStmt:2( MemberExpr:2( NameExpr(_n [m._n]) x [m._n.x]))) [case testDeeplyNestedModule] import m.n.k m.n.k.x m.n.b m.a [file m/__init__.py] a = 1 [file m/n/__init__.py] b = 1 [file m/n/k.py] x = 1 [out] MypyFile:1( Import:1(m.n.k) ExpressionStmt:2( MemberExpr:2( MemberExpr:2( MemberExpr:2( NameExpr(m) n [m.n]) k [m.n.k]) x [m.n.k.x])) ExpressionStmt:3( MemberExpr:3( MemberExpr:3( NameExpr(m) n [m.n]) b [m.n.b])) ExpressionStmt:4( MemberExpr:4( NameExpr(m) a [m.a]))) MypyFile:1( tmp/m/n/k.py AssignmentStmt:1( NameExpr(x* [m.n.k.x]) IntExpr(1))) [case testImportInSubmodule] import m._n y = m._n.x [file m/__init__.py] [file m/_n.py] from m._k import x [file m/_k.py] x = 1 [out] MypyFile:1( Import:1(m._n) AssignmentStmt:2( NameExpr(y* [__main__.y]) MemberExpr:2( MemberExpr:2( NameExpr(m) _n [m._n]) x [m._k.x]))) [case testBuiltinsUsingModule] o = None # type: __builtins__.object [out] MypyFile:1( AssignmentStmt:1( NameExpr(o [__main__.o]) NameExpr(None [builtins.None]) builtins.object)) [case testImplicitAccessToBuiltins] object [out] MypyFile:1( ExpressionStmt:1( NameExpr(object [builtins.object]))) [case testAssignmentToModuleAttribute] import _m _m.x = ( _m.x) [file _m.py] x = None [out] MypyFile:1( Import:1(_m) AssignmentStmt:2( MemberExpr:2( 
NameExpr(_m) x [_m.x]) MemberExpr:3( NameExpr(_m) x [_m.x]))) [case testAssignmentThatRefersToModule] import _m _m.x[None] = None [file _m.py] x = None [out] MypyFile:1( Import:1(_m) AssignmentStmt:2( IndexExpr:2( MemberExpr:2( NameExpr(_m) x [_m.x]) NameExpr(None [builtins.None])) NameExpr(None [builtins.None]))) [case testImportInBlock] if 1: import _x _x.y [file _x.py] y = 1 [out] MypyFile:1( IfStmt:1( If( IntExpr(1)) Then( Import:2(_x) ExpressionStmt:3( MemberExpr:3( NameExpr(_x) y [_x.y]))))) [case testImportInFunction] def f() -> None: import _x _x.y [file _x.py] y = 1 [out] MypyFile:1( FuncDef:1( f def () Block:1( Import:2(_x) ExpressionStmt:3( MemberExpr:3( NameExpr(_x) y [_x.y]))))) [case testImportInClassBody] class A: from _x import y z = y [file _x.py] y = 1 [out] MypyFile:1( ClassDef:1( A ImportFrom:2(_x, [y]) AssignmentStmt:3( NameExpr(z* [m]) NameExpr(y [_x.y])))) [case testImportInClassBody2] class A: import _x z = _x.y [file _x.py] y = 1 [out] MypyFile:1( ClassDef:1( A Import:2(_x) AssignmentStmt:3( NameExpr(z* [m]) MemberExpr:3( NameExpr(_x) y [_x.y])))) [case testImportModuleTwice] def f() -> None: import x import x x.y [file x.py] y = 1 [out] MypyFile:1( FuncDef:1( f def () Block:1( Import:2(x) Import:3(x) ExpressionStmt:4( MemberExpr:4( NameExpr(x) y [x.y]))))) MypyFile:1( tmp/x.py AssignmentStmt:1( NameExpr(y* [x.y]) IntExpr(1))) [case testRelativeImport0] import m.x m.x.z.y [file m/__init__.py] [file m/x.py] from . import z [file m/z.py] y = 1 [out] MypyFile:1( Import:1(m.x) ExpressionStmt:2( MemberExpr:2( MemberExpr:2( MemberExpr:2( NameExpr(m) x [m.x]) z [m.z]) y [m.z.y]))) MypyFile:1( tmp/m/x.py ImportFrom:1(., [z])) MypyFile:1( tmp/m/z.py AssignmentStmt:1( NameExpr(y* [m.z.y]) IntExpr(1))) [case testRelativeImport1] import m.t.b as b b.x.y b.z.y [file m/__init__.py] [file m/x.py] y = 1 [file m/z.py] y = 3 [file m/t/__init__.py] [file m/t/b.py] from .. import x, z [out] MypyFile:1( Import:1(m.t.b : b) ExpressionStmt:2( MemberExpr:2( MemberExpr:2( NameExpr(b [m.t.b]) x [m.x]) y [m.x.y])) ExpressionStmt:3( MemberExpr:3( MemberExpr:3( NameExpr(b [m.t.b]) z [m.z]) y [m.z.y]))) MypyFile:1( tmp/m/t/b.py ImportFrom:1(.., [x, z])) MypyFile:1( tmp/m/x.py AssignmentStmt:1( NameExpr(y* [m.x.y]) IntExpr(1))) MypyFile:1( tmp/m/z.py AssignmentStmt:1( NameExpr(y* [m.z.y]) IntExpr(3))) [case testRelativeImport2] import m.t.b as b b.xy b.zy [file m/__init__.py] [file m/x.py] y = 1 [file m/z.py] y = 3 [file m/t/__init__.py] [file m/t/b.py] from ..x import y as xy from ..z import y as zy [out] MypyFile:1( Import:1(m.t.b : b) ExpressionStmt:2( MemberExpr:2( NameExpr(b [m.t.b]) xy [m.x.y])) ExpressionStmt:3( MemberExpr:3( NameExpr(b [m.t.b]) zy [m.z.y]))) MypyFile:1( tmp/m/t/b.py ImportFrom:1(..x, [y : xy]) ImportFrom:2(..z, [y : zy])) MypyFile:1( tmp/m/x.py AssignmentStmt:1( NameExpr(y* [m.x.y]) IntExpr(1))) MypyFile:1( tmp/m/z.py AssignmentStmt:1( NameExpr(y* [m.z.y]) IntExpr(3))) [case testRelativeImport3] import m.t m.zy m.xy m.t.y [file m/__init__.py] from .x import * from .z import * [file m/x.py] from .z import zy as xy [file m/z.py] zy = 3 [file m/t/__init__.py] from .b import * [file m/t/b.py] from .. 
import xy as y [out] MypyFile:1( Import:1(m.t) ExpressionStmt:2( MemberExpr:2( NameExpr(m) zy [m.z.zy])) ExpressionStmt:3( MemberExpr:3( NameExpr(m) xy [m.z.zy])) ExpressionStmt:4( MemberExpr:4( MemberExpr:4( NameExpr(m) t [m.t]) y [m.z.zy]))) MypyFile:1( tmp/m/t/b.py ImportFrom:1(.., [xy : y])) MypyFile:1( tmp/m/x.py ImportFrom:1(.z, [zy : xy])) MypyFile:1( tmp/m/z.py AssignmentStmt:1( NameExpr(zy* [m.z.zy]) IntExpr(3))) [case testRelativeImportFromSameModule] import m.x [file m/__init__.py] [file m/x.py] from .x import nonexistent [out] tmp/m/x.py:1: error: Module 'm.x' has no attribute 'nonexistent' [case testImportFromSameModule] import m.x [file m/__init__.py] [file m/x.py] from m.x import nonexistent [out] tmp/m/x.py:1: error: Module 'm.x' has no attribute 'nonexistent' [case testFromImportAsInStub] from m import * x y # E: Name 'y' is not defined [file m.pyi] from m2 import x as x from m2 import y [file m2.py] x = 1 y = 2 [out] [case testFromImportAsInNonStub] from m_ import * x y [file m_.py] from m2_ import x as x from m2_ import y [file m2_.py] x = 1 y = 2 [out] MypyFile:1( ImportAll:1(m_) ExpressionStmt:2( NameExpr(x [m2_.x])) ExpressionStmt:3( NameExpr(y [m2_.y]))) [case testImportAsInStub] from m import * m2 m3 # E: Name 'm3' is not defined [file m.pyi] import m2 as m2 import m3 [file m2.py] [file m3.py] [out] [case testImportAsInNonStub] from m_ import * m2_ m3_ [file m_.py] import m2_ as m2_ import m3_ [file m2_.py] [file m3_.py] [out] MypyFile:1( ImportAll:1(m_) ExpressionStmt:2( NameExpr(m2_)) ExpressionStmt:3( NameExpr(m3_))) [case testErrorsInMultipleModules] import m x [file m.py] y [out] tmp/m.py:1: error: Name 'y' is not defined main:2: error: Name 'x' is not defined [case testImportTwice] import typing from x import a, a # ok (we could give a warning, but this is valid) def f() -> None: from x import a from x import a # ok import x import x # ok, since we may import multiple submodules of a package [file x.py] a = 1 [out] MypyFile:1( Import:1(typing) ImportFrom:2(x, [a, a]) FuncDef:3( f def () Block:3( ImportFrom:4(x, [a]) ImportFrom:5(x, [a]))) Import:6(x) Import:7(x)) MypyFile:1( tmp/x.py AssignmentStmt:1( NameExpr(a* [x.a]) IntExpr(1))) mypy-0.560/test-data/unit/semanal-namedtuple.test0000644€tŠÔÚ€2›s®0000001130413215007206026211 0ustar jukkaDROPBOX\Domain Users00000000000000-- Semantic analysis of named tuples [case testSimpleNamedtuple] from collections import namedtuple N = namedtuple('N', ['a']) def f() -> N: pass [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[Any])) FuncDef:3( f def () -> Tuple[Any, fallback=__main__.N] Block:3( PassStmt:3()))) [case testTwoItemNamedtuple] from collections import namedtuple N = namedtuple('N', ['a', 'xyz']) def f() -> N: pass [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[Any, Any])) FuncDef:3( f def () -> Tuple[Any, Any, fallback=__main__.N] Block:3( PassStmt:3()))) [case testTwoItemNamedtupleWithTupleFieldNames] from collections import namedtuple N = namedtuple('N', ('a', 'xyz')) def f() -> N: pass [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[Any, Any])) FuncDef:3( f def () -> Tuple[Any, Any, fallback=__main__.N] Block:3( PassStmt:3()))) [case testTwoItemNamedtupleWithShorthandSyntax] from collections import namedtuple N = namedtuple('N', ' a xyz ') def f() -> N: pass [out] 
MypyFile:1( ImportFrom:1(collections, [namedtuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[Any, Any])) FuncDef:3( f def () -> Tuple[Any, Any, fallback=__main__.N] Block:3( PassStmt:3()))) [case testNamedTupleWithItemTypes] from typing import NamedTuple N = NamedTuple('N', [('a', int), ('b', str)]) [out] MypyFile:1( ImportFrom:1(typing, [NamedTuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[builtins.int, builtins.str]))) [case testNamedTupleWithTupleFieldNamesWithItemTypes] from typing import NamedTuple N = NamedTuple('N', (('a', int), ('b', str))) [out] MypyFile:1( ImportFrom:1(typing, [NamedTuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[builtins.int, builtins.str]))) [case testNamedTupleBaseClass] from collections import namedtuple N = namedtuple('N', ['x']) class A(N): pass [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) AssignmentStmt:2( NameExpr(N* [__main__.N]) NamedTupleExpr:2(N, Tuple[Any])) ClassDef:3( A TupleType( Tuple[Any, fallback=__main__.N]) BaseType( __main__.N) PassStmt:3())) [case testNamedTupleBaseClass2] from collections import namedtuple class A(namedtuple('N', ['x'])): pass [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) ClassDef:2( A TupleType( Tuple[Any, fallback=__main__.N@2]) BaseType( __main__.N@2) PassStmt:2())) [case testNamedTupleBaseClassWithItemTypes] from typing import NamedTuple class A(NamedTuple('N', [('x', int)])): pass [out] MypyFile:1( ImportFrom:1(typing, [NamedTuple]) ClassDef:2( A TupleType( Tuple[builtins.int, fallback=__main__.N@2]) BaseType( __main__.N@2) PassStmt:2())) -- Errors [case testNamedTupleWithTooFewArguments] from collections import namedtuple N = namedtuple('N') # E: Too few arguments for namedtuple() [case testNamedTupleWithTooManyArguments] from collections import namedtuple N = namedtuple('N', ['x'], 'y') # E: Too many arguments for namedtuple() [case testNamedTupleWithInvalidName] from collections import namedtuple N = namedtuple(1, ['x']) # E: namedtuple() expects a string literal as the first argument [case testNamedTupleWithInvalidItems] from collections import namedtuple N = namedtuple('N', 1) # E: List or tuple literal expected as the second argument to namedtuple() [case testNamedTupleWithInvalidItems2] from collections import namedtuple N = namedtuple('N', ['x', 1]) # E: String literal expected as namedtuple() item [case testNamedTupleWithUnderscoreItemName] from collections import namedtuple N = namedtuple('N', ['_fallback']) # E: namedtuple() field names cannot start with an underscore: _fallback -- NOTE: The following code works at runtime but is not yet supported by mypy. -- Keyword arguments may potentially be supported in the future. [case testNamedTupleWithNonpositionalArgs] from collections import namedtuple N = namedtuple(typename='N', field_names=['x']) # E: Unexpected arguments to namedtuple() [case testInvalidNamedTupleBaseClass] from typing import NamedTuple class A(NamedTuple('N', [1])): pass # E: Tuple expected as NamedTuple() field class B(A): pass [case testInvalidNamedTupleBaseClass2] class A(NamedTuple('N', [1])): pass class B(A): pass [out] main:1: error: Name 'NamedTuple' is not defined main:1: error: Invalid base class mypy-0.560/test-data/unit/semanal-python2.test0000644€tŠÔÚ€2›s®0000000300013215007206025450 0ustar jukkaDROPBOX\Domain Users00000000000000-- Python 2 semantic analysis test cases. 
[case testPrintStatement_python2] print int, None [out] MypyFile:1( PrintStmt:1( NameExpr(int [builtins.int]) NameExpr(None [builtins.None]) Newline)) [case testPrintStatementWithTarget] print >>int, None [out] MypyFile:1( PrintStmt:1( NameExpr(None [builtins.None]) Target( NameExpr(int [builtins.int])) Newline)) [case testExecStatement] exec None exec None in int exec None in int, str [out] MypyFile:1( ExecStmt:1( NameExpr(None [builtins.None])) ExecStmt:2( NameExpr(None [builtins.None]) NameExpr(int [builtins.int])) ExecStmt:3( NameExpr(None [builtins.None]) NameExpr(int [builtins.int]) NameExpr(str [builtins.str]))) [case testVariableLengthTuple_python2] from typing import Tuple, cast cast(Tuple[int, ...], ()) [builtins_py2 fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [Tuple, cast]) ExpressionStmt:2( CastExpr:2( TupleExpr:2() builtins.tuple[builtins.int]))) [case testTupleArgList_python2] def f(x, (y, z)): x = y [out] MypyFile:1( FuncDef:1( f Args( Var(x) Var(__tuple_arg_2)) Block:1( AssignmentStmt:1( TupleExpr:1( NameExpr(y* [l]) NameExpr(z* [l])) NameExpr(__tuple_arg_2 [l])) AssignmentStmt:2( NameExpr(x [l]) NameExpr(y [l]))))) [case testBackquoteExpr_python2] `object` [out] MypyFile:1( ExpressionStmt:1( BackquoteExpr:1( NameExpr(object [builtins.object])))) mypy-0.560/test-data/unit/semanal-statements.test0000644€tŠÔÚ€2›s®0000004020413215007206026243 0ustar jukkaDROPBOX\Domain Users00000000000000[case testReturn] def f(x): return x def g(): return [out] MypyFile:1( FuncDef:1( f Args( Var(x)) Block:1( ReturnStmt:1( NameExpr(x [l])))) FuncDef:2( g Block:2( ReturnStmt:2()))) [case testRaise] raise object() [out] MypyFile:1( RaiseStmt:1( CallExpr:1( NameExpr(object [builtins.object]) Args()))) [case testYield] def f(): yield f [out] MypyFile:1( FuncDef:1( f Generator Block:1( ExpressionStmt:1( YieldExpr:1( NameExpr(f [__main__.f])))))) [case testAssert] assert object [out] MypyFile:1( AssertStmt:1( NameExpr(object [builtins.object]))) [case testOperatorAssignment] x = y = 1 x += y y |= x [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) OperatorAssignmentStmt:2( + NameExpr(x [__main__.x]) NameExpr(y [__main__.y])) OperatorAssignmentStmt:3( | NameExpr(y [__main__.y]) NameExpr(x [__main__.x]))) [case testWhile] x = y = 1 while x: y [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) WhileStmt:2( NameExpr(x [__main__.x]) Block:2( ExpressionStmt:3( NameExpr(y [__main__.y]))))) [case testFor] for x in object: x [out] MypyFile:1( ForStmt:1( NameExpr(x* [__main__.x]) NameExpr(object [builtins.object]) Block:1( ExpressionStmt:2( NameExpr(x [__main__.x]))))) [case testForInFunction] def f(): for x in f: x [out] MypyFile:1( FuncDef:1( f Block:1( ForStmt:2( NameExpr(x* [l]) NameExpr(f [__main__.f]) Block:2( ExpressionStmt:3( NameExpr(x [l]))))))) [case testMultipleForIndexVars] for x, y in []: x, y [out] MypyFile:1( ForStmt:1( TupleExpr:1( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) ListExpr:1() Block:1( ExpressionStmt:2( TupleExpr:2( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])))))) [case testForIndexVarScope] for x in []: pass x [out] MypyFile:1( ForStmt:1( NameExpr(x* [__main__.x]) ListExpr:1() Block:1( PassStmt:2())) ExpressionStmt:3( NameExpr(x [__main__.x]))) [case testForIndexVarScope2] def f(): for x in []: pass x [out] MypyFile:1( FuncDef:1( f Block:1( ForStmt:2( NameExpr(x* [l]) ListExpr:2() Block:2( PassStmt:3())) ExpressionStmt:4( NameExpr(x 
[l]))))) [case testReusingForLoopIndexVariable] for x in None: pass for x in None: pass [out] MypyFile:1( ForStmt:1( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None]) Block:1( PassStmt:2())) ForStmt:3( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) Block:3( PassStmt:4()))) [case testReusingForLoopIndexVariable2] def f(): for x in None: pass for x in None: pass [out] MypyFile:1( FuncDef:1( f Block:1( ForStmt:2( NameExpr(x* [l]) NameExpr(None [builtins.None]) Block:2( PassStmt:3())) ForStmt:4( NameExpr(x [l]) NameExpr(None [builtins.None]) Block:4( PassStmt:5()))))) [case testLoopWithElse] for x in []: pass else: x while 1: pass else: x [out] MypyFile:1( ForStmt:1( NameExpr(x* [__main__.x]) ListExpr:1() Block:1( PassStmt:2()) Else( ExpressionStmt:4( NameExpr(x [__main__.x])))) WhileStmt:5( IntExpr(1) Block:5( PassStmt:6()) Else( ExpressionStmt:8( NameExpr(x [__main__.x]))))) [case testBreak] while 1: break for x in []: break [out] MypyFile:1( WhileStmt:1( IntExpr(1) Block:1( BreakStmt:2())) ForStmt:3( NameExpr(x* [__main__.x]) ListExpr:3() Block:3( BreakStmt:4()))) [case testContinue] while 1: continue for x in []: continue [out] MypyFile:1( WhileStmt:1( IntExpr(1) Block:1( ContinueStmt:2())) ForStmt:3( NameExpr(x* [__main__.x]) ListExpr:3() Block:3( ContinueStmt:4()))) [case testIf] x = 1 if x: x elif x: x elif x: x else: x [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) IfStmt:2( If( NameExpr(x [__main__.x])) Then( ExpressionStmt:3( NameExpr(x [__main__.x]))) Else( IfStmt:4( If( NameExpr(x [__main__.x])) Then( ExpressionStmt:5( NameExpr(x [__main__.x]))) Else( IfStmt:6( If( NameExpr(x [__main__.x])) Then( ExpressionStmt:7( NameExpr(x [__main__.x]))) Else( ExpressionStmt:9( NameExpr(x [__main__.x]))))))))) [case testSimpleIf] if object: object [out] MypyFile:1( IfStmt:1( If( NameExpr(object [builtins.object])) Then( ExpressionStmt:2( NameExpr(object [builtins.object]))))) [case testLvalues] x = y = 1 x = 1 x.m = 1 x[y] = 1 x, y = 1 [x, y] = 1 (x, y) = 1 [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) AssignmentStmt:2( NameExpr(x [__main__.x]) IntExpr(1)) AssignmentStmt:3( MemberExpr:3( NameExpr(x [__main__.x]) m) IntExpr(1)) AssignmentStmt:4( IndexExpr:4( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])) IntExpr(1)) AssignmentStmt:5( TupleExpr:5( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])) IntExpr(1)) AssignmentStmt:6( ListExpr:6( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])) IntExpr(1)) AssignmentStmt:7( TupleExpr:7( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])) IntExpr(1))) [case testStarLvalues] *x, y = 1 *x, (y, *z) = 1 *(x, q), r = 1 [out] MypyFile:1( AssignmentStmt:1( TupleExpr:1( StarExpr:1( NameExpr(x* [__main__.x])) NameExpr(y* [__main__.y])) IntExpr(1)) AssignmentStmt:2( TupleExpr:2( StarExpr:2( NameExpr(x [__main__.x])) TupleExpr:2( NameExpr(y [__main__.y]) StarExpr:2( NameExpr(z* [__main__.z])))) IntExpr(1)) AssignmentStmt:3( TupleExpr:3( StarExpr:3( TupleExpr:3( NameExpr(x [__main__.x]) NameExpr(q* [__main__.q]))) NameExpr(r* [__main__.r])) IntExpr(1))) [case testMultipleDefinition] x, y = 1 x, y = 2 [out] MypyFile:1( AssignmentStmt:1( TupleExpr:1( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) AssignmentStmt:2( TupleExpr:2( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])) IntExpr(2))) [case testComplexDefinitions] (x) = 1 ([y]) = 2 [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) AssignmentStmt:2( ListExpr:2( 
NameExpr(y* [__main__.y])) IntExpr(2))) [case testLocalComplexDefinition] def f(): (x) = 1 x [out] MypyFile:1( FuncDef:1( f Block:1( AssignmentStmt:2( NameExpr(x* [l]) IntExpr(1)) ExpressionStmt:3( NameExpr(x [l]))))) [case testMultipleDefOnlySomeNew] x = 1 y, x = 1 [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y]) NameExpr(x [__main__.x])) IntExpr(1))) [case testMultipleDefOnlySomeNewNestedTuples] x = 1 y, (x, z) = 1 [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y]) TupleExpr:2( NameExpr(x [__main__.x]) NameExpr(z* [__main__.z]))) IntExpr(1))) [case testMultipleDefOnlySomeNewNestedLists] x = 1 y, [x, z] = 1 [p, [x, r]] = 1 [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y]) ListExpr:2( NameExpr(x [__main__.x]) NameExpr(z* [__main__.z]))) IntExpr(1)) AssignmentStmt:3( ListExpr:3( NameExpr(p* [__main__.p]) ListExpr:3( NameExpr(x [__main__.x]) NameExpr(r* [__main__.r]))) IntExpr(1))) [case testIndexedDel] x = y = 1 del x[y] [out] MypyFile:1( AssignmentStmt:1( Lvalues( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) IntExpr(1)) DelStmt:2( IndexExpr:2( NameExpr(x [__main__.x]) NameExpr(y [__main__.y])))) [case testDelGlobalName] x = 1 del x [out] MypyFile:1( AssignmentStmt:1( NameExpr(x* [__main__.x]) IntExpr(1)) DelStmt:2( NameExpr(x [__main__.x]))) [case testDelLocalName] def f(x): del x [out] MypyFile:1( FuncDef:1( f Args( Var(x)) Block:1( DelStmt:2( NameExpr(x [l]))))) [case testDelMultipleThings] def f(x, y): del x, y[0] [out] MypyFile:1( FuncDef:1( f Args( Var(x) Var(y)) Block:1( DelStmt:2( TupleExpr:2( NameExpr(x [l]) IndexExpr:2( NameExpr(y [l]) IntExpr(0))))))) [case testDelMultipleThingsInvalid] def f(x, y) -> None: del x, y + 1 [out] main:2: error: can't delete operator [case testTry] class c: pass try: c except object: c except c as e: e except: c finally: c [out] MypyFile:1( ClassDef:1( c PassStmt:1()) TryStmt:2( Block:2( ExpressionStmt:3( NameExpr(c [__main__.c]))) NameExpr(object [builtins.object]) Block:4( ExpressionStmt:5( NameExpr(c [__main__.c]))) NameExpr(c [__main__.c]) NameExpr(e* [__main__.e]) Block:6( ExpressionStmt:7( NameExpr(e [__main__.e]))) Block:8( ExpressionStmt:9( NameExpr(c [__main__.c]))) Finally( ExpressionStmt:11( NameExpr(c [__main__.c]))))) [case testTryElse] try: pass except: pass else: object [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) Block:3( PassStmt:4()) Else( ExpressionStmt:6( NameExpr(object [builtins.object]))))) [case testTryWithOnlyFinally] try: pass finally: pass [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) Finally( PassStmt:4()))) [case testExceptWithMultipleTypes] class c: pass try: pass except (c, object) as e: e [out] MypyFile:1( ClassDef:1( c PassStmt:1()) TryStmt:2( Block:2( PassStmt:3()) TupleExpr:4( NameExpr(c [__main__.c]) NameExpr(object [builtins.object])) NameExpr(e* [__main__.e]) Block:4( ExpressionStmt:5( NameExpr(e [__main__.e]))))) [case testRaiseWithoutExpr] raise [out] MypyFile:1( RaiseStmt:1()) [case testWith] with object: object [out] MypyFile:1( WithStmt:1( Expr( NameExpr(object [builtins.object])) Block:1( ExpressionStmt:2( NameExpr(object [builtins.object]))))) [case testWithAndVariable] with object as x: x [out] MypyFile:1( WithStmt:1( Expr( NameExpr(object [builtins.object])) Target( NameExpr(x* [__main__.x])) Block:1( ExpressionStmt:2( NameExpr(x 
[__main__.x]))))) [case testWithInFunction] def f(): with f as x: x [out] MypyFile:1( FuncDef:1( f Block:1( WithStmt:2( Expr( NameExpr(f [__main__.f])) Target( NameExpr(x* [l])) Block:2( ExpressionStmt:3( NameExpr(x [l]))))))) [case testComplexWith] with object, object: pass with object as a, object as b: pass [out] MypyFile:1( WithStmt:1( Expr( NameExpr(object [builtins.object])) Expr( NameExpr(object [builtins.object])) Block:1( PassStmt:2())) WithStmt:3( Expr( NameExpr(object [builtins.object])) Target( NameExpr(a* [__main__.a])) Expr( NameExpr(object [builtins.object])) Target( NameExpr(b* [__main__.b])) Block:3( PassStmt:4()))) [case testVariableInBlock] while object: x = None x = x [out] MypyFile:1( WhileStmt:1( NameExpr(object [builtins.object]) Block:1( AssignmentStmt:2( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) AssignmentStmt:3( NameExpr(x [__main__.x]) NameExpr(x [__main__.x]))))) [case testVariableInExceptHandler] try: pass except object as o: x = None o = x [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) NameExpr(object [builtins.object]) NameExpr(o* [__main__.o]) Block:3( AssignmentStmt:4( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) AssignmentStmt:5( NameExpr(o [__main__.o]) NameExpr(x [__main__.x]))))) [case testCallInExceptHandler] try: pass except object as o: o = object() [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) NameExpr(object [builtins.object]) NameExpr(o* [__main__.o]) Block:3( AssignmentStmt:4( NameExpr(o [__main__.o]) CallExpr:4( NameExpr(object [builtins.object]) Args()))))) [case testTryExceptWithMultipleHandlers] try: pass except BaseException as e: pass except Err as f: f = BaseException() # Fail f = Err() class Err(BaseException): pass [builtins fixtures/exception.pyi] [out] MypyFile:1( TryStmt:1( Block:1( PassStmt:2()) NameExpr(BaseException [builtins.BaseException]) NameExpr(e* [__main__.e]) Block:3( PassStmt:4()) NameExpr(Err [__main__.Err]) NameExpr(f* [__main__.f]) Block:5( AssignmentStmt:6( NameExpr(f [__main__.f]) CallExpr:6( NameExpr(BaseException [builtins.BaseException]) Args())) AssignmentStmt:7( NameExpr(f [__main__.f]) CallExpr:7( NameExpr(Err [__main__.Err]) Args())))) ClassDef:8( Err BaseType( builtins.BaseException) PassStmt:8())) [case testMultipleAssignmentWithPartialNewDef] o = None x, o = o, o [out] MypyFile:1( AssignmentStmt:1( NameExpr(o* [__main__.o]) NameExpr(None [builtins.None])) AssignmentStmt:2( TupleExpr:2( NameExpr(x* [__main__.x]) NameExpr(o [__main__.o])) TupleExpr:2( NameExpr(o [__main__.o]) NameExpr(o [__main__.o])))) [case testFunctionDecorator] def decorate(f): pass @decorate def g(): g() [out] MypyFile:1( FuncDef:1( decorate Args( Var(f)) Block:1( PassStmt:1())) Decorator:2( Var(g) NameExpr(decorate [__main__.decorate]) FuncDef:3( g Block:3( ExpressionStmt:4( CallExpr:4( NameExpr(g [__main__.g]) Args())))))) [case testTryWithinFunction] def f() -> None: try: pass except object as o: pass [out] MypyFile:1( FuncDef:1( f def () Block:1( TryStmt:2( Block:2( PassStmt:3()) NameExpr(object [builtins.object]) NameExpr(o* [l]) Block:4( PassStmt:5()))))) [case testReuseExceptionVariable] def f() -> None: try: pass except object as o: pass except object as o: pass [out] MypyFile:1( FuncDef:1( f def () Block:1( TryStmt:2( Block:2( PassStmt:3()) NameExpr(object [builtins.object]) NameExpr(o* [l]) Block:4( PassStmt:5()) NameExpr(object [builtins.object]) NameExpr(o [l]) Block:6( PassStmt:7()))))) [case testWithMultiple] def f(a): pass def main(): with f(0) as a, f(a) as b: x = a, b [out] 
MypyFile:1( FuncDef:1( f Args( Var(a)) Block:1( PassStmt:2())) FuncDef:3( main Block:3( WithStmt:4( Expr( CallExpr:4( NameExpr(f [__main__.f]) Args( IntExpr(0)))) Target( NameExpr(a* [l])) Expr( CallExpr:4( NameExpr(f [__main__.f]) Args( NameExpr(a [l])))) Target( NameExpr(b* [l])) Block:4( AssignmentStmt:5( NameExpr(x* [l]) TupleExpr:5( NameExpr(a [l]) NameExpr(b [l])))))))) mypy-0.560/test-data/unit/semanal-symtable.test0000644€tŠÔÚ€2›s®0000000354713215007206025705 0ustar jukkaDROPBOX\Domain Users00000000000000[case testEmptyFile] [out] -- Note that builtins are ignored to simplify output. __main__: SymbolTable() [case testVarDef] x = 1 [out] __main__: SymbolTable( x : Gdef/Var (__main__.x)) [case testFuncDef] def f(): pass [out] __main__: SymbolTable( f : Gdef/FuncDef (__main__.f)) [case testEmptyClassDef] class c: pass [out] __main__: SymbolTable( c : Gdef/TypeInfo (__main__.c)) [case testImport] import m [file m.py] x = 1 [out] __main__: SymbolTable( m : ModuleRef/MypyFile (m)) m: SymbolTable( x : Gdef/Var (m.x)) [case testImportFromModule] from m import x [file m.py] class x: pass y = 1 [out] __main__: SymbolTable( x : Gdef/TypeInfo (m.x)) m: SymbolTable( x : Gdef/TypeInfo (m.x) y : Gdef/Var (m.y)) [case testImportAs] from m import x as xx [file m.py] class x: pass y = 1 [out] __main__: SymbolTable( xx : Gdef/TypeInfo (m.x)) m: SymbolTable( x : Gdef/TypeInfo (m.x) y : Gdef/Var (m.y)) [case testFailingImports] from sys import non_existing1 # type: ignore from xyz import non_existing2 # type: ignore if int(): from sys import non_existing3 # type: ignore import non_existing4 # type: ignore [out] __main__: SymbolTable( non_existing1 : Gdef/Var (__main__.non_existing1) : Any non_existing2 : Gdef/Var (__main__.non_existing2) : Any non_existing3 : Gdef/Var (__main__.non_existing3) : Any non_existing4 : Gdef/Var (__main__.non_existing4) : Any) sys: SymbolTable( platform : Gdef/Var (sys.platform) version_info : Gdef/Var (sys.version_info)) [case testDecorator] from typing import Callable def dec(f: Callable[[], None]) -> Callable[[], None]: return f @dec def g() -> None: pass [out] __main__: SymbolTable( Callable : Gdef/Var (typing.Callable) dec : Gdef/FuncDef (__main__.dec) : def (f: def ()) -> def () g : Gdef/Decorator (__main__.g) : def ()) mypy-0.560/test-data/unit/semanal-typealiases.test0000644€tŠÔÚ€2›s®0000002071713215007206026406 0ustar jukkaDROPBOX\Domain Users00000000000000[case testListTypeAlias] from typing import List def f() -> List[int]: pass [builtins fixtures/list.pyi] [out] MypyFile:1( ImportFrom:1(typing, [List]) FuncDef:2( f def () -> builtins.list[builtins.int] Block:2( PassStmt:2()))) [case testDictTypeAlias] from typing import Dict def f() -> Dict[int, str]: pass [builtins fixtures/dict.pyi] [out] MypyFile:1( ImportFrom:1(typing, [Dict]) FuncDef:2( f def () -> builtins.dict[builtins.int, builtins.str] Block:2( PassStmt:2()))) [case testQualifiedTypeAlias] import typing def f() -> typing.List[int]: pass [builtins fixtures/list.pyi] [out] MypyFile:1( Import:1(typing) FuncDef:2( f def () -> builtins.list[builtins.int] Block:2( PassStmt:2()))) [case testTypeApplicationWithTypeAlias] from typing import List List[List[int]] [builtins fixtures/list.pyi] [out] MypyFile:1( ImportFrom:1(typing, [List]) ExpressionStmt:2( TypeApplication:2( NameExpr(List [builtins.list]) Types( builtins.list[builtins.int])))) [case testTypeApplicationWithQualifiedTypeAlias] import typing typing.List[typing.List[int]] [builtins fixtures/list.pyi] [out] MypyFile:1( Import:1(typing) ExpressionStmt:2( 
TypeApplication:2( MemberExpr:2( NameExpr(typing) List [builtins.list]) Types( builtins.list[builtins.int])))) [case testSimpleTypeAlias] import typing class A: pass A2 = A def f(x: A2) -> A: pass [out] MypyFile:1( Import:1(typing) ClassDef:2( A PassStmt:2()) AssignmentStmt:3( NameExpr(A2* [__main__.A2]) NameExpr(A [__main__.A])) FuncDef:4( f Args( Var(x)) def (x: __main__.A) -> __main__.A Block:4( PassStmt:4()))) [case testQualifiedSimpleTypeAlias] import typing import _m A2 = _m.A x = 1 # type: A2 [file _m.py] import typing class A: pass [out] MypyFile:1( Import:1(typing) Import:2(_m) AssignmentStmt:3( NameExpr(A2* [__main__.A2]) MemberExpr:3( NameExpr(_m) A [_m.A])) AssignmentStmt:4( NameExpr(x [__main__.x]) IntExpr(1) _m.A)) [case testUnionTypeAlias] from typing import Union U = Union[int, str] def f(x: U) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Union]) AssignmentStmt:2( NameExpr(U* [__main__.U]) TypeAliasExpr(Union[builtins.int, builtins.str])) FuncDef:3( f Args( Var(x)) def (x: Union[builtins.int, builtins.str]) Block:3( PassStmt:3()))) [case testUnionTypeAlias2] from typing import Union class A: pass U = Union[int, A] def f(x: U) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Union]) ClassDef:2( A PassStmt:2()) AssignmentStmt:3( NameExpr(U* [__main__.U]) TypeAliasExpr(Union[builtins.int, __main__.A])) FuncDef:4( f Args( Var(x)) def (x: Union[builtins.int, __main__.A]) Block:4( PassStmt:4()))) [case testUnionTypeAliasWithQualifiedUnion] import typing U = typing.Union[int, str] def f(x: U) -> None: pass [out] MypyFile:1( Import:1(typing) AssignmentStmt:2( NameExpr(U* [__main__.U]) TypeAliasExpr(Union[builtins.int, builtins.str])) FuncDef:3( f Args( Var(x)) def (x: Union[builtins.int, builtins.str]) Block:3( PassStmt:3()))) [case testTupleTypeAlias] from typing import Tuple T = Tuple[int, str] def f(x: T) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Tuple]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeAliasExpr(Tuple[builtins.int, builtins.str])) FuncDef:3( f Args( Var(x)) def (x: Tuple[builtins.int, builtins.str]) Block:3( PassStmt:3()))) [case testCallableTypeAlias] from typing import Callable C = Callable[[int], None] def f(x: C) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Callable]) AssignmentStmt:2( NameExpr(C* [__main__.C]) TypeAliasExpr(def (builtins.int))) FuncDef:3( f Args( Var(x)) def (x: def (builtins.int)) Block:3( PassStmt:3()))) [case testGenericTypeAlias] from typing import Generic, TypeVar T = TypeVar('T') class G(Generic[T]): pass A = G[int] def f(x: A) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Generic, TypeVar]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2()) ClassDef:3( G TypeVars( T) PassStmt:3()) AssignmentStmt:4( NameExpr(A* [__main__.A]) TypeAliasExpr(__main__.G[builtins.int])) FuncDef:5( f Args( Var(x)) def (x: __main__.G[builtins.int]) Block:5( PassStmt:5()))) [case testGenericTypeAlias2] from typing import List A = List[int] def f(x: A) -> None: pass [builtins fixtures/list.pyi] [out] MypyFile:1( ImportFrom:1(typing, [List]) AssignmentStmt:2( NameExpr(A* [__main__.A]) TypeAliasExpr(builtins.list[builtins.int])) FuncDef:3( f Args( Var(x)) def (x: builtins.list[builtins.int]) Block:3( PassStmt:3()))) [case testImportUnionTypeAlias] import typing from _m import U def f(x: U) -> None: pass [file _m.py] from typing import Union class A: pass U = Union[int, A] [out] MypyFile:1( Import:1(typing) ImportFrom:2(_m, [U]) FuncDef:3( f Args( Var(x)) def (x: Union[builtins.int, _m.A]) Block:3( 
PassStmt:3()))) [case testImportUnionTypeAlias2] import typing import _m def f(x: _m.U) -> None: pass [file _m.py] from typing import Union class A: pass U = Union[int, A] [out] MypyFile:1( Import:1(typing) Import:2(_m) FuncDef:3( f Args( Var(x)) def (x: Union[builtins.int, _m.A]) Block:3( PassStmt:3()))) [case testImportSimpleTypeAlias] import typing from _m import A def f(x: A) -> None: pass [file _m.py] import typing A = int [out] MypyFile:1( Import:1(typing) ImportFrom:2(_m, [A]) FuncDef:3( f Args( Var(x)) def (x: builtins.int) Block:3( PassStmt:3()))) [case testImportSimpleTypeAlias2] import typing import _m def f(x: _m.A) -> None: pass [file _m.py] import typing A = int [out] MypyFile:1( Import:1(typing) Import:2(_m) FuncDef:3( f Args( Var(x)) def (x: builtins.int) Block:3( PassStmt:3()))) [case testAnyTypeAlias] from typing import Any A = Any a = 1 # type: A [out] MypyFile:1( ImportFrom:1(typing, [Any]) AssignmentStmt:2( NameExpr(A* [__main__.A]) NameExpr(Any [typing.Any])) AssignmentStmt:3( NameExpr(a [__main__.a]) IntExpr(1) Any)) [case testAnyTypeAlias2] import typing A = typing.Any a = 1 # type: A [out] MypyFile:1( Import:1(typing) AssignmentStmt:2( NameExpr(A* [__main__.A]) MemberExpr:2( NameExpr(typing) Any [typing.Any])) AssignmentStmt:3( NameExpr(a [__main__.a]) IntExpr(1) Any)) [case testTypeAliasAlias] from typing import Union U = Union[int, str] U2 = U x = 1 # type: U2 [out] MypyFile:1( ImportFrom:1(typing, [Union]) AssignmentStmt:2( NameExpr(U* [__main__.U]) TypeAliasExpr(Union[builtins.int, builtins.str])) AssignmentStmt:3( NameExpr(U2* [__main__.U2]) NameExpr(U [__main__.U])) AssignmentStmt:4( NameExpr(x [__main__.x]) IntExpr(1) Union[builtins.int, builtins.str])) [case testTypeAliasOfImportedAlias] from typing import Union from _m import U U2 = U x = 1 # type: U2 [file _m.py] from typing import Union U = Union[int, str] [out] MypyFile:1( ImportFrom:1(typing, [Union]) ImportFrom:2(_m, [U]) AssignmentStmt:3( NameExpr(U2* [__main__.U2]) NameExpr(U [_m.U])) AssignmentStmt:4( NameExpr(x [__main__.x]) IntExpr(1) Union[builtins.int, builtins.str])) [case testListTypeDoesNotGenerateAlias] import typing A = [int, str] a = 1 # type: A # E: Invalid type "__main__.A" [case testCantUseStringLiteralAsTypeAlias] from typing import Union A = 'Union[int, str]' a = 1 # type: A # E: Invalid type "__main__.A" [case testStringLiteralTypeAsAliasComponent] from typing import Union A = Union['int', str] a = 1 # type: A [out] MypyFile:1( ImportFrom:1(typing, [Union]) AssignmentStmt:2( NameExpr(A* [__main__.A]) TypeAliasExpr(Union[builtins.int, builtins.str])) AssignmentStmt:3( NameExpr(a [__main__.a]) IntExpr(1) Union[builtins.int, builtins.str])) [case testComplexTypeAlias] from typing import Union, Tuple, Any A = Union['int', Tuple[int, Any]] a = 1 # type: A [out] MypyFile:1( ImportFrom:1(typing, [Union, Tuple, Any]) AssignmentStmt:2( NameExpr(A* [__main__.A]) TypeAliasExpr(Union[builtins.int, Tuple[builtins.int, Any]])) AssignmentStmt:3( NameExpr(a [__main__.a]) IntExpr(1) Union[builtins.int, Tuple[builtins.int, Any]])) mypy-0.560/test-data/unit/semanal-typeddict.test0000644€tŠÔÚ€2›s®0000000275213215007206026053 0ustar jukkaDROPBOX\Domain Users00000000000000-- Create Type -- TODO: Implement support for this syntax. 
--[case testCanCreateTypedDictTypeWithKeywordArguments] --from mypy_extensions import TypedDict --Point = TypedDict('Point', x=int, y=int) --[builtins fixtures/dict.pyi] --[out] --MypyFile:1( -- ImportFrom:1(mypy_extensions, [TypedDict]) -- AssignmentStmt:2( -- NameExpr(Point* [__main__.Point]) -- TypedDictExpr:2(Point))) -- TODO: Implement support for this syntax. --[case testCanCreateTypedDictTypeWithDictCall] --from mypy_extensions import TypedDict --Point = TypedDict('Point', dict(x=int, y=int)) --[builtins fixtures/dict.pyi] --[out] --MypyFile:1( -- ImportFrom:1(mypy_extensions, [TypedDict]) -- AssignmentStmt:2( -- NameExpr(Point* [__main__.Point]) -- TypedDictExpr:2(Point))) [case testCanCreateTypedDictTypeWithDictLiteral] from mypy_extensions import TypedDict Point = TypedDict('Point', {'x': int, 'y': int}) [builtins fixtures/dict.pyi] [out] MypyFile:1( ImportFrom:1(mypy_extensions, [TypedDict]) AssignmentStmt:2( NameExpr(Point* [__main__.Point]) TypedDictExpr:2(Point))) [case testTypedDictWithDocString] from mypy_extensions import TypedDict class A(TypedDict): """foo""" x: str [builtins fixtures/dict.pyi] [out] MypyFile:1( ImportFrom:1(mypy_extensions, [TypedDict]) ClassDef:2( A BaseTypeExpr( NameExpr(TypedDict [mypy_extensions.TypedDict])) ExpressionStmt:3( StrExpr(foo)) AssignmentStmt:4( NameExpr(x) TempNode:-1( Any) str?))) mypy-0.560/test-data/unit/semanal-typeinfo.test0000644€tŠÔÚ€2›s®0000000317513215007206025717 0ustar jukkaDROPBOX\Domain Users00000000000000[case testEmptyFile] [out] TypeInfoMap() [case testEmptyClass] class c: pass [out] TypeInfoMap( __main__.c : TypeInfo( Name(__main__.c) Bases(builtins.object) Mro(__main__.c, builtins.object) Names())) [case testClassWithMethod] class c: def f(self): pass [out] TypeInfoMap( __main__.c : TypeInfo( Name(__main__.c) Bases(builtins.object) Mro(__main__.c, builtins.object) Names( f))) [case testClassWithAttributes] class c: def __init__(self, x): self.y = x self.z = 1 [out] TypeInfoMap( __main__.c : TypeInfo( Name(__main__.c) Bases(builtins.object) Mro(__main__.c, builtins.object) Names( __init__ y z))) [case testBaseClass] class base: pass class c(base): pass [out] TypeInfoMap( __main__.base : TypeInfo( Name(__main__.base) Bases(builtins.object) Mro(__main__.base, builtins.object) Names()) __main__.c : TypeInfo( Name(__main__.c) Bases(__main__.base) Mro(__main__.c, __main__.base, builtins.object) Names())) [case testClassAndAbstractClass] from abc import abstractmethod, ABCMeta import typing class i(metaclass=ABCMeta): pass class c(i): pass [out] TypeInfoMap( __main__.c : TypeInfo( Name(__main__.c) Bases(__main__.i) Mro(__main__.c, __main__.i, builtins.object) Names()) __main__.i : TypeInfo( Name(__main__.i) Bases(builtins.object) Mro(__main__.i, builtins.object) Names())) [case testAttributeWithoutType] class A: a = A [out] TypeInfoMap( __main__.A : TypeInfo( Name(__main__.A) Bases(builtins.object) Mro(__main__.A, builtins.object) Names( a))) mypy-0.560/test-data/unit/semanal-types.test0000644€tŠÔÚ€2›s®0000007146413215007206025234 0ustar jukkaDROPBOX\Domain Users00000000000000[case testVarWithType] import typing class A: pass x = A() # type: A y = x [out] MypyFile:1( Import:1(typing) ClassDef:2( A PassStmt:2()) AssignmentStmt:3( NameExpr(x [__main__.x]) CallExpr:3( NameExpr(A [__main__.A]) Args()) __main__.A) AssignmentStmt:4( NameExpr(y* [__main__.y]) NameExpr(x [__main__.x]))) [case testLocalVarWithType] class A: pass def f(): x = None # type: A y = x [out] MypyFile:1( ClassDef:1( A PassStmt:1()) FuncDef:2( f Block:2( 
AssignmentStmt:3( NameExpr(x [l]) NameExpr(None [builtins.None]) __main__.A) AssignmentStmt:4( NameExpr(y* [l]) NameExpr(x [l]))))) [case testAnyType] from typing import Any x = None # type: Any y = x [out] MypyFile:1( ImportFrom:1(typing, [Any]) AssignmentStmt:2( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) Any) AssignmentStmt:3( NameExpr(y* [__main__.y]) NameExpr(x [__main__.x]))) [case testMemberVarWithType] import typing class A: def __init__(self): self.x = None # type: int [out] MypyFile:1( Import:1(typing) ClassDef:2( A FuncDef:3( __init__ Args( Var(self)) Block:3( AssignmentStmt:4( MemberExpr:4( NameExpr(self [l]) x) NameExpr(None [builtins.None]) builtins.int))))) [case testClassVarWithType] import typing class A: x = None # type: int x = 1 [out] MypyFile:1( Import:1(typing) ClassDef:2( A AssignmentStmt:3( NameExpr(x [m]) NameExpr(None [builtins.None]) builtins.int) AssignmentStmt:4( NameExpr(x [__main__.A.x]) IntExpr(1)))) [case testFunctionSig] from typing import Any class A: pass def f(x: A) -> A: pass def g(x: Any, y: A) -> None: z = x, y [out] MypyFile:1( ImportFrom:1(typing, [Any]) ClassDef:2( A PassStmt:2()) FuncDef:3( f Args( Var(x)) def (x: __main__.A) -> __main__.A Block:3( PassStmt:3())) FuncDef:4( g Args( Var(x) Var(y)) def (x: Any, y: __main__.A) Block:4( AssignmentStmt:5( NameExpr(z* [l]) TupleExpr:5( NameExpr(x [l]) NameExpr(y [l])))))) [case testBaseclass] class A: pass class B(A): pass [out] MypyFile:1( ClassDef:1( A PassStmt:1()) ClassDef:2( B BaseType( __main__.A) PassStmt:2())) [case testMultipleVarDef] class A: pass class B: pass a, b = None, None # type: (A, B) x = a, b [out] MypyFile:1( ClassDef:2( A PassStmt:2()) ClassDef:3( B PassStmt:3()) AssignmentStmt:4( TupleExpr:4( NameExpr(a [__main__.a]) NameExpr(b [__main__.b])) TupleExpr:4( NameExpr(None [builtins.None]) NameExpr(None [builtins.None])) Tuple[__main__.A, __main__.B]) AssignmentStmt:5( NameExpr(x* [__main__.x]) TupleExpr:5( NameExpr(a [__main__.a]) NameExpr(b [__main__.b])))) [case testGenericType] from typing import TypeVar, Generic, Any t = TypeVar('t') class A(Generic[t]): pass class B: pass x = None # type: A[B] y = None # type: A[Any] [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic, Any]) AssignmentStmt:3( NameExpr(t* [__main__.t]) TypeVarExpr:3()) ClassDef:5( A TypeVars( t) PassStmt:5()) ClassDef:6( B PassStmt:6()) AssignmentStmt:7( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) __main__.A[__main__.B]) AssignmentStmt:8( NameExpr(y [__main__.y]) NameExpr(None [builtins.None]) __main__.A[Any])) [case testGenericType2] from typing import TypeVar, Generic, Any t = TypeVar('t') s = TypeVar('s') class A(Generic[t, s]): pass class B: pass x = None # type: A[B, Any] [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic, Any]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) AssignmentStmt:3( NameExpr(s* [__main__.s]) TypeVarExpr:3()) ClassDef:4( A TypeVars( t s) PassStmt:4()) ClassDef:5( B PassStmt:5()) AssignmentStmt:6( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) __main__.A[__main__.B, Any])) [case testAssignmentAfterDef] class A: pass a = None # type: A a = 1 def f(): b = None # type: A b = 1 [out] MypyFile:1( ClassDef:3( A PassStmt:3()) AssignmentStmt:4( NameExpr(a [__main__.a]) NameExpr(None [builtins.None]) __main__.A) AssignmentStmt:5( NameExpr(a [__main__.a]) IntExpr(1)) FuncDef:6( f Block:6( AssignmentStmt:7( NameExpr(b [l]) NameExpr(None [builtins.None]) __main__.A) AssignmentStmt:8( NameExpr(b [l]) IntExpr(1))))) [case testCast] from 
typing import TypeVar, Generic, Any, cast t = TypeVar('t') class c: pass class d(Generic[t]): pass cast(Any, 1) cast(c, 1) cast(d[c], c) [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic, Any, cast]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( c PassStmt:3()) ClassDef:4( d TypeVars( t) PassStmt:4()) ExpressionStmt:5( CastExpr:5( IntExpr(1) Any)) ExpressionStmt:6( CastExpr:6( IntExpr(1) __main__.c)) ExpressionStmt:7( CastExpr:7( NameExpr(c [__main__.c]) __main__.d[__main__.c]))) [case testCastToQualifiedTypeAndCast] import typing import _m typing.cast(_m.C, object) [file _m.py] class C: pass [out] MypyFile:1( Import:1(typing) Import:2(_m) ExpressionStmt:3( CastExpr:3( NameExpr(object [builtins.object]) _m.C))) [case testLongQualifiedCast] import typing import _m._n typing.cast(_m._n.C, object) [file _m/__init__.py] [file _m/_n.py] class C: pass [out] MypyFile:1( Import:1(typing) Import:2(_m._n) ExpressionStmt:3( CastExpr:3( NameExpr(object [builtins.object]) _m._n.C))) [case testCastTargetWithTwoTypeArgs] from typing import TypeVar, Generic, cast t = TypeVar('t') s = TypeVar('s') class C(Generic[t, s]): pass cast(C[str, int], C) [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic, cast]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) AssignmentStmt:3( NameExpr(s* [__main__.s]) TypeVarExpr:3()) ClassDef:4( C TypeVars( t s) PassStmt:4()) ExpressionStmt:5( CastExpr:5( NameExpr(C [__main__.C]) __main__.C[builtins.str, builtins.int]))) [case testCastToTupleType] from typing import Tuple, cast cast(Tuple[int, str], None) [out] MypyFile:1( ImportFrom:1(typing, [Tuple, cast]) ExpressionStmt:2( CastExpr:2( NameExpr(None [builtins.None]) Tuple[builtins.int, builtins.str]))) [case testCastToFunctionType] from typing import Callable, cast cast(Callable[[int], str], None) [out] MypyFile:1( ImportFrom:1(typing, [Callable, cast]) ExpressionStmt:2( CastExpr:2( NameExpr(None [builtins.None]) def (builtins.int) -> builtins.str))) [case testCastToStringLiteralType] from typing import cast cast('int', 1) [out] MypyFile:1( ImportFrom:1(typing, [cast]) ExpressionStmt:2( CastExpr:2( IntExpr(1) builtins.int))) [case testFunctionTypeVariable] from typing import TypeVar t = TypeVar('t') def f(x: t) -> None: y = None # type: t [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) FuncDef:3( f Args( Var(x)) def [t] (x: t`-1) Block:3( AssignmentStmt:4( NameExpr(y [l]) NameExpr(None [builtins.None]) t`-1)))) [case testTwoFunctionTypeVariables] from typing import TypeVar t = TypeVar('t') u = TypeVar('u') def f(x: t, y: u, z: t) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) AssignmentStmt:3( NameExpr(u* [__main__.u]) TypeVarExpr:3()) FuncDef:4( f Args( Var(x) Var(y) Var(z)) def [t, u] (x: t`-1, y: u`-2, z: t`-1) Block:4( PassStmt:4()))) [case testNestedGenericFunctionTypeVariable] from typing import TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass def f(x: A[t], y) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( A TypeVars( t) PassStmt:3()) FuncDef:4( f Args( Var(x) Var(y)) def [t] (x: __main__.A[t`-1], y: Any) Block:4( PassStmt:4()))) [case testNestedGenericFunctionTypeVariable2] from typing import TypeVar, Tuple, Generic t = TypeVar('t') class A(Generic[t]): pass def f(x: Tuple[int, t]) -> None: pass [out] MypyFile:1( 
ImportFrom:1(typing, [TypeVar, Tuple, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( A TypeVars( t) PassStmt:3()) FuncDef:4( f Args( Var(x)) def [t] (x: Tuple[builtins.int, t`-1]) Block:4( PassStmt:4()))) [case testNestedGenericFunctionTypeVariable3] from typing import TypeVar, Callable, Generic t = TypeVar('t') class A(Generic[t]): pass def f(x: Callable[[int, t], int]) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Callable, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( A TypeVars( t) PassStmt:3()) FuncDef:4( f Args( Var(x)) def [t] (x: def (builtins.int, t`-1) -> builtins.int) Block:4( PassStmt:4()))) [case testNestedGenericFunctionTypeVariable4] from typing import TypeVar, Callable, Generic t = TypeVar('t') class A(Generic[t]): pass def f(x: Callable[[], t]) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Callable, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( A TypeVars( t) PassStmt:3()) FuncDef:4( f Args( Var(x)) def [t] (x: def () -> t`-1) Block:4( PassStmt:4()))) [case testGenericFunctionTypeVariableInReturnType] from typing import TypeVar t = TypeVar('t') def f() -> t: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) FuncDef:3( f def [t] () -> t`-1 Block:3( PassStmt:3()))) [case testSelfType] class A: def f(self, o: object) -> None: pass [out] MypyFile:1( ClassDef:1( A FuncDef:2( f Args( Var(self) Var(o)) def (self: __main__.A, o: builtins.object) Block:2( PassStmt:2())))) [case testNestedGenericFunction] from typing import TypeVar t = TypeVar('t') def f() -> None: def g() -> t: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) FuncDef:3( f def () Block:3( FuncDef:4( g def [t] () -> t`-1 Block:4( PassStmt:4()))))) [case testClassTvar] from typing import TypeVar, Generic t = TypeVar('t') class c(Generic[t]): def f(self) -> t: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:3( NameExpr(t* [__main__.t]) TypeVarExpr:3()) ClassDef:5( c TypeVars( t) FuncDef:6( f Args( Var(self)) def (self: __main__.c[t`1]) -> t`1 Block:6( PassStmt:6())))) [case testClassTvar2] from typing import TypeVar, Generic t = TypeVar('t') s = TypeVar('s') class c(Generic[t, s]): def f(self, x: s) -> t: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:3( NameExpr(t* [__main__.t]) TypeVarExpr:3()) AssignmentStmt:4( NameExpr(s* [__main__.s]) TypeVarExpr:4()) ClassDef:6( c TypeVars( t s) FuncDef:7( f Args( Var(self) Var(x)) def (self: __main__.c[t`1, s`2], x: s`2) -> t`1 Block:7( PassStmt:7())))) [case testGenericBaseClass] from typing import TypeVar, Generic t = TypeVar('t') class d(Generic[t]): pass class c(d[t], Generic[t]): pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( d TypeVars( t) PassStmt:3()) ClassDef:4( c TypeVars( t) BaseType( __main__.d[t`1]) PassStmt:4())) [case testTupleType] from typing import Tuple t = None # type: tuple t1 = None # type: Tuple[object] t2 = None # type: Tuple[int, object] [builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [Tuple]) AssignmentStmt:2( NameExpr(t [__main__.t]) NameExpr(None [builtins.None]) builtins.tuple[Any]) AssignmentStmt:3( NameExpr(t1 [__main__.t1]) NameExpr(None [builtins.None]) Tuple[builtins.object]) AssignmentStmt:4( 
NameExpr(t2 [__main__.t2]) NameExpr(None [builtins.None]) Tuple[builtins.int, builtins.object])) [case testVariableLengthTuple] from typing import Tuple t = None # type: Tuple[int, ...] [builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [Tuple]) AssignmentStmt:2( NameExpr(t [__main__.t]) NameExpr(None [builtins.None]) builtins.tuple[builtins.int])) [case testInvalidTupleType] from typing import Tuple t = None # type: Tuple[int, str, ...] # E: Unexpected '...' [out] [case testFunctionTypes] from typing import Callable f = None # type: Callable[[object, int], str] g = None # type: Callable[[], None] [out] MypyFile:1( ImportFrom:1(typing, [Callable]) AssignmentStmt:2( NameExpr(f [__main__.f]) NameExpr(None [builtins.None]) def (builtins.object, builtins.int) -> builtins.str) AssignmentStmt:3( NameExpr(g [__main__.g]) NameExpr(None [builtins.None]) def ())) [case testOverloadedFunction] from typing import overload, Any @overload def f(a: object) -> int: a @overload def f(a: str) -> object: a def f(a: Any) -> Any: return a [out] MypyFile:1( ImportFrom:1(typing, [overload, Any]) OverloadedFuncDef:2( FuncDef:7( f Args( Var(a)) def (a: Any) -> Any Block:7( ReturnStmt:7( NameExpr(a [l])))) Overload(def (a: builtins.object) -> builtins.int, \ def (a: builtins.str) -> builtins.object) Decorator:2( Var(f) NameExpr(overload [typing.overload]) FuncDef:3( f Args( Var(a)) def (a: builtins.object) -> builtins.int Block:3( ExpressionStmt:3( NameExpr(a [l]))))) Decorator:4( Var(f) NameExpr(overload [typing.overload]) FuncDef:5( f Args( Var(a)) def (a: builtins.str) -> builtins.object Block:5( ExpressionStmt:5( NameExpr(a [l]))))))) [case testReferenceToOverloadedFunction] from typing import overload @overload def f() -> None: pass @overload def f(x: int) -> None: pass def f(*args) -> None: pass x = f [out] MypyFile:1( ImportFrom:1(typing, [overload]) OverloadedFuncDef:2( FuncDef:7( f def (*args: Any) VarArg( Var(args)) Block:7( PassStmt:7())) Overload(def (), def (x: builtins.int)) Decorator:2( Var(f) NameExpr(overload [typing.overload]) FuncDef:3( f def () Block:3( PassStmt:3()))) Decorator:4( Var(f) NameExpr(overload [typing.overload]) FuncDef:5( f Args( Var(x)) def (x: builtins.int) Block:5( PassStmt:5())))) AssignmentStmt:9( NameExpr(x* [__main__.x]) NameExpr(f [__main__.f]))) [case testNestedOverloadedFunction] from typing import overload def f(): @overload def g(): pass @overload def g(x): pass def g(*args): pass y = g [out] MypyFile:1( ImportFrom:1(typing, [overload]) FuncDef:2( f Block:2( OverloadedFuncDef:3( FuncDef:8( g VarArg( Var(args)) Block:8( PassStmt:8())) Overload(def () -> Any, def (x: Any) -> Any) Decorator:3( Var(g) NameExpr(overload [typing.overload]) FuncDef:4( g Block:4( PassStmt:4()))) Decorator:5( Var(g) NameExpr(overload [typing.overload]) FuncDef:6( g Args( Var(x)) Block:6( PassStmt:6())))) AssignmentStmt:10( NameExpr(y* [l]) NameExpr(g [l]))))) [case testImplicitGenericTypeArgs] from typing import TypeVar, Generic t = TypeVar('t') s = TypeVar('s') class A(Generic[t, s]): pass x = None # type: A [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) AssignmentStmt:3( NameExpr(s* [__main__.s]) TypeVarExpr:3()) ClassDef:4( A TypeVars( t s) PassStmt:4()) AssignmentStmt:5( NameExpr(x [__main__.x]) NameExpr(None [builtins.None]) __main__.A[Any, Any])) [case testImplicitTypeArgsAndGenericBaseClass] from typing import TypeVar, Generic t = TypeVar('t') s = TypeVar('s') class B(Generic[s]): pass class A(B, 
Generic[t]): pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) AssignmentStmt:3( NameExpr(s* [__main__.s]) TypeVarExpr:3()) ClassDef:4( B TypeVars( s) PassStmt:4()) ClassDef:5( A TypeVars( t) BaseType( __main__.B[Any]) PassStmt:5())) [case testTypeApplication] from typing import TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass x = A[int]() [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( A TypeVars( t) PassStmt:3()) AssignmentStmt:4( NameExpr(x* [__main__.x]) CallExpr:4( TypeApplication:4( NameExpr(A [__main__.A]) Types( builtins.int)) Args()))) [case testTypeApplicationWithTwoTypeArgs] from typing import TypeVar, Generic, Any t = TypeVar('t') s = TypeVar('s') class A(Generic[t, s]): pass x = A[int, Any]() [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic, Any]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) AssignmentStmt:3( NameExpr(s* [__main__.s]) TypeVarExpr:3()) ClassDef:4( A TypeVars( t s) PassStmt:4()) AssignmentStmt:5( NameExpr(x* [__main__.x]) CallExpr:5( TypeApplication:5( NameExpr(A [__main__.A]) Types( builtins.int Any)) Args()))) [case testFunctionTypeApplication] from typing import TypeVar t = TypeVar('t') def f(x: t) -> None: pass f[int](1) [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) FuncDef:3( f Args( Var(x)) def [t] (x: t`-1) Block:3( PassStmt:3())) ExpressionStmt:4( CallExpr:4( TypeApplication:4( NameExpr(f [__main__.f]) Types( builtins.int)) Args( IntExpr(1))))) [case testTypeApplicationWithStringLiteralType] from typing import TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass A['int']() [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(t* [__main__.t]) TypeVarExpr:2()) ClassDef:3( A TypeVars( t) PassStmt:3()) ExpressionStmt:4( CallExpr:4( TypeApplication:4( NameExpr(A [__main__.A]) Types( builtins.int)) Args()))) [case testVarArgsAndKeywordArgs] def g(*x: int, y: str = ''): pass [out] MypyFile:1( FuncDef:1( g MaxPos(0) Args( default( Var(y) StrExpr())) def (*x: builtins.int, *, y: builtins.str =) -> Any VarArg( Var(x)) Block:1( PassStmt:1()))) [case testQualifiedGeneric] from typing import TypeVar import typing T = TypeVar('T') class A(typing.Generic[T]): pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) Import:2(typing) AssignmentStmt:3( NameExpr(T* [__main__.T]) TypeVarExpr:3()) ClassDef:4( A TypeVars( T) PassStmt:4())) [case testQualifiedTypevar] import typing T = typing.TypeVar('T') def f(x: T) -> T: pass [out] MypyFile:1( Import:1(typing) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2()) FuncDef:3( f Args( Var(x)) def [T] (x: T`-1) -> T`-1 Block:3( PassStmt:3()))) [case testAliasedTypevar] from typing import TypeVar as tv T = tv('T') def f(x: T) -> T: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar : tv]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2()) FuncDef:3( f Args( Var(x)) def [T] (x: T`-1) -> T`-1 Block:3( PassStmt:3()))) [case testLocalTypevar] from typing import TypeVar def f(): T = TypeVar('T') def g(x: T) -> T: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) FuncDef:2( f Block:2( AssignmentStmt:3( NameExpr(T* [l]) TypeVarExpr:3()) FuncDef:4( g Args( Var(x)) def [T] (x: T`-1) -> T`-1 Block:4( PassStmt:4()))))) [case testClassLevelTypevar] from typing import TypeVar class A: T = TypeVar('T') def g(self, x: T) -> T: 
pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) ClassDef:2( A AssignmentStmt:3( NameExpr(T* [m]) TypeVarExpr:3()) FuncDef:4( g Args( Var(self) Var(x)) def [T] (self: __main__.A, x: T`-1) -> T`-1 Block:4( PassStmt:4())))) [case testImportTypevar] from typing import Generic from _m import T class A(Generic[T]): y = None # type: T [file _m.py] from typing import TypeVar T = TypeVar('T') [out] MypyFile:1( ImportFrom:1(typing, [Generic]) ImportFrom:2(_m, [T]) ClassDef:3( A TypeVars( T) AssignmentStmt:4( NameExpr(y [m]) NameExpr(None [builtins.None]) T`1))) [case testQualifiedReferenceToTypevarInClass] from typing import Generic import _m class A(Generic[_m.T]): a = None # type: _m.T def f(self, x: _m.T): b = None # type: _m.T [file _m.py] from typing import TypeVar T = TypeVar('T') [out] MypyFile:1( ImportFrom:1(typing, [Generic]) Import:2(_m) ClassDef:3( A TypeVars( _m.T) AssignmentStmt:4( NameExpr(a [m]) NameExpr(None [builtins.None]) _m.T`1) FuncDef:5( f Args( Var(self) Var(x)) def (self: __main__.A[_m.T`1], x: _m.T`1) -> Any Block:5( AssignmentStmt:6( NameExpr(b [l]) NameExpr(None [builtins.None]) _m.T`1))))) [case testQualifiedReferenceToTypevarInFunctionSignature] import _m def f(x: _m.T) -> None: a = None # type: _m.T [file _m.py] from typing import TypeVar T = TypeVar('T') [out] MypyFile:1( Import:1(_m) FuncDef:2( f Args( Var(x)) def [_m.T] (x: _m.T`-1) Block:2( AssignmentStmt:3( NameExpr(a [l]) NameExpr(None [builtins.None]) _m.T`-1)))) [case testFunctionCommentAnnotation] from typing import Any def f(x): # type: (int) -> Any x = 1 [out] MypyFile:1( ImportFrom:1(typing, [Any]) FuncDef:2( f Args( Var(x)) def (x: builtins.int) -> Any Block:2( AssignmentStmt:3( NameExpr(x [l]) IntExpr(1))))) [case testMethodCommentAnnotation] import typing class A: def f(self, x): # type: (int) -> str x = 1 [out] MypyFile:1( Import:1(typing) ClassDef:2( A FuncDef:3( f Args( Var(self) Var(x)) def (self: __main__.A, x: builtins.int) -> builtins.str Block:3( AssignmentStmt:4( NameExpr(x [l]) IntExpr(1)))))) [case testTypevarWithValues] from typing import TypeVar, Any T = TypeVar('T', int, str) S = TypeVar('S', Any, int, str) [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Any]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( Values( builtins.int builtins.str))) AssignmentStmt:3( NameExpr(S* [__main__.S]) TypeVarExpr:3( Values( Any builtins.int builtins.str)))) [case testTypevarWithValuesAndVariance] from typing import TypeVar T = TypeVar('T', int, str, covariant=True) [builtins fixtures/bool.pyi] [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( Variance(COVARIANT) Values( builtins.int builtins.str)))) [case testTypevarWithBound] from typing import TypeVar T = TypeVar('T', bound=int) [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( UpperBound(builtins.int)))) [case testGenericFunctionWithValueSet] from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( Values( builtins.int builtins.str))) FuncDef:3( f Args( Var(x)) def [T in (builtins.int, builtins.str)] (x: T`-1) -> T`-1 Block:3( PassStmt:3()))) [case testGenericClassWithValueSet] from typing import TypeVar, Generic T = TypeVar('T', int, str) class C(Generic[T]): pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( Values( 
builtins.int builtins.str))) ClassDef:3( C TypeVars( T in (builtins.int, builtins.str)) PassStmt:3())) [case testGenericFunctionWithBound] from typing import TypeVar T = TypeVar('T', bound=int) def f(x: T) -> T: pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( UpperBound(builtins.int))) FuncDef:3( f Args( Var(x)) def [T <: builtins.int] (x: T`-1) -> T`-1 Block:3( PassStmt:3()))) [case testGenericClassWithBound] from typing import TypeVar, Generic T = TypeVar('T', bound=int) class C(Generic[T]): pass [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Generic]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( UpperBound(builtins.int))) ClassDef:3( C TypeVars( T <: builtins.int) PassStmt:3())) [case testSimpleDucktypeDecorator] from typing import _promote @_promote(str) class S: pass [out] MypyFile:1( ImportFrom:1(typing, [_promote]) ClassDef:2( S Promote(builtins.str) Decorators( PromoteExpr:2(builtins.str)) PassStmt:3())) [case testUnionType] from typing import Union def f(x: Union[int, str]) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Union]) FuncDef:2( f Args( Var(x)) def (x: Union[builtins.int, builtins.str]) Block:2( PassStmt:2()))) [case testUnionTypeWithNoneItem] from typing import Union def f(x: Union[int, None]) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Union]) FuncDef:2( f Args( Var(x)) def (x: Union[builtins.int, builtins.None]) Block:2( PassStmt:2()))) [case testUnionTypeWithNoneItemAndTwoItems] from typing import Union def f(x: Union[int, None, str]) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Union]) FuncDef:2( f Args( Var(x)) def (x: Union[builtins.int, builtins.None, builtins.str]) Block:2( PassStmt:2()))) [case testUnionTypeWithSingleItem] from typing import Union def f(x: Union[int]) -> None: pass [out] MypyFile:1( ImportFrom:1(typing, [Union]) FuncDef:2( f Args( Var(x)) def (x: builtins.int) Block:2( PassStmt:2()))) [case testOptionalTypes] from typing import Optional x = 1 # type: Optional[int] [out] MypyFile:1( ImportFrom:1(typing, [Optional]) AssignmentStmt:2( NameExpr(x [__main__.x]) IntExpr(1) Union[builtins.int, builtins.None])) [case testInvalidOptionalType] from typing import Optional x = 1 # type: Optional[int, str] # E: Optional[...] must have exactly one type argument y = 1 # type: Optional # E: Optional[...] must have exactly one type argument [out] [case testCoAndContravariantTypeVar] from typing import TypeVar T = TypeVar('T', covariant=True) S = TypeVar('S', contravariant=True) [builtins fixtures/bool.pyi] [out] MypyFile:1( ImportFrom:1(typing, [TypeVar]) AssignmentStmt:2( NameExpr(T* [__main__.T]) TypeVarExpr:2( Variance(COVARIANT))) AssignmentStmt:3( NameExpr(S* [__main__.S]) TypeVarExpr:3( Variance(CONTRAVARIANT)))) [case testTupleExpressionAsType] def f(x: (int, int)) -> None: pass [out] main:1: error: Invalid tuple literal type [case tesQualifiedTypeNameBasedOnAny] from typing import Any x = 0 # type: Any z = 0 # type: x.y [out] MypyFile:1( ImportFrom:1(typing, [Any]) AssignmentStmt:2( NameExpr(x [__main__.x]) IntExpr(0) Any) AssignmentStmt:3( NameExpr(z [__main__.z]) IntExpr(0) Any)) mypy-0.560/test-data/unit/stubgen.test0000644€tŠÔÚ€2›s®0000002774313215007206024122 0ustar jukkaDROPBOX\Domain Users00000000000000[case testEmptyFile] [out] [case testSingleFunction] def f(): x = 1 [out] def f(): ... [case testTwoFunctions] def f(a, b): x = 1 def g(arg): pass [out] def f(a, b): ... def g(arg): ... [case testDefaultArgInt] def f(a, b=2): ... 
def g(b=-1, c=0): ... [out] def f(a, b: int = ...): ... def g(b: int = ..., c: int = ...): ... [case testDefaultArgNone] def f(x=None): ... [out] from typing import Any, Optional def f(x: Optional[Any] = ...): ... [case testDefaultArgBool] def f(x=True, y=False): ... [out] def f(x: bool = ..., y: bool = ...): ... [case testDefaultArgStr] def f(x='foo'): ... [out] def f(x: str = ...): ... [case testDefaultArgBytes] def f(x=b'foo'): ... [out] def f(x: bytes = ...): ... [case testDefaultArgFloat] def f(x=1.2): ... [out] def f(x: float = ...): ... [case testDefaultArgOther] def f(x=ord): ... [out] from typing import Any def f(x: Any = ...): ... [case testPreserveFunctionAnnotation] def f(x: Foo) -> Bar: ... [out] def f(x: Foo) -> Bar: ... [case testPreserveVarAnnotation] x: Foo [out] x: Foo [case testPreserveVarAnnotationWithoutQuotes] x: 'Foo' [out] x: Foo [case testVarArgs] def f(x, *y): ... [out] def f(x, *y): ... [case testKwVarArgs] def f(x, **y): ... [out] def f(x, **y): ... [case testClass] class A: def f(self, x): x = 1 def g(): ... [out] class A: def f(self, x): ... def g(): ... [case testVariable] x = 1 [out] x: int [case testAnnotatedVariable] x: int = 1 [out] x: int [case testAnnotatedVariableGeneric] x: Foo[int, str] = ... [out] x: Foo[int, str] [case testAnnotatedVariableOldSyntax] x = 1 # type: int [out] x: int [case testAnnotatedVariableNone] x: None [out] x: None [case testAnnotatedVariableNoneOldSyntax] x = None # type: None [out] x: None [case testMultipleVariable] x = y = 1 [out] x: int y: int [case testClassVariable] class C: x = 1 [out] class C: x: int = ... [case testSelfAssignment] class C: def __init__(self): self.x = 1 x.y = 2 [out] class C: x: int = ... def __init__(self) -> None: ... [case testSelfAndClassBodyAssignment] x = 1 class C: x = 1 def __init__(self): self.x = 1 self.x = 1 [out] x: int class C: x: int = ... def __init__(self) -> None: ... [case testEmptyClass] class A: ... [out] class A: ... [case testSkipPrivateFunction] def _f(): ... def g(): ... [out] def g(): ... [case testIncludePrivateFunction] # flags: --include-private def _f(): ... def g(): ... [out] def _f(): ... def g(): ... [case testSkipPrivateMethod] class A: def _f(self): ... [out] class A: ... [case testIncludePrivateMethod] # flags: --include-private class A: def _f(self): ... [out] class A: def _f(self): ... [case testSkipPrivateVar] _x = 1 class A: _y = 1 [out] class A: ... [case testIncludePrivateVar] # flags: --include-private _x = 1 class A: _y = 1 [out] _x: int class A: _y: int = ... [case testSpecialInternalVar] __all__ = [] __author__ = '' __version__ = '' [out] [case testBaseClass] class A: ... class B(A): ... [out] class A: ... class B(A): ... [case testDecoratedFunction] @decorator def foo(x): ... [out] def foo(x): ... [case testMultipleAssignment] x, y = 1, 2 [out] from typing import Any x: Any y: Any [case testMultipleAssignmentAnnotated] x, y = 1, "2" # type: int, str [out] x: int y: str [case testMultipleAssignment2] [x, y] = 1, 2 [out] from typing import Any x: Any y: Any [case testKeywordOnlyArg] def f(x, *, y=1): ... def g(x, *, y=1, z=2): ... [out] def f(x, *, y: int = ...): ... def g(x, *, y: int = ..., z: int = ...): ... [case testProperty] class A: @property def f(self): return 1 @f.setter def f(self, x): ... [out] class A: @property def f(self): ... @f.setter def f(self, x): ... [case testStaticMethod] class A: @staticmethod def f(x): ... [out] class A: @staticmethod def f(x): ... [case testClassMethod] class A: @classmethod def f(cls): ... 
[out] class A: @classmethod def f(cls): ... [case testIfMainCheck] def a(): ... if __name__ == '__main__': x = 1 def f(): ... def b(): ... [out] def a(): ... def b(): ... [case testImportStar] from x import * from a.b import * def f(): ... [out] from x import * from a.b import * def f(): ... [case testNoSpacesBetweenEmptyClasses] class X: def g(self): ... class A: ... class B: ... class C: def f(self): ... [out] class X: def g(self): ... class A: ... class B: ... class C: def f(self): ... [case testExceptionBaseClasses] class A(Exception): ... class B(ValueError): ... [out] class A(Exception): ... class B(ValueError): ... [case testOmitSomeSpecialMethods] class A: def __str__(self): ... def __repr__(self): ... def __eq__(self): ... def __getstate__(self): ... def __setstate__(self, state): ... [out] class A: def __eq__(self): ... [case testOmitDefsNotInAll_import] __all__ = [] + ['f'] def f(): ... def g(): ... [out] def f(): ... [case testVarDefsNotInAll_import] __all__ = [] + ['f', 'g'] def f(): ... x = 1 y = 1 def g(): ... [out] def f(): ... def g(): ... [case testIncludeClassNotInAll_import] __all__ = [] + ['f'] def f(): ... class A: ... [out] def f(): ... class A: ... [case testAllAndClass_import] __all__ = ['A'] class A: x = 1 def f(self): ... [out] class A: x: int = ... def f(self): ... [case testSkipMultiplePrivateDefs] class A: ... _x = 1 _y = 1 _z = 1 class C: ... [out] class A: ... class C: ... [case testIncludeMultiplePrivateDefs] # flags: --include-private class A: ... _x = 1 _y = 1 _z = 1 class C: ... [out] class A: ... _x: int _y: int _z: int class C: ... [case testIncludeFromImportIfInAll_import] from re import match, search, sub __all__ = ['match', 'sub', 'x'] x = 1 [out] from re import match as match, sub as sub x: int [case testExportModule_import] import re __all__ = ['re', 'x'] x = 1 y = 2 [out] import re as re x: int [case testExportModule_import] import re __all__ = ['re', 'x'] x = 1 y = 2 [out] import re as re x: int [case testExportModuleAs_import] import re as rex __all__ = ['rex', 'x'] x = 1 y = 2 [out] import re as rex x: int [case testExportModuleInPackage_import] import urllib.parse as p __all__ = ['p'] [out] import urllib.parse as p [case testExportPackageOfAModule_import] import urllib.parse __all__ = ['urllib'] [out] import urllib as urllib [case testRelativeImportAll] from .x import * [out] from .x import * [case testCommentForUndefinedName_import] __all__ = ['f', 'x', 'C', 'g'] def f(): ... x = 1 class C: def g(self): ... [out] def f(): ... x: int class C: def g(self): ... # Names in __all__ with no definition: # g [case testIgnoreSlots] class A: __slots__ = () [out] class A: ... [case testSkipPrivateProperty] class A: @property def _foo(self): ... [out] class A: ... [case testIncludePrivateProperty] # flags: --include-private class A: @property def _foo(self): ... [out] class A: @property def _foo(self): ... [case testSkipPrivateStaticAndClassMethod] class A: @staticmethod def _foo(): ... @classmethod def _bar(cls): ... [out] class A: ... [case testIncludePrivateStaticAndClassMethod] # flags: --include-private class A: @staticmethod def _foo(): ... @classmethod def _bar(cls): ... [out] class A: @staticmethod def _foo(): ... @classmethod def _bar(cls): ... 
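-- Note: the "# flags: --include-private" line is what separates each Include* case from
-- its Skip* counterpart above; without it, names with a single leading underscore are
-- omitted from the generated stub. A small recap of that behaviour, taken from the
-- testSkipPrivateVar / testIncludePrivateVar cases earlier in this file (illustration
-- only, not a new checked case):
--
--   _x = 1        # source line: dropped from the stub by default
--   _x: int       # stub line emitted only when private names are included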
[case testNamedtuple] import collections, x X = collections.namedtuple('X', ['a', 'b']) [out] from collections import namedtuple X = namedtuple('X', ['a', 'b']) [case testNamedtupleAltSyntax] from collections import namedtuple, x X = namedtuple('X', 'a b') [out] from collections import namedtuple X = namedtuple('X', 'a b') [case testNamedtupleWithUnderscore] from collections import namedtuple as _namedtuple def f(): ... X = _namedtuple('X', 'a b') def g(): ... [out] from collections import namedtuple def f(): ... X = namedtuple('X', 'a b') def g(): ... [case testNamedtupleBaseClass] import collections, x _X = collections.namedtuple('_X', ['a', 'b']) class Y(_X): ... [out] from collections import namedtuple _X = namedtuple('_X', ['a', 'b']) class Y(_X): ... [case testArbitraryBaseClass] import x class D(x.C): ... [out] import x class D(x.C): ... [case testArbitraryBaseClass] import x.y class D(x.y.C): ... [out] import x.y class D(x.y.C): ... [case testUnqualifiedArbitraryBaseClassWithNoDef] class A(int): ... [out] class A(int): ... [case testUnqualifiedArbitraryBaseClass] from x import X class A(X): ... [out] from x import X class A(X): ... [case testUnqualifiedArbitraryBaseClassWithImportAs] from x import X as _X class A(_X): ... [out] from x import X as _X class A(_X): ... [case testGenericClass] class D(Generic[T]): ... [out] class D(Generic[T]): ... [case testObjectBaseClass] class A(object): ... [out] class A: ... [case testEmptyLines] def x(): ... def f(): class A: def f(self): self.x = 1 def g(): ... [out] def x(): ... def f(): ... def g(): ... [case testNestedClass] class A: class B: x = 1 def f(self): ... def g(self): ... [out] class A: class B: x: int = ... def f(self): ... def g(self): ... [case testExportViaRelativeImport] from .api import get [out] from .api import get [case testExportViaRelativePackageImport] from .packages.urllib3.contrib import parse [out] from .packages.urllib3.contrib import parse [case testNoExportViaRelativeImport] from . import get [out] [case testRelativeImportAndBase] from .x import X class A(X): pass [out] from .x import X class A(X): ... [case testDuplicateDef] def syslog(a): pass def syslog(a): pass [out] def syslog(a): ... [case testAsyncAwait_fast_parser] async def f(a): x = await y [out] def f(a): ... [case testInferOptionalOnlyFunc] class A: x = None def __init__(self, a=None): self.x = [] def method(self, a=None): self.x = [] [out] from typing import Any, Optional class A: x: Any = ... def __init__(self, a: Optional[Any] = ...) -> None: ... def method(self, a: Optional[Any] = ...): ... 
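-- Note: as the cases above show, stubgen infers Optional only from a literal None
-- default: int/str/bool/bytes/float defaults keep their concrete type, None becomes
-- Optional[Any], and any other default value falls back to Any (testDefaultArgOther).
-- Recapping testDefaultArgNone from earlier in this file (illustration only, not a new
-- checked case):
--
--   def f(x=None): ...                      # source
--   def f(x: Optional[Any] = ...): ...      # generated stub, plus the typing import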
[case testAnnotationImportsFrom] import foo from collection import defaultdict x: defaultdict [out] from collection import defaultdict x: defaultdict [case testAnnotationImports] import foo import collection x: collection.defaultdict [out] import collection x: collection.defaultdict [case testAnnotationImports] from typing import List import collection x: List[collection.defaultdict] [out] import collection from typing import List x: List[collection.defaultdict] [case testAnnotationFwRefs] x: C class C: attr: C y: C [out] x: C class C: attr: C y: C [case testTypeVarPreserved] tv = TypeVar('tv') [out] from typing import TypeVar tv = TypeVar('tv') [case testTypeVarArgsPreserved] tv = TypeVar('tv', int, str) [out] from typing import TypeVar tv = TypeVar('tv', int, str) [case testTypeVarNamedArgsPreserved] tv = TypeVar('tv', bound=bool, covariant=True) [out] from typing import TypeVar tv = TypeVar('tv', bound=bool, covariant=True) [case testTypeAliasPreserved] alias = str [out] alias = str [case testDeepTypeAliasPreserved] alias = Dict[str, List[str]] [out] alias = Dict[str, List[str]] [case testDeepGenericTypeAliasPreserved] from typing import TypeVar T = TypeVar('T') alias = Union[T, List[T]] [out] from typing import TypeVar T = TypeVar('T') alias = Union[T, List[T]] [case testEllipsisAliasPreserved] alias = Tuple[int, ...] [out] alias = Tuple[int, ...] [case testCallableAliasPreserved] alias1 = Callable[..., int] alias2 = Callable[[str, bool], None] [out] alias1 = Callable[..., int] alias2 = Callable[[str, bool], None] [case testAliasPullsImport] from module import Container alias = Container[Any] [out] from module import Container from typing import Any alias = Container[Any] [case testAliasOnlyToplevel] class Foo: alias = str [out] from typing import Any class Foo: alias: Any = ... [case testAliasExceptions] noalias1 = None noalias2 = ... noalias3 = True [out] from typing import Any noalias1: Any noalias2: Any noalias3: bool -- More features/fixes: -- do not export deleted names mypy-0.560/test-data/unit/typexport-basic.test0000644€tŠÔÚ€2›s®0000005620513215007206025603 0ustar jukkaDROPBOX\Domain Users00000000000000-- Test cases for exporting node types from the type checker. -- -- Each test case consists of at least two sections. -- The first section contains [case NAME-skip] followed by the input code, -- while the second section contains [out] followed by the output from the type -- checker. -- -- The first line of input code should be a regexp in comment that describes -- the information to dump (prefix with ##). The regexp is matched against -- the following items: -- -- * each name of an expression node -- * each type string of a node (e.g. OpExpr) -- -- Lines starting with "--" in this file will be ignored. -- Expressions -- ----------- [case testConstructorCall] import typing A() B() class A: pass class B: pass [out] CallExpr(2) : A NameExpr(2) : def () -> A CallExpr(3) : B NameExpr(3) : def () -> B [case testLiterals] import typing 5 2.3 'foo' [builtins fixtures/primitives.pyi] [out] IntExpr(2) : builtins.int FloatExpr(3) : builtins.float StrExpr(4) : builtins.str [case testNameExpression] a = None # type: A a # node def f(aa: 'A') -> None: b = None # type: B aa # node b # node class A: def g(self) -> None: self # node class B: pass [out] NameExpr(3) : A NameExpr(6) : A NameExpr(7) : B NameExpr(10) : A [case testEllipsis] import typing ... 
[out] EllipsisExpr(2) : builtins.ellipsis [case testMemberAccess] ## MemberExpr|CallExpr a = None # type: A a.m a.f a.f() class A: m = None # type: A def f(self) -> 'B': pass class B: pass [out] MemberExpr(4) : A MemberExpr(5) : def () -> B CallExpr(6) : B MemberExpr(6) : def () -> B [case testCastExpression] ## CastExpr|[a-z] from typing import Any, cast d = None # type: Any b = None # type: B class A: pass class B(A): pass cast(A, d) cast(A, b) cast(B, b) [out] CastExpr(7) : A NameExpr(7) : Any CastExpr(8) : A NameExpr(8) : B CastExpr(9) : B NameExpr(9) : B [case testArithmeticOps] ## OpExpr import typing a = 1 + 2 1.2 * 3 2.2 - 3 1 / 2 [file builtins.py] class object: def __init__(self) -> None: pass class function: pass class int: def __add__(self, x: int) -> int: pass def __truediv__(self, x: int) -> float: pass class float: def __mul__(self, x: int) -> float: pass def __sub__(self, x: int) -> float: pass class type: pass class str: pass [out] OpExpr(3) : builtins.int OpExpr(4) : builtins.float OpExpr(5) : builtins.float OpExpr(6) : builtins.float [case testComparisonOps] ## ComparisonExpr import typing 1 == object() 1 == 2 2 < 3 1 < 2 < 3 8 > 3 4 < 6 > 2 [file builtins.py] class object: def __init__(self) -> None: pass class int: def __eq__(self, x: object) -> bool: pass def __lt__(self, x: int) -> bool: pass def __gt__(self, x: int) -> int: pass class bool: pass class type: pass class function: pass class str: pass [out] ComparisonExpr(3) : builtins.bool ComparisonExpr(4) : builtins.bool ComparisonExpr(5) : builtins.bool ComparisonExpr(6) : builtins.bool ComparisonExpr(7) : builtins.int ComparisonExpr(8) : builtins.object [case testBooleanOps] ## OpExpr|UnaryExpr import typing a = 1 a and a a or a not a [builtins fixtures/bool.pyi] [out] OpExpr(4) : builtins.int OpExpr(5) : builtins.int UnaryExpr(6) : builtins.bool [case testBooleanOpsOnBools] ## OpExpr|UnaryExpr import typing a = bool() a and a a or a not a [builtins fixtures/bool.pyi] [out] OpExpr(4) : builtins.bool OpExpr(5) : builtins.bool UnaryExpr(6) : builtins.bool [case testFunctionCall] ## CallExpr from typing import Tuple f( A(), B()) class A: pass class B: pass def f(a: A, b: B) -> Tuple[A, B]: pass [builtins fixtures/tuple-simple.pyi] [out] CallExpr(3) : Tuple[A, B] CallExpr(4) : A CallExpr(5) : B -- Statements -- ---------- [case testSimpleAssignment] from typing import Any a = None # type: A b = a # type: Any b = a a = b class A: pass [out] NameExpr(3) : A NameExpr(4) : A NameExpr(4) : Any NameExpr(5) : A NameExpr(5) : Any [case testMemberAssignment] from typing import Any class A: a = None # type: A b = None # type: Any def f(self) -> None: self.b = self.a self.a.a = self.b [out] MemberExpr(6) : A MemberExpr(6) : Any NameExpr(6) : A NameExpr(6) : A MemberExpr(7) : A MemberExpr(7) : A MemberExpr(7) : A NameExpr(7) : A NameExpr(7) : A [case testIf] a = None # type: bool if a: 1 elif not a: 1 [builtins fixtures/bool.pyi] [out] NameExpr(3) : builtins.bool IntExpr(4) : builtins.int NameExpr(5) : builtins.bool UnaryExpr(5) : builtins.bool IntExpr(6) : builtins.int [case testWhile] a = None # type: bool while a: a [builtins fixtures/bool.pyi] [out] NameExpr(3) : builtins.bool NameExpr(4) : builtins.bool -- Simple type inference -- --------------------- [case testInferSingleType] import typing x = () [builtins fixtures/primitives.pyi] [out] NameExpr(2) : Tuple[] TupleExpr(2) : Tuple[] [case testInferTwoTypes] ## NameExpr import typing (s, i) = 'x', 1 [builtins fixtures/primitives.pyi] [out] NameExpr(3) : builtins.str 
NameExpr(4) : builtins.int [case testInferSingleLocalVarType] import typing def f() -> None: x = () [builtins fixtures/primitives.pyi] [out] NameExpr(3) : Tuple[] TupleExpr(3) : Tuple[] -- Basic generics -- -------------- [case testImplicitBoundTypeVarsForMethod] ## MemberExpr from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def f(self) -> T: pass class B: pass def g() -> None: a = None # type: A[B] f = a.f [out] MemberExpr(9) : def () -> B [case testImplicitBoundTypeVarsForSelfMethodReference] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def f(self) -> T: return self.f() [out] CallExpr(5) : T`1 MemberExpr(5) : def () -> T`1 NameExpr(5) : A[T`1] [case testGenericFunctionCallWithTypeApp-skip] ## CallExpr|TypeApplication|NameExpr from typing import Any, TypeVar, Tuple T = TypeVar('T') class A: pass f[A](A()) f[Any](A()) def f(a: T) -> Tuple[T, T]: pass [builtins fixtures/tuple.pyi] [out] CallExpr(5) : A CallExpr(5) : Tuple[A, A] NameExpr(5) : def () -> A NameExpr(5) : def (a: A) -> Tuple[A, A] TypeApplication(5) : def (a: A) -> Tuple[A, A] CallExpr(6) : A CallExpr(6) : Tuple[Any, Any] NameExpr(6) : def () -> A NameExpr(6) : def (a: Any) -> Tuple[Any, Any] TypeApplication(6) : def (a: Any) -> Tuple[Any, Any] -- NOTE: Type applications are not supported for generic methods, so the -- following test cases are commented out. --[case testGenericMethodCallWithTypeApp] --## CallExpr|MemberExpr|TypeApplication --from typing import Any, TypeVar, Tuple --T = TypeVar('T') --class A: -- def f(self, a: T) -> Tuple[T, T]: pass --a.f[A](a) --a.f[Any](a) --a = None # type: A --[builtins fixtures/tuple.py] --[out] --CallExpr(2) : Tuple[A, A] --MemberExpr(2) : def (A a) -> Tuple[A, A] --TypeApplication(2) : def (A a) -> Tuple[A, A] --CallExpr(3) : Tuple[Any, Any] --MemberExpr(3) : def (any a) -> Tuple[Any, Any] --TypeApplication(3) : def (any a) -> Tuple[Any, Any] --[case testGenericMethodCallInGenericTypeWithTypeApp] --## CallExpr|MemberExpr|TypeApplication --from typing import Any, TypeVar, Generic, Tuple --T = TypeVar('T') --S = TypeVar('S') --class B: pass --class C: pass --a.f[B](b) --a.f[Any](b) --class A(Generic[T]): -- def f(self, a: S) -> Tuple[T, S]: pass --a = None # type: A[C] --b = None # type: B --[builtins fixtures/tuple.py] --[out] --CallExpr(6) : Tuple[C, B] --MemberExpr(6) : def (B a) -> Tuple[C, B] --TypeApplication(6) : def (B a) -> Tuple[C, B] --CallExpr(7) : Tuple[C, Any] --MemberExpr(7) : def (any a) -> Tuple[C, Any] --TypeApplication(7) : def (any a) -> Tuple[C, Any] [case testGenericTypeVariableInference] from typing import TypeVar, Generic T = TypeVar('T') class A(Generic[T]): def __init__(self, a: T) -> None: pass class B: pass A(A(B())) [out] CallExpr(6) : A[A[B]] CallExpr(6) : A[B] CallExpr(6) : B NameExpr(6) : def (a: A[B]) -> A[A[B]] NameExpr(6) : def (a: B) -> A[B] NameExpr(6) : def () -> B -- Generic inheritance -- ------------------- [case testInheritedMethodReferenceWithGenericInheritance] from typing import TypeVar, Generic T = TypeVar('T') class C: pass class A(Generic[T]): def f(self, a: T) -> None: pass class B(A[C]): def g(self, c: C) -> None: self.f(c) [out] CallExpr(8) : builtins.None MemberExpr(8) : def (a: C) NameExpr(8) : C NameExpr(8) : B [case testInheritedMethodReferenceWithGenericSubclass] from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') class C: pass class A(Generic[S, T]): def f(self, a: C) -> None: pass class B(A[C, T], Generic[T]): def g(self, c: C) -> None: self.f(c) [out] 
CallExpr(9) : builtins.None MemberExpr(9) : def (a: C) NameExpr(9) : C NameExpr(9) : B[T`1] [case testExternalReferenceWithGenericInheritance] from typing import TypeVar, Generic T = TypeVar('T') class C: pass class A(Generic[T]): def f(self, a: T) -> None: pass class B(A[C]): pass b = None # type: B c = None # type: C b.f(c) [out] CallExpr(9) : builtins.None MemberExpr(9) : def (a: C) NameExpr(9) : B NameExpr(9) : C -- Implicit Any types -- ------------------ [case testDynamicallyTypedFunction] def f(x): y = x + o z = o z o = None # type: object [out] NameExpr(3) : builtins.object NameExpr(3) : Any NameExpr(3) : Any OpExpr(3) : Any NameExpr(4) : builtins.object NameExpr(4) : Any NameExpr(5) : Any [case testDynamicallyTypedMethod] class A: def f(self, x): y = ( o) # Place y and o on separate lines x y o = None # type: object [out] NameExpr(4) : Any NameExpr(5) : builtins.object NameExpr(6) : Any NameExpr(7) : Any [case testDynamicallyTypedConstructor] class A: def __init__(self, x): y = o x y o = None # type: object [out] NameExpr(4) : builtins.object NameExpr(4) : Any NameExpr(5) : Any NameExpr(6) : Any [case testCallInDynamicallyTypedFunction] def f(): g(o) def g(a: object) -> object: pass o = None # type: object [out] CallExpr(3) : Any NameExpr(3) : def (a: builtins.object) -> builtins.object NameExpr(3) : builtins.object [case testExpressionInDynamicallyTypedFn] import typing def f(): x = None x.f() [out] CallExpr(4) : Any MemberExpr(4) : Any NameExpr(4) : Any [case testGenericCall] from typing import TypeVar, Generic T = TypeVar('T') def f() -> None: a1 = A(b) # type: A[B] a2 = A(b) # type: A[object] class A(Generic[T]): def __init__(self, a: T) -> None: pass class B: pass b = None # type: B [out] CallExpr(4) : A[B] NameExpr(4) : def (a: B) -> A[B] NameExpr(4) : B CallExpr(5) : A[builtins.object] NameExpr(5) : def (a: builtins.object) -> A[builtins.object] NameExpr(5) : B [case testGenericCallInDynamicallyTypedFunction] from typing import TypeVar, Generic T = TypeVar('T') def f(): A() class A(Generic[T]): pass [out] CallExpr(4) : Any NameExpr(4) : def [T] () -> A[T`1] [case testGenericCallInDynamicallyTypedFunction2] from typing import TypeVar, Generic T = TypeVar('T') def f(): A(f) class A(Generic[T]): def __init__(self, x: T) -> None: pass [out] CallExpr(4) : Any NameExpr(4) : def [T] (x: T`1) -> A[T`1] NameExpr(4) : def () -> Any [case testGenericCallInDynamicallyTypedFunction3] from typing import TypeVar t = TypeVar('t') def f(): g(None) def g(x: t) -> t: pass [out] CallExpr(4) : Any NameExpr(4) : def [t] (x: t`-1) -> t`-1 -- Generic types and type inference -- -------------------------------- [case testInferenceInArgumentContext] ## CallExpr from typing import TypeVar, Generic T = TypeVar('T') f(g()) f(h(b)) f(h(c)) b = None # type: B c = None # type: C def f(a: 'A[B]') -> None: pass def g() -> 'A[T]': pass def h(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass class C(B): pass [out] CallExpr(4) : builtins.None CallExpr(4) : A[B] CallExpr(5) : builtins.None CallExpr(5) : A[B] CallExpr(6) : builtins.None CallExpr(6) : A[B] [case testInferGenericTypeForLocalVariable] from typing import TypeVar, Generic T = TypeVar('T') def f() -> None: a = A(b) a a2, a3 = A(b), A(c) a2 a3 b = None # type: B c = None # type: C class A(Generic[T]): def __init__(self, x: T) -> None: pass class B: pass class C: pass [out] CallExpr(4) : A[B] NameExpr(4) : def (x: B) -> A[B] NameExpr(4) : A[B] NameExpr(4) : B NameExpr(5) : A[B] CallExpr(6) : A[B] CallExpr(6) : A[C] NameExpr(6) : def (x: B) 
-> A[B] NameExpr(6) : def (x: C) -> A[C] NameExpr(6) : A[B] NameExpr(6) : A[C] NameExpr(6) : B NameExpr(6) : C NameExpr(7) : A[B] NameExpr(8) : A[C] [case testNestedGenericCalls] from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') def h() -> None: g(f(c)) c = None # type: C class A(Generic[T]): pass class B(Generic[T]): pass class C: pass def f(a: T) -> A[T]: pass def g(a: S) -> B[S]: pass [out] CallExpr(5) : A[C] CallExpr(5) : B[A[C]] NameExpr(5) : C NameExpr(5) : def (a: C) -> A[C] NameExpr(5) : def (a: A[C]) -> B[A[C]] [case testInferListLiterals] from typing import List a = [] # type: List[A] class A: pass [builtins fixtures/list.pyi] [out] ListExpr(2) : builtins.list[A] [case testInferGenericTypeInTypeAnyContext] from typing import Any a = [] # type: Any [builtins fixtures/list.pyi] [out] ListExpr(2) : builtins.list[Any] [case testHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') map( f, [A()]) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass [builtins fixtures/list.pyi] [out] CallExpr(4) : builtins.list[B] NameExpr(4) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] NameExpr(5) : def (a: A) -> B CallExpr(6) : A ListExpr(6) : builtins.list[A] NameExpr(6) : def () -> A -- Lambdas -- ------- [case testLambdaWithTypeInferredFromContext] from typing import Callable f = lambda x: x.a # type: Callable[[B], A] class A: pass class B: a = None # type: A [out] LambdaExpr(2) : def (B) -> A MemberExpr(2) : A NameExpr(2) : B [case testLambdaWithInferredType] ## LambdaExpr|NameExpr import typing f = lambda: 1 [out] LambdaExpr(3) : def () -> builtins.int NameExpr(3) : def () -> builtins.int [case testLambdaWithInferredType2] ## LambdaExpr|NameExpr import typing f = lambda: [1] [builtins fixtures/list.pyi] [out] LambdaExpr(3) : def () -> builtins.list[builtins.int] NameExpr(3) : def () -> builtins.list[builtins.int] [case testLambdaWithInferredType2] from typing import List, Callable f = lambda x: [] # type: Callable[[B], List[A]] class A: pass class B: a = None # type: A [builtins fixtures/list.pyi] [out] LambdaExpr(2) : def (B) -> builtins.list[A] ListExpr(2) : builtins.list[A] [case testLambdaAndHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') l = None # type: List[A] map( lambda x: f(x), l) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass [builtins fixtures/list.pyi] [out] CallExpr(5) : builtins.list[B] NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] CallExpr(6) : B LambdaExpr(6) : def (A) -> B NameExpr(6) : def (a: A) -> B NameExpr(6) : builtins.list[A] NameExpr(6) : A [case testLambdaAndHigherOrderFunction2] ## LambdaExpr|NameExpr|ListExpr from typing import TypeVar, List, Callable t = TypeVar('t') s = TypeVar('s') l = None # type: List[A] map( lambda x: [f(x)], l) def map(f: Callable[[t], List[s]], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass [builtins fixtures/list.pyi] [out] NameExpr(6) : def (f: def (A) -> builtins.list[B], a: builtins.list[A]) -> builtins.list[B] LambdaExpr(7) : def (A) -> builtins.list[B] ListExpr(7) : builtins.list[B] NameExpr(7) : def (a: A) -> B NameExpr(7) : builtins.list[A] NameExpr(7) : A [case testLambdaInListAndHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') l = None # type: List[A] map( [lambda x: x], l) 
def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass class A: pass [builtins fixtures/list.pyi] [out] -- TODO We probably should not silently infer 'Any' types in statically typed -- context. Perhaps just fail instead? CallExpr(5) : builtins.list[Any] NameExpr(5) : def (f: builtins.list[def (A) -> Any], a: builtins.list[A]) -> builtins.list[Any] LambdaExpr(6) : def (A) -> A ListExpr(6) : builtins.list[def (A) -> Any] NameExpr(6) : A NameExpr(7) : builtins.list[A] [case testLambdaAndHigherOrderFunction3] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') l = None # type: List[A] map( lambda x: x.b, l) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: b = None # type: B class B: pass [builtins fixtures/list.pyi] [out] CallExpr(5) : builtins.list[B] NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] LambdaExpr(6) : def (A) -> B MemberExpr(6) : B NameExpr(6) : A NameExpr(7) : builtins.list[A] [case testLambdaAndHigherOrderFunctionAndKeywordArgs] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') l = None # type: List[A] map( a=l, f=lambda x: x.b) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: b = None # type: B class B: pass [builtins fixtures/list.pyi] [out] CallExpr(5) : builtins.list[B] NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] NameExpr(6) : builtins.list[A] LambdaExpr(7) : def (A) -> B MemberExpr(7) : B NameExpr(7) : A -- Boolean operations -- ------------------ [case testBooleanOr] from typing import List a = None # type: List[A] a or [] a = a or [] a = [] or a class A: pass [builtins fixtures/list.pyi] [out] ListExpr(3) : builtins.list[A] NameExpr(3) : builtins.list[A] OpExpr(3) : builtins.list[A] ListExpr(4) : builtins.list[A] NameExpr(4) : builtins.list[A] NameExpr(4) : builtins.list[A] OpExpr(4) : builtins.list[A] ListExpr(5) : builtins.list[A] NameExpr(5) : builtins.list[A] NameExpr(5) : builtins.list[A] OpExpr(5) : builtins.list[A] -- Class attributes -- ---------------- [case testUnboundMethod] ## MemberExpr import typing class A: def f(self) -> None: pass A.f [out] MemberExpr(5) : def (self: A) [case testUnboundMethodWithImplicitSig] ## MemberExpr import typing class A: def f(self): pass A.f [out] MemberExpr(5) : def (self: Any) -> Any [case testOverloadedUnboundMethod] ## MemberExpr from typing import overload class A: @overload def f(self) -> None: pass @overload def f(self, __x: object) -> None: pass def f(self, *args) -> None: pass A.f [out] MemberExpr(10) : Overload(def (self: A), def (self: A, builtins.object)) [case testOverloadedUnboundMethodWithImplicitSig] ## MemberExpr from typing import overload class A: @overload def f(self): pass @overload def f(self, __x): pass def f(self, *args): pass A.f [out] MemberExpr(10) : Overload(def (self: Any) -> Any, def (self: Any, Any) -> Any) [case testUnboundMethodWithInheritance] ## MemberExpr import typing class A: def __init__(self) -> None: pass def f(self) -> None: pass class B(A): pass B.f [out] MemberExpr(8) : def (self: A) [case testUnboundGenericMethod] ## MemberExpr from typing import TypeVar t = TypeVar('t') class B: pass class A: def f(self, x: t) -> None: pass A.f(A(), B()) [out] MemberExpr(7) : def (self: A, x: B) [case testUnboundMethodOfGenericClass] ## MemberExpr from typing import TypeVar, Generic t = TypeVar('t') class B: pass class A(Generic[t]): def f(self, x: t) -> None: pass A.f a_b = A() # type: A[B] A.f(a_b, B()) [out] MemberExpr(7) : def [t] (self: 
A[t`1], x: t`1) MemberExpr(9) : def (self: A[B], x: B) [case testUnboundOverloadedMethodOfGenericClass] ## CallExpr from typing import TypeVar, Generic, overload t = TypeVar('t') class B: pass class A(Generic[t]): @overload def f(self, x: t) -> t: pass @overload def f(self) -> object: pass def f(self, *args): pass ab, b = None, None # type: (A[B], B) A.f(ab, b) [out] CallExpr(13) : B [case testUnboundMethodOfGenericClassWithImplicitSig] ## MemberExpr from typing import TypeVar, Generic t = TypeVar('t') class B: pass class A(Generic[t]): def f(self, x): pass A.f(None, None) [out] MemberExpr(7) : def (self: Any, x: Any) -> Any [case testGenericMethodOfGenericClass] ## MemberExpr from typing import TypeVar, Generic t = TypeVar('t') s = TypeVar('s') class B: pass class A(Generic[t]): def f(self, y: s) -> None: pass ab = None # type: A[B] o = None # type: object A.f(ab, o) [out] MemberExpr(10) : def (self: A[B], y: builtins.object) -- Type variables with value restriction -- ------------------------------------- [case testTypeVariableWithValueRestriction] ## NameExpr from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> None: pass f(1) f('x') [out] NameExpr(5) : def (x: builtins.int) NameExpr(6) : def (x: builtins.str) [case testTypeVariableWithValueRestrictionAndSubtype] ## NameExpr|CallExpr from typing import TypeVar T = TypeVar('T', int, str) def f(x: T) -> T: pass class S(str): pass s = None # type: S f(s) [out] CallExpr(7) : builtins.str NameExpr(7) : def (x: builtins.str) -> builtins.str NameExpr(7) : S -- Binary operations -- ----------------- [case testBinaryOperatorWithAnyLeftOperand] ## OpExpr from typing import Any, cast class B: def __add__(self, x: int) -> str: pass class A: def __radd__(self, x: B) -> int: pass cast(Any, 1) + A() B() + A() [out] OpExpr(7) : Any OpExpr(8) : builtins.int [case testBinaryOperatorWithAnyRightOperand] ## OpExpr from typing import Any, cast class A: def __add__(self, x: str) -> int: pass A() + cast(Any, 1) [out] OpExpr(5) : Any -- Callable overloading -- -------------------- [case testOverloadedFunctionType] ## CallExpr from typing import overload @overload def f(x: int) -> str: pass @overload def f(x: str) -> int: pass def f(x): pass f(1) f('') [out] CallExpr(8) : builtins.str CallExpr(9) : builtins.int [case testOverlappingOverloadedFunctionType] ## CallExpr from typing import overload, Any class A: pass class B(A): pass @overload def f(x: B) -> B: pass @overload def f(x: A) -> A: pass def f(x) -> Any: pass a = None # type: A b = None # type: B f(a) f(b) [out] CallExpr(14) : A CallExpr(15) : B [case testOverloadedErasedType] from typing import Callable from typing import List from typing import overload from typing import TypeVar T = TypeVar("T") V = TypeVar("V") def fun(s: int) -> int: pass def m(fun: Callable[[T], V], iter: List[T]) -> None: pass nums = [1] # type: List[int] m(fun, nums) [builtins fixtures/list.pyi] [out] IntExpr(13) : builtins.int ListExpr(13) : builtins.list[builtins.int] CallExpr(14) : builtins.None NameExpr(14) : def (s: builtins.int) -> builtins.int NameExpr(14) : def (fun: def (builtins.int) -> builtins.int, iter: builtins.list[builtins.int]) NameExpr(15) : builtins.list[builtins.int] -- Special cases -- ------------- [case testImplicitDataAttributeInit] ## NameExpr import typing class A: def __init__(self) -> None: self.x = ( A()) [out] NameExpr(5) : A NameExpr(6) : def () -> A [case testListMultiplicationInContext] ## ListExpr|OpExpr|IntExpr from typing import List a = [None] * 3 # type: List[str] [builtins 
fixtures/list.pyi] [out] IntExpr(3) : builtins.int ListExpr(3) : builtins.list[builtins.str] OpExpr(3) : builtins.list[builtins.str] -- TODO -- -- test expressions -- list literal -- tuple literal -- unary minus -- indexing -- super expression -- more complex lambda (multiple arguments etc.) -- list comprehension -- generator expression -- overloads -- other things -- type inference -- default argument value -- for loop variable -- exception variable -- varargs -- generics -- explicit types -- type of 'None' (currently stripped, but sometimes we may want to dump it) mypy-0.560/typeshed/0000755€tŠÔÚ€2›s®0000000000013215007242020515 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/0000755€tŠÔÚ€2›s®0000000000013215007242021776 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2/0000755€tŠÔÚ€2›s®0000000000013215007244022141 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2/__builtin__.pyi0000644€tŠÔÚ€2›s®0000012232013215007212025121 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for builtins (Python 2.7) # True and False are deliberately omitted because they are keywords in # Python 3, and stub files conform to Python 3 syntax. from typing import ( TypeVar, Iterator, Iterable, overload, Sequence, Mapping, Tuple, List, Any, Dict, Callable, Generic, Set, AbstractSet, FrozenSet, Sized, Reversible, SupportsInt, SupportsFloat, SupportsAbs, SupportsRound, IO, BinaryIO, Union, AnyStr, MutableSequence, MutableMapping, MutableSet, ItemsView, KeysView, ValuesView, Optional, Container, Type ) from abc import abstractmethod, ABCMeta from mypy_extensions import NoReturn _T = TypeVar('_T') _T_co = TypeVar('_T_co', covariant=True) _KT = TypeVar('_KT') _VT = TypeVar('_VT') _S = TypeVar('_S') _T1 = TypeVar('_T1') _T2 = TypeVar('_T2') _T3 = TypeVar('_T3') _T4 = TypeVar('_T4') _T5 = TypeVar('_T5') _TT = TypeVar('_TT', bound='type') class object: __doc__ = ... # type: Optional[str] __class__ = ... # type: type __dict__ = ... # type: Dict[str, Any] __slots__ = ... # type: Optional[Union[str, unicode, Iterable[Union[str, unicode]]]] __module__ = ... # type: str def __init__(self) -> None: ... def __new__(cls) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def __eq__(self, o: object) -> bool: ... def __ne__(self, o: object) -> bool: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __hash__(self) -> int: ... def __format__(self, format_spec: str) -> str: ... def __getattribute__(self, name: str) -> Any: ... def __delattr__(self, name: str) -> None: ... def __sizeof__(self) -> int: ... def __reduce__(self) -> tuple: ... def __reduce_ex__(self, protocol: int) -> tuple: ... class staticmethod(object): # Special, only valid as a decorator. __func__ = ... # type: function def __init__(self, f: function) -> None: ... def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ... def __get__(self, obj: _T, type: Optional[Type[_T]]=...) -> function: ... class classmethod(object): # Special, only valid as a decorator. __func__ = ... # type: function def __init__(self, f: function) -> None: ... def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ... def __get__(self, obj: _T, type: Optional[Type[_T]]=...) -> function: ... class type(object): __bases__ = ... # type: Tuple[type, ...] __name__ = ... # type: str __module__ = ... # type: str @overload def __init__(self, o: object) -> None: ... @overload def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: ... 
# TODO: __new__ may have to be special and not a static method. @overload def __new__(cls, o: object) -> type: ... @overload def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]) -> type: ... def __call__(self, *args: Any, **kwds: Any) -> Any: ... # Only new-style classes __mro__ = ... # type: Tuple[type, ...] # Note: the documentation doesnt specify what the return type is, the standard # implementation seems to be returning a list. def mro(self) -> List[type]: ... def __subclasses__(self: _TT) -> List[_TT]: ... def __instancecheck__(self, instance: Any) -> bool: ... def __subclasscheck__(self, subclass: type) -> bool: ... class int: @overload def __init__(self, x: SupportsInt = ...) -> None: ... @overload def __init__(self, x: Union[str, unicode, bytearray], base: int = ...) -> None: ... def bit_length(self) -> int: ... def __add__(self, x: int) -> int: ... def __sub__(self, x: int) -> int: ... def __mul__(self, x: int) -> int: ... def __floordiv__(self, x: int) -> int: ... def __div__(self, x: int) -> int: ... def __truediv__(self, x: int) -> float: ... def __mod__(self, x: int) -> int: ... def __radd__(self, x: int) -> int: ... def __rsub__(self, x: int) -> int: ... def __rmul__(self, x: int) -> int: ... def __rfloordiv__(self, x: int) -> int: ... def __rdiv__(self, x: int) -> int: ... def __rtruediv__(self, x: int) -> float: ... def __rmod__(self, x: int) -> int: ... def __pow__(self, x: int) -> Any: ... # Return type can be int or float, depending on x. def __rpow__(self, x: int) -> Any: ... def __and__(self, n: int) -> int: ... def __or__(self, n: int) -> int: ... def __xor__(self, n: int) -> int: ... def __lshift__(self, n: int) -> int: ... def __rshift__(self, n: int) -> int: ... def __rand__(self, n: int) -> int: ... def __ror__(self, n: int) -> int: ... def __rxor__(self, n: int) -> int: ... def __rlshift__(self, n: int) -> int: ... def __rrshift__(self, n: int) -> int: ... def __neg__(self) -> int: ... def __pos__(self) -> int: ... def __invert__(self) -> int: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: int) -> bool: ... def __le__(self, x: int) -> bool: ... def __gt__(self, x: int) -> bool: ... def __ge__(self, x: int) -> bool: ... def __str__(self) -> str: ... def __float__(self) -> float: ... def __int__(self) -> int: ... def __abs__(self) -> int: ... def __hash__(self) -> int: ... def __nonzero__(self) -> bool: ... class float: def __init__(self, x: Union[SupportsFloat, str, unicode, bytearray] = ...) -> None: ... def as_integer_ratio(self) -> Tuple[int, int]: ... def hex(self) -> str: ... def is_integer(self) -> bool: ... @classmethod def fromhex(cls, s: str) -> float: ... def __add__(self, x: float) -> float: ... def __sub__(self, x: float) -> float: ... def __mul__(self, x: float) -> float: ... def __floordiv__(self, x: float) -> float: ... def __div__(self, x: float) -> float: ... def __truediv__(self, x: float) -> float: ... def __mod__(self, x: float) -> float: ... def __pow__(self, x: float) -> float: ... def __radd__(self, x: float) -> float: ... def __rsub__(self, x: float) -> float: ... def __rmul__(self, x: float) -> float: ... def __rfloordiv__(self, x: float) -> float: ... def __rdiv__(self, x: float) -> float: ... def __rtruediv__(self, x: float) -> float: ... def __rmod__(self, x: float) -> float: ... def __rpow__(self, x: float) -> float: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: float) -> bool: ... 
def __le__(self, x: float) -> bool: ... def __gt__(self, x: float) -> bool: ... def __ge__(self, x: float) -> bool: ... def __neg__(self) -> float: ... def __pos__(self) -> float: ... def __str__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __abs__(self) -> float: ... def __hash__(self) -> int: ... def __nonzero__(self) -> bool: ... class complex: @overload def __init__(self, re: float = ..., im: float = ...) -> None: ... @overload def __init__(self, s: str) -> None: ... @property def real(self) -> float: ... @property def imag(self) -> float: ... def conjugate(self) -> complex: ... def __add__(self, x: complex) -> complex: ... def __sub__(self, x: complex) -> complex: ... def __mul__(self, x: complex) -> complex: ... def __pow__(self, x: complex) -> complex: ... def __div__(self, x: complex) -> complex: ... def __truediv__(self, x: complex) -> complex: ... def __radd__(self, x: complex) -> complex: ... def __rsub__(self, x: complex) -> complex: ... def __rmul__(self, x: complex) -> complex: ... def __rpow__(self, x: complex) -> complex: ... def __rdiv__(self, x: complex) -> complex: ... def __rtruediv__(self, x: complex) -> complex: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __neg__(self) -> complex: ... def __pos__(self) -> complex: ... def __str__(self) -> str: ... def __abs__(self) -> float: ... def __hash__(self) -> int: ... def __nonzero__(self) -> bool: ... class super(object): @overload def __init__(self, t: Any, obj: Any) -> None: ... @overload def __init__(self, t: Any) -> None: ... class basestring(metaclass=ABCMeta): ... class unicode(basestring, Sequence[unicode]): @overload def __init__(self) -> None: ... @overload def __init__(self, o: object) -> None: ... @overload def __init__(self, o: str, encoding: unicode = ..., errors: unicode = ...) -> None: ... def capitalize(self) -> unicode: ... def center(self, width: int, fillchar: unicode = ...) -> unicode: ... def count(self, x: unicode) -> int: ... def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ... def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ... def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]], start: int = ..., end: int = ...) -> bool: ... def expandtabs(self, tabsize: int = ...) -> unicode: ... def find(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... def format(self, *args: Any, **kwargs: Any) -> unicode: ... def format_map(self, map: Mapping[unicode, Any]) -> unicode: ... def index(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... def isdecimal(self) -> bool: ... def isdigit(self) -> bool: ... def isidentifier(self) -> bool: ... def islower(self) -> bool: ... def isnumeric(self) -> bool: ... def isprintable(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, iterable: Iterable[unicode]) -> unicode: ... def ljust(self, width: int, fillchar: unicode = ...) -> unicode: ... def lower(self) -> unicode: ... def lstrip(self, chars: unicode = ...) -> unicode: ... def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... def replace(self, old: unicode, new: unicode, count: int = ...) -> unicode: ... def rfind(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rindex(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... 
def rjust(self, width: int, fillchar: unicode = ...) -> unicode: ... def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... def rsplit(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ... def rstrip(self, chars: unicode = ...) -> unicode: ... def split(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ... def splitlines(self, keepends: bool = ...) -> List[unicode]: ... def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]], start: int = ..., end: int = ...) -> bool: ... def strip(self, chars: unicode = ...) -> unicode: ... def swapcase(self) -> unicode: ... def title(self) -> unicode: ... def translate(self, table: Union[Dict[int, Any], unicode]) -> unicode: ... def upper(self) -> unicode: ... def zfill(self, width: int) -> unicode: ... @overload def __getitem__(self, i: int) -> unicode: ... @overload def __getitem__(self, s: slice) -> unicode: ... def __getslice__(self, start: int, stop: int) -> unicode: ... def __add__(self, s: unicode) -> unicode: ... def __mul__(self, n: int) -> unicode: ... def __rmul__(self, n: int) -> unicode: ... def __mod__(self, x: Any) -> unicode: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: unicode) -> bool: ... def __le__(self, x: unicode) -> bool: ... def __gt__(self, x: unicode) -> bool: ... def __ge__(self, x: unicode) -> bool: ... def __len__(self) -> int: ... def __contains__(self, s: object) -> bool: ... def __iter__(self) -> Iterator[unicode]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __hash__(self) -> int: ... class str(basestring, Sequence[str]): def __init__(self, object: object = ...) -> None: ... def capitalize(self) -> str: ... def center(self, width: int, fillchar: str = ...) -> str: ... def count(self, x: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ... def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ... def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]]) -> bool: ... def expandtabs(self, tabsize: int = ...) -> str: ... def find(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def format(self, *args: Any, **kwargs: Any) -> str: ... def index(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... def isdigit(self) -> bool: ... def islower(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, iterable: Iterable[AnyStr]) -> AnyStr: ... def ljust(self, width: int, fillchar: str = ...) -> str: ... def lower(self) -> str: ... @overload def lstrip(self, chars: str = ...) -> str: ... @overload def lstrip(self, chars: unicode) -> unicode: ... @overload def partition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ... @overload def partition(self, sep: str) -> Tuple[str, str, str]: ... @overload def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... def replace(self, old: AnyStr, new: AnyStr, count: int = ...) -> AnyStr: ... def rfind(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def rindex(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... 
def rjust(self, width: int, fillchar: str = ...) -> str: ... @overload def rpartition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ... @overload def rpartition(self, sep: str) -> Tuple[str, str, str]: ... @overload def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... @overload def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... @overload def rsplit(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ... @overload def rstrip(self, chars: str = ...) -> str: ... @overload def rstrip(self, chars: unicode) -> unicode: ... @overload def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... @overload def split(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ... def splitlines(self, keepends: bool = ...) -> List[str]: ... def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]]) -> bool: ... @overload def strip(self, chars: str = ...) -> str: ... @overload def strip(self, chars: unicode) -> unicode: ... def swapcase(self) -> str: ... def title(self) -> str: ... def translate(self, table: Optional[AnyStr], deletechars: AnyStr = ...) -> AnyStr: ... def upper(self) -> str: ... def zfill(self, width: int) -> str: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[str]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __hash__(self) -> int: ... @overload def __getitem__(self, i: int) -> str: ... @overload def __getitem__(self, s: slice) -> str: ... def __getslice__(self, start: int, stop: int) -> str: ... def __add__(self, s: AnyStr) -> AnyStr: ... def __mul__(self, n: int) -> str: ... def __rmul__(self, n: int) -> str: ... def __contains__(self, o: object) -> bool: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: unicode) -> bool: ... def __le__(self, x: unicode) -> bool: ... def __gt__(self, x: unicode) -> bool: ... def __ge__(self, x: unicode) -> bool: ... def __mod__(self, x: Any) -> str: ... class bytearray(MutableSequence[int]): @overload def __init__(self) -> None: ... @overload def __init__(self, x: Union[Iterable[int], str]) -> None: ... @overload def __init__(self, x: unicode, encoding: unicode, errors: unicode = ...) -> None: ... @overload def __init__(self, length: int) -> None: ... def capitalize(self) -> bytearray: ... def center(self, width: int, fillchar: str = ...) -> bytearray: ... def count(self, x: str) -> int: ... def decode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ... def endswith(self, suffix: Union[str, Tuple[str, ...]]) -> bool: ... def expandtabs(self, tabsize: int = ...) -> bytearray: ... def find(self, sub: str, start: int = ..., end: int = ...) -> int: ... def index(self, sub: str, start: int = ..., end: int = ...) -> int: ... def insert(self, index: int, object: int) -> None: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... def isdigit(self) -> bool: ... def islower(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, iterable: Iterable[str]) -> bytearray: ... def ljust(self, width: int, fillchar: str = ...) -> bytearray: ... def lower(self) -> bytearray: ... def lstrip(self, chars: str = ...) -> bytearray: ... def partition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ... def replace(self, old: str, new: str, count: int = ...) -> bytearray: ... 
def rfind(self, sub: str, start: int = ..., end: int = ...) -> int: ... def rindex(self, sub: str, start: int = ..., end: int = ...) -> int: ... def rjust(self, width: int, fillchar: str = ...) -> bytearray: ... def rpartition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ... def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[bytearray]: ... def rstrip(self, chars: str = ...) -> bytearray: ... def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[bytearray]: ... def splitlines(self, keepends: bool = ...) -> List[bytearray]: ... def startswith(self, prefix: Union[str, Tuple[str, ...]]) -> bool: ... def strip(self, chars: str = ...) -> bytearray: ... def swapcase(self) -> bytearray: ... def title(self) -> bytearray: ... def translate(self, table: str) -> bytearray: ... def upper(self) -> bytearray: ... def zfill(self, width: int) -> bytearray: ... @staticmethod def fromhex(x: str) -> bytearray: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __hash__(self) -> int: ... @overload def __getitem__(self, i: int) -> int: ... @overload def __getitem__(self, s: slice) -> bytearray: ... def __getslice__(self, start: int, stop: int) -> bytearray: ... @overload def __setitem__(self, i: int, x: int) -> None: ... @overload def __setitem__(self, s: slice, x: Union[Iterable[int], str]) -> None: ... def __setslice__(self, start: int, stop: int, x: Union[Sequence[int], str]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... def __delslice__(self, start: int, stop: int) -> None: ... def __add__(self, s: str) -> bytearray: ... def __mul__(self, n: int) -> bytearray: ... def __contains__(self, o: object) -> bool: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: str) -> bool: ... def __le__(self, x: str) -> bool: ... def __gt__(self, x: str) -> bool: ... def __ge__(self, x: str) -> bool: ... class bool(int): def __init__(self, o: object = ...) -> None: ... class slice(object): start = ... # type: Optional[int] step = ... # type: Optional[int] stop = ... # type: Optional[int] @overload def __init__(self, stop: Optional[int]) -> None: ... @overload def __init__(self, start: Optional[int], stop: Optional[int], step: Optional[int] = ...) -> None: ... def indices(self, len: int) -> Tuple[int, int, int]: ... class tuple(Sequence[_T_co], Generic[_T_co]): def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ... def __len__(self) -> int: ... def __contains__(self, x: object) -> bool: ... @overload def __getitem__(self, x: int) -> _T_co: ... @overload def __getitem__(self, x: slice) -> Tuple[_T_co, ...]: ... def __iter__(self) -> Iterator[_T_co]: ... def __lt__(self, x: Tuple[_T_co, ...]) -> bool: ... def __le__(self, x: Tuple[_T_co, ...]) -> bool: ... def __gt__(self, x: Tuple[_T_co, ...]) -> bool: ... def __ge__(self, x: Tuple[_T_co, ...]) -> bool: ... def __add__(self, x: Tuple[_T_co, ...]) -> Tuple[_T_co, ...]: ... def __mul__(self, n: int) -> Tuple[_T_co, ...]: ... def __rmul__(self, n: int) -> Tuple[_T_co, ...]: ... def count(self, x: Any) -> int: ... def index(self, x: Any) -> int: ... class function: # TODO name of the class (corresponds to Python 'function' class) __name__ = ... # type: str __module__ = ... # type: str class list(MutableSequence[_T], Generic[_T]): @overload def __init__(self) -> None: ... 
@overload def __init__(self, iterable: Iterable[_T]) -> None: ... def append(self, object: _T) -> None: ... def extend(self, iterable: Iterable[_T]) -> None: ... def pop(self, index: int = ...) -> _T: ... def index(self, object: _T, start: int = ..., stop: int = ...) -> int: ... def count(self, object: _T) -> int: ... def insert(self, index: int, object: _T) -> None: ... def remove(self, object: _T) -> None: ... def reverse(self) -> None: ... def sort(self, cmp: Callable[[_T, _T], Any] = ..., key: Callable[[_T], Any] = ..., reverse: bool = ...) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __hash__(self) -> int: ... @overload def __getitem__(self, i: int) -> _T: ... @overload def __getitem__(self, s: slice) -> List[_T]: ... def __getslice__(self, start: int, stop: int) -> List[_T]: ... @overload def __setitem__(self, i: int, o: _T) -> None: ... @overload def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... def __setslice__(self, start: int, stop: int, o: Sequence[_T]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... def __delslice__(self, start: int, stop: int) -> None: ... def __add__(self, x: List[_T]) -> List[_T]: ... def __iadd__(self, x: Iterable[_T]) -> List[_T]: ... def __mul__(self, n: int) -> List[_T]: ... def __rmul__(self, n: int) -> List[_T]: ... def __contains__(self, o: object) -> bool: ... def __reversed__(self) -> Iterator[_T]: ... def __gt__(self, x: List[_T]) -> bool: ... def __ge__(self, x: List[_T]) -> bool: ... def __lt__(self, x: List[_T]) -> bool: ... def __le__(self, x: List[_T]) -> bool: ... class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): # NOTE: Keyword arguments are special. If they are used, _KT must include # str, but we have no way of enforcing it here. @overload def __init__(self, **kwargs: _VT) -> None: ... @overload def __init__(self, map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... def __new__(cls: Type[_T1], *args: Any, **kwargs: Any) -> _T1: ... def has_key(self, k: _KT) -> bool: ... def clear(self) -> None: ... def copy(self) -> Dict[_KT, _VT]: ... def popitem(self) -> Tuple[_KT, _VT]: ... def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ... @overload def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def update(self, __m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... @overload def update(self, **kwargs: _VT) -> None: ... def iterkeys(self) -> Iterator[_KT]: ... def itervalues(self) -> Iterator[_VT]: ... def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ... def viewkeys(self) -> KeysView[_KT]: ... def viewvalues(self) -> ValuesView[_VT]: ... def viewitems(self) -> ItemsView[_KT, _VT]: ... @staticmethod @overload def fromkeys(seq: Sequence[_T]) -> Dict[_T, Any]: ... # TODO: Actually a class method (mypy/issues#328) @staticmethod @overload def fromkeys(seq: Sequence[_T], value: _S) -> Dict[_T, _S]: ... def __len__(self) -> int: ... def __getitem__(self, k: _KT) -> _VT: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... def __delitem__(self, v: _KT) -> None: ... def __iter__(self) -> Iterator[_KT]: ... def __str__(self) -> str: ... class set(MutableSet[_T], Generic[_T]): def __init__(self, iterable: Iterable[_T] = ...) -> None: ... def add(self, element: _T) -> None: ... def clear(self) -> None: ... def copy(self) -> Set[_T]: ... def difference(self, *s: Iterable[Any]) -> Set[_T]: ... 
def difference_update(self, *s: Iterable[Any]) -> None: ... def discard(self, element: _T) -> None: ... def intersection(self, *s: Iterable[Any]) -> Set[_T]: ... def intersection_update(self, *s: Iterable[Any]) -> None: ... def isdisjoint(self, s: Iterable[object]) -> bool: ... def issubset(self, s: Iterable[object]) -> bool: ... def issuperset(self, s: Iterable[object]) -> bool: ... def pop(self) -> _T: ... def remove(self, element: _T) -> None: ... def symmetric_difference(self, s: Iterable[_T]) -> Set[_T]: ... def symmetric_difference_update(self, s: Iterable[_T]) -> None: ... def union(self, *s: Iterable[_T]) -> Set[_T]: ... def update(self, *s: Iterable[_T]) -> None: ... def __len__(self) -> int: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __and__(self, s: AbstractSet[object]) -> Set[_T]: ... def __iand__(self, s: AbstractSet[object]) -> Set[_T]: ... def __or__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __ior__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __sub__(self, s: AbstractSet[object]) -> Set[_T]: ... def __isub__(self, s: AbstractSet[object]) -> Set[_T]: ... def __xor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __ixor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __le__(self, s: AbstractSet[object]) -> bool: ... def __lt__(self, s: AbstractSet[object]) -> bool: ... def __ge__(self, s: AbstractSet[object]) -> bool: ... def __gt__(self, s: AbstractSet[object]) -> bool: ... # TODO more set operations class frozenset(AbstractSet[_T], Generic[_T]): @overload def __init__(self) -> None: ... @overload def __init__(self, iterable: Iterable[_T]) -> None: ... def copy(self) -> FrozenSet[_T]: ... def difference(self, *s: Iterable[object]) -> FrozenSet[_T]: ... def intersection(self, *s: Iterable[object]) -> FrozenSet[_T]: ... def isdisjoint(self, s: Iterable[_T]) -> bool: ... def issubset(self, s: Iterable[object]) -> bool: ... def issuperset(self, s: Iterable[object]) -> bool: ... def symmetric_difference(self, s: Iterable[_T]) -> FrozenSet[_T]: ... def union(self, *s: Iterable[_T]) -> FrozenSet[_T]: ... def __len__(self) -> int: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __and__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ... def __or__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ... def __sub__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ... def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ... def __le__(self, s: AbstractSet[object]) -> bool: ... def __lt__(self, s: AbstractSet[object]) -> bool: ... def __ge__(self, s: AbstractSet[object]) -> bool: ... def __gt__(self, s: AbstractSet[object]) -> bool: ... class enumerate(Iterator[Tuple[int, _T]], Generic[_T]): def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ... def __iter__(self) -> Iterator[Tuple[int, _T]]: ... def next(self) -> Tuple[int, _T]: ... # TODO __getattribute__ class xrange(Sized, Iterable[int], Reversible[int]): @overload def __init__(self, stop: int) -> None: ... @overload def __init__(self, start: int, stop: int, step: int = ...) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... def __getitem__(self, i: int) -> int: ... def __reversed__(self) -> Iterator[int]: ... 
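# Illustrative note, not part of the original stub: the overloaded
# constructors above (and the keyword-argument caveat on ``dict``) are what
# let mypy infer container item types from call sites.  The variable names
# below are hypothetical examples only:
#
#     pairs = dict([('a', 1), ('b', 2)])   # inferred as Dict[str, int]
#     by_kw = dict(a=1, b=2)               # keyword form: _KT must include str
#     words = set(['spam', 'eggs'])        # inferred as Set[str]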
class property(object): def __init__(self, fget: Optional[Callable[[Any], Any]] = ..., fset: Optional[Callable[[Any, Any], None]] = ..., fdel: Optional[Callable[[Any], None]] = ..., doc: Optional[str] = ...) -> None: ... def getter(self, fget: Callable[[Any], Any]) -> property: ... def setter(self, fset: Callable[[Any, Any], None]) -> property: ... def deleter(self, fdel: Callable[[Any], None]) -> property: ... def __get__(self, obj: Any, type: Optional[type] = ...) -> Any: ... def __set__(self, obj: Any, value: Any) -> None: ... def __delete__(self, obj: Any) -> None: ... def fget(self) -> Any: ... def fset(self, value: Any) -> None: ... def fdel(self) -> None: ... long = int bytes = str NotImplemented = ... # type: Any def abs(n: SupportsAbs[_T]) -> _T: ... def all(i: Iterable[object]) -> bool: ... def any(i: Iterable[object]) -> bool: ... def bin(number: int) -> str: ... def callable(o: object) -> bool: ... def chr(code: int) -> str: ... def compile(source: Any, filename: unicode, mode: str, flags: int = ..., dont_inherit: int = ...) -> Any: ... def delattr(o: Any, name: unicode) -> None: ... def dir(o: object = ...) -> List[str]: ... @overload def divmod(a: int, b: int) -> Tuple[int, int]: ... @overload def divmod(a: float, b: float) -> Tuple[float, float]: ... def exit(code: Any = ...) -> NoReturn: ... @overload def filter(function: Callable[[_T], Any], iterable: Iterable[_T]) -> List[_T]: ... @overload def filter(function: None, iterable: Iterable[Optional[_T]]) -> List[_T]: ... def format(o: object, format_spec: str = ...) -> str: ... # TODO unicode def getattr(o: Any, name: unicode, default: Optional[Any] = ...) -> Any: ... def hasattr(o: Any, name: unicode) -> bool: ... def hash(o: object) -> int: ... def hex(i: int) -> str: ... # TODO __index__ def id(o: object) -> int: ... def input(prompt: Any = ...) -> Any: ... def intern(string: str) -> str: ... @overload def iter(iterable: Iterable[_T]) -> Iterator[_T]: ... @overload def iter(function: Callable[[], _T], sentinel: _T) -> Iterator[_T]: ... def isinstance(o: object, t: Union[type, Tuple[Union[type, Tuple], ...]]) -> bool: ... def issubclass(cls: type, classinfo: Union[type, Tuple[Union[type, Tuple], ...]]) -> bool: ... def len(o: Sized) -> int: ... @overload def map(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> List[_S]: ... @overload def map(func: Callable[[_T1, _T2], _S], iter1: Iterable[_T1], iter2: Iterable[_T2]) -> List[_S]: ... # TODO more than two iterables @overload def map(func: None, iter1: Iterable[_T1]) -> List[_T1]: ... @overload def map(func: None, iter1: Iterable[_T1], iter2: Iterable[_T2]) -> List[Tuple[_T1, _T2]]: ... # TODO more than two iterables @overload def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def max(iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> _T: ... @overload def min(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def min(iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> _T: ... @overload def next(i: Iterator[_T]) -> _T: ... @overload def next(i: Iterator[_T], default: _VT) -> Union[_T, _VT]: ... def oct(i: int) -> str: ... # TODO __index__ @overload def open(file: str, mode: unicode = ..., buffering: int = ...) -> BinaryIO: ... @overload def open(file: unicode, mode: unicode = ..., buffering: int = ...) -> BinaryIO: ... @overload def open(file: int, mode: unicode = ..., buffering: int = ...) -> BinaryIO: ... def ord(c: unicode) -> int: ... 
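# Illustrative note, not part of the original stub: several of the overloads
# above encode distinct call patterns rather than separate runtime functions.
# Hypothetical examples of the calls each overload matches:
#
#     iter(f.readline, '')           # Callable-plus-sentinel form of iter()
#     filter(None, [1, None, 2])     # filter(None, ...) strips Optional -> List[int]
#     divmod(7, 2)                   # int overload -> Tuple[int, int]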
# This is only available after from __future__ import print_function. def print(*values: Any, sep: unicode = ..., end: unicode = ..., file: IO[Any] = ...) -> None: ... @overload def pow(x: int, y: int) -> Any: ... # The return type can be int or float, depending on y. @overload def pow(x: int, y: int, z: int) -> Any: ... @overload def pow(x: float, y: float) -> float: ... @overload def pow(x: float, y: float, z: float) -> float: ... def quit(code: int = ...) -> None: ... def range(x: int, y: int = ..., step: int = ...) -> List[int]: ... def raw_input(prompt: Any = ...) -> str: ... @overload def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], initializer: _T) -> _T: ... @overload def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T]) -> _T: ... def reload(module: Any) -> Any: ... @overload def reversed(object: Reversible[_T]) -> Iterator[_T]: ... @overload def reversed(object: Sequence[_T]) -> Iterator[_T]: ... def repr(o: object) -> str: ... @overload def round(number: float) -> float: ... @overload def round(number: float, ndigits: int) -> float: ... # Always return a float if given ndigits. @overload def round(number: SupportsRound[_T]) -> _T: ... @overload def round(number: SupportsRound[_T], ndigits: int) -> _T: ... def setattr(object: Any, name: unicode, value: Any) -> None: ... def sorted(iterable: Iterable[_T], *, cmp: Callable[[_T, _T], int] = ..., key: Callable[[_T], Any] = ..., reverse: bool = ...) -> List[_T]: ... @overload def sum(iterable: Iterable[_T]) -> Union[_T, int]: ... @overload def sum(iterable: Iterable[_T], start: _S) -> Union[_T, _S]: ... def unichr(i: int) -> unicode: ... def vars(object: Any = ...) -> Dict[str, Any]: ... @overload def zip(iter1: Iterable[_T1]) -> List[Tuple[_T1]]: ... @overload def zip(iter1: Iterable[_T1], iter2: Iterable[_T2]) -> List[Tuple[_T1, _T2]]: ... @overload def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3]) -> List[Tuple[_T1, _T2, _T3]]: ... @overload def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4]) -> List[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5]) -> List[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def zip(iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any], iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any], *iterables: Iterable[Any]) -> List[Tuple[Any, ...]]: ... def __import__(name: unicode, globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ..., fromlist: List[str] = ..., level: int = ...) -> Any: ... def globals() -> Dict[str, Any]: ... def locals() -> Dict[str, Any]: ... # Actually the type of Ellipsis is , but since it's # not exposed anywhere under that name, we make it private here. class ellipsis: ... Ellipsis = ... # type: ellipsis # TODO: buffer support is incomplete; e.g. some_string.startswith(some_buffer) doesn't type check. _AnyBuffer = TypeVar('_AnyBuffer', str, unicode, bytearray, buffer) class buffer(Sized): def __init__(self, object: _AnyBuffer, offset: int = ..., size: int = ...) -> None: ... def __add__(self, other: _AnyBuffer) -> str: ... def __cmp__(self, other: _AnyBuffer) -> bool: ... def __getitem__(self, key: Union[int, slice]) -> str: ... def __getslice__(self, i: int, j: int) -> str: ... def __len__(self) -> int: ... def __mul__(self, x: int) -> str: ... class memoryview(Sized, Container[bytes]): format = ... # type: str itemsize = ... 
# type: int shape = ... # type: Optional[Tuple[int, ...]] strides = ... # type: Optional[Tuple[int, ...]] suboffsets = ... # type: Optional[Tuple[int, ...]] readonly = ... # type: bool ndim = ... # type: int def __init__(self, obj: Union[str, bytearray, buffer, memoryview]) -> None: ... @overload def __getitem__(self, i: int) -> bytes: ... @overload def __getitem__(self, s: slice) -> memoryview: ... def __contains__(self, x: object) -> bool: ... def __iter__(self) -> Iterator[bytes]: ... def __len__(self) -> int: ... @overload def __setitem__(self, i: int, o: bytes) -> None: ... @overload def __setitem__(self, s: slice, o: Sequence[bytes]) -> None: ... @overload def __setitem__(self, s: slice, o: memoryview) -> None: ... def tobytes(self) -> bytes: ... def tolist(self) -> List[int]: ... class BaseException(object): args = ... # type: Tuple[Any, ...] message = ... # type: str def __init__(self, *args: object, **kwargs: object) -> None: ... def __getitem__(self, i: int) -> Any: ... def __getslice__(self, start: int, stop: int) -> Tuple[Any, ...]: ... class GeneratorExit(BaseException): ... class KeyboardInterrupt(BaseException): ... class SystemExit(BaseException): code = 0 class Exception(BaseException): ... class StopIteration(Exception): ... class StandardError(Exception): ... class ArithmeticError(StandardError): ... class BufferError(StandardError): ... class EnvironmentError(StandardError): errno = 0 strerror = ... # type: str # TODO can this be unicode? filename = ... # type: str class LookupError(StandardError): ... class RuntimeError(StandardError): ... class ValueError(StandardError): ... class AssertionError(StandardError): ... class AttributeError(StandardError): ... class EOFError(StandardError): ... class FloatingPointError(ArithmeticError): ... class IOError(EnvironmentError): ... class ImportError(StandardError): ... class IndexError(LookupError): ... class KeyError(LookupError): ... class MemoryError(StandardError): ... class NameError(StandardError): ... class NotImplementedError(RuntimeError): ... class OSError(EnvironmentError): ... class WindowsError(OSError): winerror = ... # type: int class OverflowError(ArithmeticError): ... class ReferenceError(StandardError): ... class SyntaxError(StandardError): msg = ... # type: str lineno = ... # type: int offset = ... # type: int text = ... # type: str filename = ... # type: str class IndentationError(SyntaxError): ... class TabError(IndentationError): ... class SystemError(StandardError): ... class TypeError(StandardError): ... class UnboundLocalError(NameError): ... class UnicodeError(ValueError): ... class UnicodeDecodeError(UnicodeError): ... class UnicodeEncodeError(UnicodeError): ... class UnicodeTranslateError(UnicodeError): ... class ZeroDivisionError(ArithmeticError): ... class Warning(Exception): ... class UserWarning(Warning): ... class DeprecationWarning(Warning): ... class SyntaxWarning(Warning): ... class RuntimeWarning(Warning): ... class FutureWarning(Warning): ... class PendingDeprecationWarning(Warning): ... class ImportWarning(Warning): ... class UnicodeWarning(Warning): ... class BytesWarning(Warning): ... class ResourceWarning(Warning): ... def eval(s: Union[str, unicode], globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ...) -> Any: ... def exec(object: str, globals: Optional[Dict[str, Any]] = ..., locals: Optional[Dict[str, Any]] = ...) -> Any: ... # TODO code object as source def cmp(x: Any, y: Any) -> int: ... 
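# Illustrative note, not part of the original stub: the classes above follow
# the Python 2 exception hierarchy, so a handler for a base class also
# catches its subclasses.  A hypothetical sketch:
#
#     try:
#         open('/no/such/file')
#     except EnvironmentError as e:   # also catches IOError and OSError
#         print e.errno, e.strerror   # attributes declared on EnvironmentError above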
def execfile(filename: str, globals: Optional[Dict[str, Any]] = ..., locals: Optional[Dict[str, Any]] = ...) -> None: ... class file(BinaryIO): @overload def __init__(self, file: str, mode: str = ..., buffering: int = ...) -> None: ... @overload def __init__(self, file: unicode, mode: str = ..., buffering: int = ...) -> None: ... @overload def __init__(self, file: int, mode: str = ..., buffering: int = ...) -> None: ... def __iter__(self) -> Iterator[str]: ... def read(self, n: int = ...) -> str: ... def __enter__(self) -> BinaryIO: ... def __exit__(self, t: Optional[type] = ..., exc: Optional[BaseException] = ..., tb: Optional[Any] = ...) -> bool: ... def flush(self) -> None: ... def fileno(self) -> int: ... def isatty(self) -> bool: ... def close(self) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... def seekable(self) -> bool: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... def readline(self, limit: int = ...) -> str: ... def readlines(self, hint: int = ...) -> List[str]: ... def write(self, data: str) -> int: ... def writelines(self, data: Iterable[str]) -> None: ... def truncate(self, pos: Optional[int] = ...) -> int: ... # Very old builtins def apply(func: Callable[..., _T], args: Optional[Sequence[Any]] = ..., kwds: Optional[Mapping[str, Any]] = ...) -> _T: ... _N = TypeVar('_N', bool, int, float, complex) def coerce(x: _N, y: _N) -> Tuple[_N, _N]: ... mypy-0.560/typeshed/stdlib/2/_ast.pyi0000644€tŠÔÚ€2›s®0000001664213215007212023616 0ustar jukkaDROPBOX\Domain Users00000000000000import typing from typing import Optional, Union __version__ = ... # type: str PyCF_ONLY_AST = ... # type: int _identifier = str class AST: _attributes = ... # type: typing.Tuple[str, ...] _fields = ... # type: typing.Tuple[str, ...] def __init__(self, *args, **kwargs) -> None: ... class mod(AST): ... class Module(mod): body = ... # type: typing.List[stmt] class Interactive(mod): body = ... # type: typing.List[stmt] class Expression(mod): body = ... # type: expr class Suite(mod): body = ... # type: typing.List[stmt] class stmt(AST): lineno = ... # type: int col_offset = ... # type: int class FunctionDef(stmt): name = ... # type: _identifier args = ... # type: arguments body = ... # type: typing.List[stmt] decorator_list = ... # type: typing.List[expr] class ClassDef(stmt): name = ... # type: _identifier bases = ... # type: typing.List[expr] body = ... # type: typing.List[stmt] decorator_list = ... # type: typing.List[expr] class Return(stmt): value = ... # type: Optional[expr] class Delete(stmt): targets = ... # type: typing.List[expr] class Assign(stmt): targets = ... # type: typing.List[expr] value = ... # type: expr class AugAssign(stmt): target = ... # type: expr op = ... # type: operator value = ... # type: expr class Print(stmt): dest = ... # type: Optional[expr] values = ... # type: typing.List[expr] nl = ... # type: bool class For(stmt): target = ... # type: expr iter = ... # type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] class While(stmt): test = ... # type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] class If(stmt): test = ... # type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] class With(stmt): context_expr = ... # type: expr optional_vars = ... # type: Optional[expr] body = ... # type: typing.List[stmt] class Raise(stmt): type = ... # type: Optional[expr] inst = ... # type: Optional[expr] tback = ... 
# type: Optional[expr] class TryExcept(stmt): body = ... # type: typing.List[stmt] handlers = ... # type: typing.List[ExceptHandler] orelse = ... # type: typing.List[stmt] class TryFinally(stmt): body = ... # type: typing.List[stmt] finalbody = ... # type: typing.List[stmt] class Assert(stmt): test = ... # type: expr msg = ... # type: Optional[expr] class Import(stmt): names = ... # type: typing.List[alias] class ImportFrom(stmt): module = ... # type: Optional[_identifier] names = ... # type: typing.List[alias] level = ... # type: Optional[int] class Exec(stmt): body = ... # type: expr globals = ... # type: Optional[expr] locals = ... # type: Optional[expr] class Global(stmt): names = ... # type: typing.List[_identifier] class Expr(stmt): value = ... # type: expr class Pass(stmt): ... class Break(stmt): ... class Continue(stmt): ... class slice(AST): ... _slice = slice # this lets us type the variable named 'slice' below class Slice(slice): lower = ... # type: Optional[expr] upper = ... # type: Optional[expr] step = ... # type: Optional[expr] class ExtSlice(slice): dims = ... # type: typing.List[slice] class Index(slice): value = ... # type: expr class Ellipsis(slice): ... class expr(AST): lineno = ... # type: int col_offset = ... # type: int class BoolOp(expr): op = ... # type: boolop values = ... # type: typing.List[expr] class BinOp(expr): left = ... # type: expr op = ... # type: operator right = ... # type: expr class UnaryOp(expr): op = ... # type: unaryop operand = ... # type: expr class Lambda(expr): args = ... # type: arguments body = ... # type: expr class IfExp(expr): test = ... # type: expr body = ... # type: expr orelse = ... # type: expr class Dict(expr): keys = ... # type: typing.List[expr] values = ... # type: typing.List[expr] class Set(expr): elts = ... # type: typing.List[expr] class ListComp(expr): elt = ... # type: expr generators = ... # type: typing.List[comprehension] class SetComp(expr): elt = ... # type: expr generators = ... # type: typing.List[comprehension] class DictComp(expr): key = ... # type: expr value = ... # type: expr generators = ... # type: typing.List[comprehension] class GeneratorExp(expr): elt = ... # type: expr generators = ... # type: typing.List[comprehension] class Yield(expr): value = ... # type: Optional[expr] class Compare(expr): left = ... # type: expr ops = ... # type: typing.List[cmpop] comparators = ... # type: typing.List[expr] class Call(expr): func = ... # type: expr args = ... # type: typing.List[expr] keywords = ... # type: typing.List[keyword] starargs = ... # type: Optional[expr] kwargs = ... # type: Optional[expr] class Repr(expr): value = ... # type: expr class Num(expr): n = ... # type: Union[int, float] class Str(expr): s = ... # type: str class Attribute(expr): value = ... # type: expr attr = ... # type: _identifier ctx = ... # type: expr_context class Subscript(expr): value = ... # type: expr slice = ... # type: _slice ctx = ... # type: expr_context class Name(expr): id = ... # type: _identifier ctx = ... # type: expr_context class List(expr): elts = ... # type: typing.List[expr] ctx = ... # type: expr_context class Tuple(expr): elts = ... # type: typing.List[expr] ctx = ... # type: expr_context class expr_context(AST): ... class AugLoad(expr_context): ... class AugStore(expr_context): ... class Del(expr_context): ... class Load(expr_context): ... class Param(expr_context): ... class Store(expr_context): ... class boolop(AST): ... class And(boolop): ... class Or(boolop): ... class operator(AST): ... class Add(operator): ... 
class BitAnd(operator): ... class BitOr(operator): ... class BitXor(operator): ... class Div(operator): ... class FloorDiv(operator): ... class LShift(operator): ... class Mod(operator): ... class Mult(operator): ... class Pow(operator): ... class RShift(operator): ... class Sub(operator): ... class unaryop(AST): ... class Invert(unaryop): ... class Not(unaryop): ... class UAdd(unaryop): ... class USub(unaryop): ... class cmpop(AST): ... class Eq(cmpop): ... class Gt(cmpop): ... class GtE(cmpop): ... class In(cmpop): ... class Is(cmpop): ... class IsNot(cmpop): ... class Lt(cmpop): ... class LtE(cmpop): ... class NotEq(cmpop): ... class NotIn(cmpop): ... class comprehension(AST): target = ... # type: expr iter = ... # type: expr ifs = ... # type: typing.List[expr] class ExceptHandler(AST): type = ... # type: Optional[expr] name = ... # type: Optional[expr] body = ... # type: typing.List[stmt] lineno = ... # type: int col_offset = ... # type: int class arguments(AST): args = ... # type: typing.List[expr] vararg = ... # type: Optional[_identifier] kwarg = ... # type: Optional[_identifier] defaults = ... # type: typing.List[expr] class keyword(AST): arg = ... # type: _identifier value = ... # type: expr class alias(AST): name = ... # type: _identifier asname = ... # type: Optional[_identifier] mypy-0.560/typeshed/stdlib/2/_collections.pyi0000644€tŠÔÚ€2›s®0000000301713215007212025335 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_collections' module.""" from typing import Any, Generic, Iterator, TypeVar, Optional, Union class defaultdict(dict): default_factory = ... # type: None def __init__(self, default: Any = ..., init: Any = ...) -> None: ... def __missing__(self, key) -> Any: raise KeyError() def __copy__(self) -> "defaultdict": ... def copy(self) -> "defaultdict": ... _T = TypeVar('_T') _T2 = TypeVar('_T2') class deque(Generic[_T]): maxlen = ... # type: Optional[int] def __init__(self, iterable: Iterator[_T] = ..., maxlen: int = ...) -> None: ... def append(self, x: _T) -> None: ... def appendleft(self, x: _T) -> None: ... def clear(self) -> None: ... def count(self, x: Any) -> int: ... def extend(self, iterable: Iterator[_T]) -> None: ... def extendleft(self, iterable: Iterator[_T]) -> None: ... def pop(self) -> _T: raise IndexError() def popleft(self) -> _T: raise IndexError() def remove(self, value: _T) -> None: raise IndexError() def reverse(self) -> None: ... def rotate(self, n: int = ...) -> None: ... def __contains__(self, o: Any) -> bool: ... def __copy__(self) -> "deque[_T]": ... def __getitem__(self, i: int) -> _T: raise IndexError() def __iadd__(self, other: "deque[_T2]") -> "deque[Union[_T, _T2]]": ... def __iter__(self) -> Iterator[_T]: ... def __len__(self) -> int: ... def __reversed__(self) -> Iterator[_T]: ... def __setitem__(self, i: int, x: _T) -> None: ... mypy-0.560/typeshed/stdlib/2/_functools.pyi0000644€tŠÔÚ€2›s®0000000125713215007212025037 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_functools' module.""" from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Tuple, overload _T = TypeVar("_T") _S = TypeVar("_S") @overload def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T]) -> _T: ... @overload def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ... class partial(object): func = ... # type: Callable[..., Any] args = ... # type: Tuple[Any, ...] keywords = ... 
# type: Dict[str, Any] def __init__(self, func: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... mypy-0.560/typeshed/stdlib/2/_hotshot.pyi0000644€tŠÔÚ€2›s®0000000173613215007212024515 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_hotshot' module.""" # This is an autogenerated file. It serves as a starting point # for a more precise manual annotation of this module. # Feel free to edit the source below, but remove this header when you do. from typing import Any, List, Tuple, Dict, Generic def coverage(a: str) -> Any: ... def logreader(a: str) -> LogReaderType: raise IOError() raise RuntimeError() def profiler(a: str, *args, **kwargs) -> Any: raise IOError() def resolution() -> tuple: ... class LogReaderType(object): def close(self) -> None: ... def fileno(self) -> int: raise ValueError() class ProfilerType(object): def addinfo(self, a: str, b: str) -> None: ... def close(self) -> None: ... def fileno(self) -> int: raise ValueError() def runcall(self, *args, **kwargs) -> Any: ... def runcode(self, a, b, *args, **kwargs) -> Any: raise TypeError() def start(self) -> None: ... def stop(self) -> None: ... mypy-0.560/typeshed/stdlib/2/_io.pyi0000644€tŠÔÚ€2›s®0000001504313215007212023430 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, AnyStr, BinaryIO, IO, Text, TextIO, Iterable, Iterator, List, Optional, Type, Tuple, TypeVar, Union from types import TracebackType DEFAULT_BUFFER_SIZE = ... # type: int class BlockingIOError(IOError): characters_written = ... # type: int class UnsupportedOperation(ValueError, IOError): ... _T = TypeVar("_T") class _IOBase(BinaryIO): @property def closed(self) -> bool: ... def _checkClosed(self) -> None: ... def _checkReadable(self) -> None: ... def _checkSeekable(self) -> None: ... def _checkWritable(self) -> None: ... # All these methods are concrete here (you can instantiate this) def close(self) -> None: ... def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def readable(self) -> bool: ... def seek(self, offset: int, whence: int = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def writable(self) -> bool: ... def __enter__(self: _T) -> _T: ... def __exit__(self, t: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[Any]) -> bool: ... def __iter__(self: _T) -> _T: ... # The parameter type of writelines[s]() is determined by that of write(): def writelines(self, lines: Iterable[bytes]) -> None: ... # The return type of readline[s]() and next() is determined by that of read(): def readline(self, limit: int = ...) -> bytes: ... def readlines(self, hint: int = ...) -> list[bytes]: ... def next(self) -> bytes: ... class _BufferedIOBase(_IOBase): def read1(self, n: int) -> bytes: ... def read(self, size: int = ...) -> bytes: ... def readinto(self, buffer: bytearray) -> int: ... def write(self, s: bytes) -> int: ... def detach(self) -> _IOBase: ... class BufferedRWPair(_BufferedIOBase): def __init__(self, reader: _RawIOBase, writer: _RawIOBase, buffer_size: int = ..., max_buffer_size: int = ...) -> None: ... def peek(self, n: int = ...) -> bytes: ... def __enter__(self) -> BufferedRWPair: ... class BufferedRandom(_BufferedIOBase): mode = ... # type: str name = ... # type: str raw = ... # type: _IOBase def __init__(self, raw: _IOBase, buffer_size: int = ..., max_buffer_size: int = ...) -> None: ... 
def peek(self, n: int = ...) -> bytes: ... class BufferedReader(_BufferedIOBase): mode = ... # type: str name = ... # type: str raw = ... # type: _IOBase def __init__(self, raw: _IOBase, buffer_size: int = ...) -> None: ... def peek(self, n: int = ...) -> bytes: ... class BufferedWriter(_BufferedIOBase): name = ... # type: str raw = ... # type: _IOBase mode = ... # type: str def __init__(self, raw: _IOBase, buffer_size: int = ..., max_buffer_size: int = ...) -> None: ... class BytesIO(_BufferedIOBase): def __init__(self, initial_bytes: bytes = ...) -> None: ... def __setstate__(self, tuple) -> None: ... def __getstate__(self) -> tuple: ... def getvalue(self) -> bytes: ... def write(self, s: bytes) -> int: ... def writelines(self, lines: Iterable[bytes]) -> None: ... def read1(self, size: int) -> bytes: ... def next(self) -> bytes: ... class _RawIOBase(_IOBase): def readall(self) -> str: ... def read(self, n: int = ...) -> str: ... class FileIO(_RawIOBase, BytesIO): # type: ignore # for __enter__ mode = ... # type: str closefd = ... # type: bool def __init__(self, file: str, mode: str = ..., closefd: bool = ...) -> None: ... def readinto(self, buffer: bytearray)-> int: ... def write(self, pbuf: str) -> int: ... class IncrementalNewlineDecoder(object): newlines = ... # type: Union[str, unicode] def __init__(self, decoder, translate, z=...) -> None: ... def decode(self, input, final) -> Any: ... def getstate(self) -> Tuple[Any, int]: ... def setstate(self, state: Tuple[Any, int]) -> None: ... def reset(self) -> None: ... # Note: In the actual _io.py, _TextIOBase inherits from _IOBase. class _TextIOBase(TextIO): errors = ... # type: Optional[str] # TODO: On _TextIOBase, this is always None. But it's unicode/bytes in subclasses. newlines = ... # type: Union[None, unicode, bytes] encoding = ... # type: str @property def closed(self) -> bool: ... def _checkClosed(self) -> None: ... def _checkReadable(self) -> None: ... def _checkSeekable(self) -> None: ... def _checkWritable(self) -> None: ... def close(self) -> None: ... def detach(self) -> IO: ... def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def next(self) -> unicode: ... def read(self, size: int = ...) -> unicode: ... def readable(self) -> bool: ... def readline(self, limit: int = ...) -> unicode: ... def readlines(self, hint: int = ...) -> list[unicode]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def writable(self) -> bool: ... def write(self, pbuf: unicode) -> int: ... def writelines(self, lines: Iterable[unicode]) -> None: ... def __enter__(self: _T) -> _T: ... def __exit__(self, t: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[Any]) -> bool: ... def __iter__(self: _T) -> _T: ... class StringIO(_TextIOBase): line_buffering = ... # type: bool def __init__(self, initial_value: Optional[unicode] = ..., newline: Optional[unicode] = ...) -> None: ... def __setstate__(self, state: tuple) -> None: ... def __getstate__(self) -> tuple: ... def getvalue(self) -> unicode: ... class TextIOWrapper(_TextIOBase): name = ... # type: str line_buffering = ... # type: bool buffer = ... # type: BinaryIO _CHUNK_SIZE = ... # type: int def __init__(self, buffer: IO, encoding: Optional[Text] = ..., errors: Optional[Text] = ..., newline: Optional[Text] = ..., line_buffering: bool = ..., write_through: bool = ...) -> None: ... 
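# Illustrative note, not part of the original stub: these classes are
# normally used through the ``io`` module, which re-exports them.  A
# hypothetical sketch of the layering described by the stubs above:
#
#     import io
#     raw = io.BytesIO(b'caf\xc3\xa9')                # buffered bytes stream
#     text = io.TextIOWrapper(raw, encoding='utf-8')  # decodes to unicode
#     text.read()                                     # -> u'caf\xe9', per _TextIOBase.read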
def open(file: Union[str, unicode, int], mode: unicode = ..., buffering: int = ..., encoding: Optional[Text] = ..., errors: Optional[Text] = ..., newline: Optional[Text] = ..., closefd: bool = ...) -> IO[Any]: ... mypy-0.560/typeshed/stdlib/2/_json.pyi0000644€tŠÔÚ€2›s®0000000076213215007212023774 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_json' module.""" # This is an autogenerated file. It serves as a starting point # for a more precise manual annotation of this module. # Feel free to edit the source below, but remove this header when you do. from typing import Any, List, Tuple, Dict, Generic def encode_basestring_ascii(*args, **kwargs) -> str: raise TypeError() def scanstring(a, b, *args, **kwargs) -> tuple: raise TypeError() class Encoder(object): pass class Scanner(object): pass mypy-0.560/typeshed/stdlib/2/_md5.pyi0000644€tŠÔÚ€2›s®0000000056413215007212023510 0ustar jukkaDROPBOX\Domain Users00000000000000blocksize = ... # type: int digest_size = ... # type: int class MD5Type(object): name = ... # type: str block_size = ... # type: int digest_size = ... # type: int def copy(self) -> "MD5Type": ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def update(self, arg: str) -> None: ... def new(arg: str = ...) -> MD5Type: ... mypy-0.560/typeshed/stdlib/2/_sha.pyi0000644€tŠÔÚ€2›s®0000000070013215007212023566 0ustar jukkaDROPBOX\Domain Users00000000000000blocksize = ... # type: int block_size = ... # type: int digest_size = ... # type: int class sha(object): # not actually exposed name = ... # type: str block_size = ... # type: int digest_size = ... # type: int digestsize = ... # type: int def copy(self) -> "sha": ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def update(self, arg: str) -> None: ... def new(arg: str = ...) -> sha: ... mypy-0.560/typeshed/stdlib/2/_sha256.pyi0000644€tŠÔÚ€2›s®0000000135413215007212024031 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional class sha224(object): name = ... # type: str block_size = ... # type: int digest_size = ... # type: int digestsize = ... # type: int def __init__(self, init: Optional[str]) -> None: ... def copy(self) -> "sha224": ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def update(self, arg: str) -> None: ... class sha256(object): name = ... # type: str block_size = ... # type: int digest_size = ... # type: int digestsize = ... # type: int def __init__(self, init: Optional[str]) -> None: ... def copy(self) -> "sha256": ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def update(self, arg: str) -> None: ... mypy-0.560/typeshed/stdlib/2/_sha512.pyi0000644€tŠÔÚ€2›s®0000000135413215007212024024 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional class sha384(object): name = ... # type: str block_size = ... # type: int digest_size = ... # type: int digestsize = ... # type: int def __init__(self, init: Optional[str]) -> None: ... def copy(self) -> "sha384": ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def update(self, arg: str) -> None: ... class sha512(object): name = ... # type: str block_size = ... # type: int digest_size = ... # type: int digestsize = ... # type: int def __init__(self, init: Optional[str]) -> None: ... def copy(self) -> "sha512": ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def update(self, arg: str) -> None: ... 
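# Illustrative note, not part of the original stub: _md5, _sha, _sha256 and
# _sha512 above all expose the same hash-object protocol
# (update/digest/hexdigest/copy), normally reached through hashlib.
# A hypothetical sketch:
#
#     import hashlib
#     h = hashlib.sha256()
#     h.update('abc')     # str argument in Python 2, per update(arg: str) above
#     h.hexdigest()       # 64-character hex digest; copy() forks the running state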
mypy-0.560/typeshed/stdlib/2/_socket.pyi0000644€tŠÔÚ€2›s®0000002272313215007212024314 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple, Union, IO, Any, Optional, overload AF_APPLETALK = ... # type: int AF_ASH = ... # type: int AF_ATMPVC = ... # type: int AF_ATMSVC = ... # type: int AF_AX25 = ... # type: int AF_BLUETOOTH = ... # type: int AF_BRIDGE = ... # type: int AF_DECnet = ... # type: int AF_ECONET = ... # type: int AF_INET = ... # type: int AF_INET6 = ... # type: int AF_IPX = ... # type: int AF_IRDA = ... # type: int AF_KEY = ... # type: int AF_LLC = ... # type: int AF_NETBEUI = ... # type: int AF_NETLINK = ... # type: int AF_NETROM = ... # type: int AF_PACKET = ... # type: int AF_PPPOX = ... # type: int AF_ROSE = ... # type: int AF_ROUTE = ... # type: int AF_SECURITY = ... # type: int AF_SNA = ... # type: int AF_TIPC = ... # type: int AF_UNIX = ... # type: int AF_UNSPEC = ... # type: int AF_WANPIPE = ... # type: int AF_X25 = ... # type: int AI_ADDRCONFIG = ... # type: int AI_ALL = ... # type: int AI_CANONNAME = ... # type: int AI_NUMERICHOST = ... # type: int AI_NUMERICSERV = ... # type: int AI_PASSIVE = ... # type: int AI_V4MAPPED = ... # type: int BDADDR_ANY = ... # type: str BDADDR_LOCAL = ... # type: str BTPROTO_HCI = ... # type: int BTPROTO_L2CAP = ... # type: int BTPROTO_RFCOMM = ... # type: int BTPROTO_SCO = ... # type: int EAI_ADDRFAMILY = ... # type: int EAI_AGAIN = ... # type: int EAI_BADFLAGS = ... # type: int EAI_FAIL = ... # type: int EAI_FAMILY = ... # type: int EAI_MEMORY = ... # type: int EAI_NODATA = ... # type: int EAI_NONAME = ... # type: int EAI_OVERFLOW = ... # type: int EAI_SERVICE = ... # type: int EAI_SOCKTYPE = ... # type: int EAI_SYSTEM = ... # type: int EBADF = ... # type: int EINTR = ... # type: int HCI_DATA_DIR = ... # type: int HCI_FILTER = ... # type: int HCI_TIME_STAMP = ... # type: int INADDR_ALLHOSTS_GROUP = ... # type: int INADDR_ANY = ... # type: int INADDR_BROADCAST = ... # type: int INADDR_LOOPBACK = ... # type: int INADDR_MAX_LOCAL_GROUP = ... # type: int INADDR_NONE = ... # type: int INADDR_UNSPEC_GROUP = ... # type: int IPPORT_RESERVED = ... # type: int IPPORT_USERRESERVED = ... # type: int IPPROTO_AH = ... # type: int IPPROTO_DSTOPTS = ... # type: int IPPROTO_EGP = ... # type: int IPPROTO_ESP = ... # type: int IPPROTO_FRAGMENT = ... # type: int IPPROTO_GRE = ... # type: int IPPROTO_HOPOPTS = ... # type: int IPPROTO_ICMP = ... # type: int IPPROTO_ICMPV6 = ... # type: int IPPROTO_IDP = ... # type: int IPPROTO_IGMP = ... # type: int IPPROTO_IP = ... # type: int IPPROTO_IPIP = ... # type: int IPPROTO_IPV6 = ... # type: int IPPROTO_NONE = ... # type: int IPPROTO_PIM = ... # type: int IPPROTO_PUP = ... # type: int IPPROTO_RAW = ... # type: int IPPROTO_ROUTING = ... # type: int IPPROTO_RSVP = ... # type: int IPPROTO_TCP = ... # type: int IPPROTO_TP = ... # type: int IPPROTO_UDP = ... # type: int IPV6_CHECKSUM = ... # type: int IPV6_DSTOPTS = ... # type: int IPV6_HOPLIMIT = ... # type: int IPV6_HOPOPTS = ... # type: int IPV6_JOIN_GROUP = ... # type: int IPV6_LEAVE_GROUP = ... # type: int IPV6_MULTICAST_HOPS = ... # type: int IPV6_MULTICAST_IF = ... # type: int IPV6_MULTICAST_LOOP = ... # type: int IPV6_NEXTHOP = ... # type: int IPV6_PKTINFO = ... # type: int IPV6_RECVDSTOPTS = ... # type: int IPV6_RECVHOPLIMIT = ... # type: int IPV6_RECVHOPOPTS = ... # type: int IPV6_RECVPKTINFO = ... # type: int IPV6_RECVRTHDR = ... # type: int IPV6_RECVTCLASS = ... # type: int IPV6_RTHDR = ... # type: int IPV6_RTHDRDSTOPTS = ... 
# type: int IPV6_RTHDR_TYPE_0 = ... # type: int IPV6_TCLASS = ... # type: int IPV6_UNICAST_HOPS = ... # type: int IPV6_V6ONLY = ... # type: int IP_ADD_MEMBERSHIP = ... # type: int IP_DEFAULT_MULTICAST_LOOP = ... # type: int IP_DEFAULT_MULTICAST_TTL = ... # type: int IP_DROP_MEMBERSHIP = ... # type: int IP_HDRINCL = ... # type: int IP_MAX_MEMBERSHIPS = ... # type: int IP_MULTICAST_IF = ... # type: int IP_MULTICAST_LOOP = ... # type: int IP_MULTICAST_TTL = ... # type: int IP_OPTIONS = ... # type: int IP_RECVOPTS = ... # type: int IP_RECVRETOPTS = ... # type: int IP_RETOPTS = ... # type: int IP_TOS = ... # type: int IP_TTL = ... # type: int MSG_CTRUNC = ... # type: int MSG_DONTROUTE = ... # type: int MSG_DONTWAIT = ... # type: int MSG_EOR = ... # type: int MSG_OOB = ... # type: int MSG_PEEK = ... # type: int MSG_TRUNC = ... # type: int MSG_WAITALL = ... # type: int MethodType = ... # type: type NETLINK_DNRTMSG = ... # type: int NETLINK_FIREWALL = ... # type: int NETLINK_IP6_FW = ... # type: int NETLINK_NFLOG = ... # type: int NETLINK_ROUTE = ... # type: int NETLINK_USERSOCK = ... # type: int NETLINK_XFRM = ... # type: int NI_DGRAM = ... # type: int NI_MAXHOST = ... # type: int NI_MAXSERV = ... # type: int NI_NAMEREQD = ... # type: int NI_NOFQDN = ... # type: int NI_NUMERICHOST = ... # type: int NI_NUMERICSERV = ... # type: int PACKET_BROADCAST = ... # type: int PACKET_FASTROUTE = ... # type: int PACKET_HOST = ... # type: int PACKET_LOOPBACK = ... # type: int PACKET_MULTICAST = ... # type: int PACKET_OTHERHOST = ... # type: int PACKET_OUTGOING = ... # type: int PF_PACKET = ... # type: int SHUT_RD = ... # type: int SHUT_RDWR = ... # type: int SHUT_WR = ... # type: int SOCK_DGRAM = ... # type: int SOCK_RAW = ... # type: int SOCK_RDM = ... # type: int SOCK_SEQPACKET = ... # type: int SOCK_STREAM = ... # type: int SOL_HCI = ... # type: int SOL_IP = ... # type: int SOL_SOCKET = ... # type: int SOL_TCP = ... # type: int SOL_TIPC = ... # type: int SOL_UDP = ... # type: int SOMAXCONN = ... # type: int SO_ACCEPTCONN = ... # type: int SO_BROADCAST = ... # type: int SO_DEBUG = ... # type: int SO_DONTROUTE = ... # type: int SO_ERROR = ... # type: int SO_KEEPALIVE = ... # type: int SO_LINGER = ... # type: int SO_OOBINLINE = ... # type: int SO_RCVBUF = ... # type: int SO_RCVLOWAT = ... # type: int SO_RCVTIMEO = ... # type: int SO_REUSEADDR = ... # type: int SO_REUSEPORT = ... # type: int SO_SNDBUF = ... # type: int SO_SNDLOWAT = ... # type: int SO_SNDTIMEO = ... # type: int SO_TYPE = ... # type: int SSL_ERROR_EOF = ... # type: int SSL_ERROR_INVALID_ERROR_CODE = ... # type: int SSL_ERROR_SSL = ... # type: int SSL_ERROR_SYSCALL = ... # type: int SSL_ERROR_WANT_CONNECT = ... # type: int SSL_ERROR_WANT_READ = ... # type: int SSL_ERROR_WANT_WRITE = ... # type: int SSL_ERROR_WANT_X509_LOOKUP = ... # type: int SSL_ERROR_ZERO_RETURN = ... # type: int TCP_CORK = ... # type: int TCP_DEFER_ACCEPT = ... # type: int TCP_INFO = ... # type: int TCP_KEEPCNT = ... # type: int TCP_KEEPIDLE = ... # type: int TCP_KEEPINTVL = ... # type: int TCP_LINGER2 = ... # type: int TCP_MAXSEG = ... # type: int TCP_NODELAY = ... # type: int TCP_QUICKACK = ... # type: int TCP_SYNCNT = ... # type: int TCP_WINDOW_CLAMP = ... # type: int TIPC_ADDR_ID = ... # type: int TIPC_ADDR_NAME = ... # type: int TIPC_ADDR_NAMESEQ = ... # type: int TIPC_CFG_SRV = ... # type: int TIPC_CLUSTER_SCOPE = ... # type: int TIPC_CONN_TIMEOUT = ... # type: int TIPC_CRITICAL_IMPORTANCE = ... # type: int TIPC_DEST_DROPPABLE = ... # type: int TIPC_HIGH_IMPORTANCE = ... 
# type: int TIPC_IMPORTANCE = ... # type: int TIPC_LOW_IMPORTANCE = ... # type: int TIPC_MEDIUM_IMPORTANCE = ... # type: int TIPC_NODE_SCOPE = ... # type: int TIPC_PUBLISHED = ... # type: int TIPC_SRC_DROPPABLE = ... # type: int TIPC_SUBSCR_TIMEOUT = ... # type: int TIPC_SUB_CANCEL = ... # type: int TIPC_SUB_PORTS = ... # type: int TIPC_SUB_SERVICE = ... # type: int TIPC_TOP_SRV = ... # type: int TIPC_WAIT_FOREVER = ... # type: int TIPC_WITHDRAWN = ... # type: int TIPC_ZONE_SCOPE = ... # type: int # PyCapsule CAPI = ... # type: Any has_ipv6 = ... # type: bool class error(IOError): ... class gaierror(error): ... class timeout(error): ... class SocketType(object): family = ... # type: int type = ... # type: int proto = ... # type: int timeout = ... # type: float def __init__(self, family: int = ..., type: int = ..., proto: int = ...) -> None: ... def accept(self) -> Tuple['SocketType', tuple]: ... def bind(self, address: tuple) -> None: ... def close(self) -> None: ... def connect(self, address: tuple) -> None: raise gaierror raise timeout def connect_ex(self, address: tuple) -> int: ... def dup(self) -> "SocketType": ... def fileno(self) -> int: ... def getpeername(self) -> tuple: ... def getsockname(self) -> tuple: ... def getsockopt(self, level: int, option: int, buffersize: int = ...) -> str: ... def gettimeout(self) -> float: ... def listen(self, backlog: int) -> None: raise error def makefile(self, mode: str = ..., buffersize: int = ...) -> IO[Any]: ... def recv(self, buffersize: int, flags: int = ...) -> str: ... def recv_into(self, buffer: bytearray, nbytes: int = ..., flags: int = ...) -> int: ... def recvfrom(self, buffersize: int, flags: int = ...) -> tuple: raise error def recvfrom_into(self, buffer: bytearray, nbytes: int = ..., flags: int = ...) -> int: ... def send(self, data: str, flags: int =...) -> int: ... def sendall(self, data: str, flags: int = ...) -> None: ... @overload def sendto(self, data: str, address: tuple) -> int: ... @overload def sendto(self, data: str, flags: int, address: tuple) -> int: ... def setblocking(self, flag: bool) -> None: ... def setsockopt(self, level: int, option: int, value: Union[int, str]) -> None: ... def settimeout(self, value: Optional[float]) -> None: ... def shutdown(self, flag: int) -> None: ... mypy-0.560/typeshed/stdlib/2/_sre.pyi0000644€tŠÔÚ€2›s®0000000405313215007212023611 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_sre' module.""" from typing import Any, Union, Iterable, Optional, Mapping, Sequence, Dict, List, Tuple, overload CODESIZE = ... # type: int MAGIC = ... # type: int MAXREPEAT = ... # type: long copyright = ... # type: str class SRE_Match(object): def start(self, group: int = ...) -> int: raise IndexError() def end(self, group: int = ...) -> int: raise IndexError() def expand(self, s: str) -> Any: ... @overload def group(self) -> str: ... @overload def group(self, group: int = ...) -> Optional[str]: ... def groupdict(self) -> Dict[int, Optional[str]]: ... def groups(self) -> Tuple[Optional[str], ...]: ... def span(self) -> Tuple[int, int]: raise IndexError() class SRE_Scanner(object): pattern = ... # type: str def match(self) -> SRE_Match: ... def search(self) -> SRE_Match: ... class SRE_Pattern(object): pattern = ... # type: str flags = ... # type: int groups = ... # type: int groupindex = ... # type: Mapping[str, int] indexgroup = ... # type: Sequence[int] def findall(self, source: str, pos: int = ..., endpos: int = ...) -> List[Union[tuple, str]]: ... 
def finditer(self, source: str, pos: int = ..., endpos: int = ...) -> Iterable[Union[tuple, str]]: ... def match(self, pattern, pos: int = ..., endpos: int = ...) -> SRE_Match: ... def scanner(self, s: str, start: int = ..., end: int = ...) -> SRE_Scanner: ... def search(self, pattern, pos: int = ..., endpos: int = ...) -> SRE_Match: ... def split(self, source: str, maxsplit: int = ...) -> List[Optional[str]]: ... def sub(self, repl: str, string: str, count: int = ...) -> tuple: ... def subn(self, repl: str, string: str, count: int = ...) -> tuple: ... def compile(pattern: str, flags: int, code: List[int], groups: int = ..., groupindex: Mapping[str, int] = ..., indexgroup: Sequence[int] = ...) -> SRE_Pattern: raise OverflowError() def getcodesize() -> int: ... def getlower(a: int, b: int) -> int: ... mypy-0.560/typeshed/stdlib/2/_struct.pyi0000644€tŠÔÚ€2›s®0000000150713215007212024345 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_struct' module.""" from typing import Any, AnyStr, Tuple class error(Exception): ... class Struct(object): size = ... # type: int format = ... # type: str def __init__(self, fmt: str) -> None: ... def pack_into(self, buffer: bytearray, offset: int, obj: Any) -> None: ... def pack(self, *args) -> str: ... def unpack(self, s: str) -> Tuple[Any, ...]: ... def unpack_from(self, buffer: bytearray, offset: int = ...) -> Tuple[Any, ...]: ... def _clearcache() -> None: ... def calcsize(fmt: str) -> int: ... def pack(fmt: AnyStr, obj: Any) -> str: ... def pack_into(fmt: AnyStr, buffer: bytearray, offset: int, obj: Any) -> None: ... def unpack(fmt: AnyStr, data: str) -> Tuple[Any, ...]: ... def unpack_from(fmt: AnyStr, buffer: bytearray, offset: int = ...) -> Tuple[Any, ...]: ... mypy-0.560/typeshed/stdlib/2/_symtable.pyi0000644€tŠÔÚ€2›s®0000000211613215007212024636 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Dict CELL = ... # type: int DEF_BOUND = ... # type: int DEF_FREE = ... # type: int DEF_FREE_CLASS = ... # type: int DEF_GLOBAL = ... # type: int DEF_IMPORT = ... # type: int DEF_LOCAL = ... # type: int DEF_PARAM = ... # type: int FREE = ... # type: int GLOBAL_EXPLICIT = ... # type: int GLOBAL_IMPLICIT = ... # type: int LOCAL = ... # type: int OPT_BARE_EXEC = ... # type: int OPT_EXEC = ... # type: int OPT_IMPORT_STAR = ... # type: int SCOPE_MASK = ... # type: int SCOPE_OFF = ... # type: int TYPE_CLASS = ... # type: int TYPE_FUNCTION = ... # type: int TYPE_MODULE = ... # type: int USE = ... # type: int class _symtable_entry(object): ... class symtable(object): children = ... # type: List[_symtable_entry] id = ... # type: int lineno = ... # type: int name = ... # type: str nested = ... # type: int optimized = ... # type: int symbols = ... # type: Dict[str, int] type = ... # type: int varnames = ... # type: List[str] def __init__(self, src: str, filename: str, startstr: str) -> None: ... mypy-0.560/typeshed/stdlib/2/_threading_local.pyi0000644€tŠÔÚ€2›s®0000000064213215007212026137 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/_threading_local.py from typing import Any, List __all__: List[str] class _localbase(object): ... class local(_localbase): def __getattribute__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def __delattr__(self, name: str) -> None: ... def __del__(self) -> None: ... def _patch(self: local) -> None: ... 
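The _threading_local stub above types the pure-Python fallback for thread-local storage; threading.local exposes the same attribute-based interface, which is all that __getattribute__/__setattr__/__delattr__ can express about it. A small illustrative sketch (the attribute names are made up, and threading.local is used in place of the private module):

import threading

ctx = threading.local()        # same shape as _threading_local.local
ctx.request_id = "abc123"      # per-thread attribute, typed as Any

def worker():
    # A new thread starts with an empty namespace, so the attribute is absent here.
    print getattr(ctx, "request_id", None)

t = threading.Thread(target=worker)
t.start()
t.join()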
mypy-0.560/typeshed/stdlib/2/_warnings.pyi0000644€tŠÔÚ€2›s®0000000075113215007212024651 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List, Optional, Type default_action = ... # type: str filters = ... # type: List[tuple] once_registry = ... # type: dict def warn(message: Warning, category: Optional[Type[Warning]] = ..., stacklevel: int = ...) -> None: ... def warn_explicit(message: Warning, category: Optional[Type[Warning]], filename: str, lineno: int, module: Any = ..., registry: dict = ..., module_globals: dict = ...) -> None: ... mypy-0.560/typeshed/stdlib/2/abc.pyi0000644€tŠÔÚ€2›s®0000000227713215007212023414 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Set, Tuple, Type import _weakrefset # NOTE: mypy has special processing for ABCMeta and abstractmethod. def abstractmethod(funcobj: Any) -> Any: ... class ABCMeta(type): # TODO: FrozenSet __abstractmethods__ = ... # type: Set[Any] _abc_cache = ... # type: _weakrefset.WeakSet _abc_invalidation_counter = ... # type: int _abc_negative_cache = ... # type: _weakrefset.WeakSet _abc_negative_cache_version = ... # type: int _abc_registry = ... # type: _weakrefset.WeakSet def __init__(self, name: str, bases: Tuple[type, ...], namespace: Dict[Any, Any]) -> None: ... def __instancecheck__(cls: "ABCMeta", instance: Any) -> Any: ... def __subclasscheck__(cls: "ABCMeta", subclass: Any) -> Any: ... def _dump_registry(cls: "ABCMeta", *args: Any, **kwargs: Any) -> None: ... def register(cls: "ABCMeta", subclass: Type[Any]) -> None: ... # TODO: The real abc.abstractproperty inherits from "property". class abstractproperty(object): def __new__(cls, func: Any) -> Any: ... __isabstractmethod__ = ... # type: bool doc = ... # type: Any fdel = ... # type: Any fget = ... # type: Any fset = ... # type: Any mypy-0.560/typeshed/stdlib/2/ast.pyi0000644€tŠÔÚ€2›s®0000000206113215007212023445 0ustar jukkaDROPBOX\Domain Users00000000000000# Python 2.7 ast import typing from typing import Any, Iterator, Union from _ast import * from _ast import AST, Module __version__ = ... # type: str PyCF_ONLY_AST = ... # type: int def parse(source: Union[str, unicode], filename: Union[str, unicode] = ..., mode: Union[str, unicode] = ...) -> Module: ... def copy_location(new_node: AST, old_node: AST) -> AST: ... def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ... def fix_missing_locations(node: AST) -> AST: ... def get_docstring(node: AST, clean: bool = ...) -> str: ... def increment_lineno(node: AST, n: int = ...) -> AST: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... def iter_fields(node: AST) -> Iterator[typing.Tuple[str, Any]]: ... def literal_eval(node_or_string: Union[str, unicode, AST]) -> Any: ... def walk(node: AST) -> Iterator[AST]: ... class NodeVisitor(): def visit(self, node: AST) -> Any: ... def generic_visit(self, node: AST) -> None: ... class NodeTransformer(NodeVisitor): def generic_visit(self, node: AST) -> None: ... mypy-0.560/typeshed/stdlib/2/atexit.pyi0000644€tŠÔÚ€2›s®0000000016513215007212024157 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar, Any _FT = TypeVar('_FT') def register(func: _FT, *args: Any, **kargs: Any) -> _FT: ... 
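The atexit stub above gives register the type (func: _FT, *args, **kargs) -> _FT: it returns the callable it was handed, so both the plain call form and decorator form type-check. An illustrative sketch, not part of the distribution:

import atexit

def close_log(path):
    print "closing", path

atexit.register(close_log, "/tmp/app.log")   # extra args are passed through at exit

@atexit.register
def goodbye():
    print "bye"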
mypy-0.560/typeshed/stdlib/2/BaseHTTPServer.pyi0000644€tŠÔÚ€2›s®0000000376613215007212025434 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for BaseHTTPServer (Python 2.7) from typing import Any, BinaryIO, Mapping, Optional, Tuple, Union import SocketServer import mimetools class HTTPServer(SocketServer.TCPServer): server_name = ... # type: str server_port = ... # type: int def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: type) -> None: ... class BaseHTTPRequestHandler: client_address = ... # type: Tuple[str, int] server = ... # type: SocketServer.BaseServer close_connection = ... # type: bool command = ... # type: str path = ... # type: str request_version = ... # type: str headers = ... # type: mimetools.Message rfile = ... # type: BinaryIO wfile = ... # type: BinaryIO server_version = ... # type: str sys_version = ... # type: str error_message_format = ... # type: str error_content_type = ... # type: str protocol_version = ... # type: str MessageClass = ... # type: type responses = ... # type: Mapping[int, Tuple[str, str]] def __init__(self, request: bytes, client_address: Tuple[str, int], server: SocketServer.BaseServer) -> None: ... def handle(self) -> None: ... def handle_one_request(self) -> None: ... def send_error(self, code: int, message: Optional[str] = ...) -> None: ... def send_response(self, code: int, message: Optional[str] = ...) -> None: ... def send_header(self, keyword: str, value: str) -> None: ... def end_headers(self) -> None: ... def flush_headers(self) -> None: ... def log_request(self, code: Union[int, str] = ..., size: Union[int, str] = ...) -> None: ... def log_error(self, format: str, *args: Any) -> None: ... def log_message(self, format: str, *args: Any) -> None: ... def version_string(self) -> str: ... def date_time_string(self, timestamp: Optional[int] = ...) -> str: ... def log_date_time_string(self) -> str: ... def address_string(self) -> str: ... mypy-0.560/typeshed/stdlib/2/builtins.pyi0000644€tŠÔÚ€2›s®0000012232013215007212024510 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for builtins (Python 2.7) # True and False are deliberately omitted because they are keywords in # Python 3, and stub files conform to Python 3 syntax. from typing import ( TypeVar, Iterator, Iterable, overload, Sequence, Mapping, Tuple, List, Any, Dict, Callable, Generic, Set, AbstractSet, FrozenSet, Sized, Reversible, SupportsInt, SupportsFloat, SupportsAbs, SupportsRound, IO, BinaryIO, Union, AnyStr, MutableSequence, MutableMapping, MutableSet, ItemsView, KeysView, ValuesView, Optional, Container, Type ) from abc import abstractmethod, ABCMeta from mypy_extensions import NoReturn _T = TypeVar('_T') _T_co = TypeVar('_T_co', covariant=True) _KT = TypeVar('_KT') _VT = TypeVar('_VT') _S = TypeVar('_S') _T1 = TypeVar('_T1') _T2 = TypeVar('_T2') _T3 = TypeVar('_T3') _T4 = TypeVar('_T4') _T5 = TypeVar('_T5') _TT = TypeVar('_TT', bound='type') class object: __doc__ = ... # type: Optional[str] __class__ = ... # type: type __dict__ = ... # type: Dict[str, Any] __slots__ = ... # type: Optional[Union[str, unicode, Iterable[Union[str, unicode]]]] __module__ = ... # type: str def __init__(self) -> None: ... def __new__(cls) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def __eq__(self, o: object) -> bool: ... def __ne__(self, o: object) -> bool: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __hash__(self) -> int: ... def __format__(self, format_spec: str) -> str: ... 
def __getattribute__(self, name: str) -> Any: ... def __delattr__(self, name: str) -> None: ... def __sizeof__(self) -> int: ... def __reduce__(self) -> tuple: ... def __reduce_ex__(self, protocol: int) -> tuple: ... class staticmethod(object): # Special, only valid as a decorator. __func__ = ... # type: function def __init__(self, f: function) -> None: ... def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ... def __get__(self, obj: _T, type: Optional[Type[_T]]=...) -> function: ... class classmethod(object): # Special, only valid as a decorator. __func__ = ... # type: function def __init__(self, f: function) -> None: ... def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ... def __get__(self, obj: _T, type: Optional[Type[_T]]=...) -> function: ... class type(object): __bases__ = ... # type: Tuple[type, ...] __name__ = ... # type: str __module__ = ... # type: str @overload def __init__(self, o: object) -> None: ... @overload def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: ... # TODO: __new__ may have to be special and not a static method. @overload def __new__(cls, o: object) -> type: ... @overload def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]) -> type: ... def __call__(self, *args: Any, **kwds: Any) -> Any: ... # Only new-style classes __mro__ = ... # type: Tuple[type, ...] # Note: the documentation doesnt specify what the return type is, the standard # implementation seems to be returning a list. def mro(self) -> List[type]: ... def __subclasses__(self: _TT) -> List[_TT]: ... def __instancecheck__(self, instance: Any) -> bool: ... def __subclasscheck__(self, subclass: type) -> bool: ... class int: @overload def __init__(self, x: SupportsInt = ...) -> None: ... @overload def __init__(self, x: Union[str, unicode, bytearray], base: int = ...) -> None: ... def bit_length(self) -> int: ... def __add__(self, x: int) -> int: ... def __sub__(self, x: int) -> int: ... def __mul__(self, x: int) -> int: ... def __floordiv__(self, x: int) -> int: ... def __div__(self, x: int) -> int: ... def __truediv__(self, x: int) -> float: ... def __mod__(self, x: int) -> int: ... def __radd__(self, x: int) -> int: ... def __rsub__(self, x: int) -> int: ... def __rmul__(self, x: int) -> int: ... def __rfloordiv__(self, x: int) -> int: ... def __rdiv__(self, x: int) -> int: ... def __rtruediv__(self, x: int) -> float: ... def __rmod__(self, x: int) -> int: ... def __pow__(self, x: int) -> Any: ... # Return type can be int or float, depending on x. def __rpow__(self, x: int) -> Any: ... def __and__(self, n: int) -> int: ... def __or__(self, n: int) -> int: ... def __xor__(self, n: int) -> int: ... def __lshift__(self, n: int) -> int: ... def __rshift__(self, n: int) -> int: ... def __rand__(self, n: int) -> int: ... def __ror__(self, n: int) -> int: ... def __rxor__(self, n: int) -> int: ... def __rlshift__(self, n: int) -> int: ... def __rrshift__(self, n: int) -> int: ... def __neg__(self) -> int: ... def __pos__(self) -> int: ... def __invert__(self) -> int: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: int) -> bool: ... def __le__(self, x: int) -> bool: ... def __gt__(self, x: int) -> bool: ... def __ge__(self, x: int) -> bool: ... def __str__(self) -> str: ... def __float__(self) -> float: ... def __int__(self) -> int: ... def __abs__(self) -> int: ... def __hash__(self) -> int: ... def __nonzero__(self) -> bool: ... 
class float: def __init__(self, x: Union[SupportsFloat, str, unicode, bytearray] = ...) -> None: ... def as_integer_ratio(self) -> Tuple[int, int]: ... def hex(self) -> str: ... def is_integer(self) -> bool: ... @classmethod def fromhex(cls, s: str) -> float: ... def __add__(self, x: float) -> float: ... def __sub__(self, x: float) -> float: ... def __mul__(self, x: float) -> float: ... def __floordiv__(self, x: float) -> float: ... def __div__(self, x: float) -> float: ... def __truediv__(self, x: float) -> float: ... def __mod__(self, x: float) -> float: ... def __pow__(self, x: float) -> float: ... def __radd__(self, x: float) -> float: ... def __rsub__(self, x: float) -> float: ... def __rmul__(self, x: float) -> float: ... def __rfloordiv__(self, x: float) -> float: ... def __rdiv__(self, x: float) -> float: ... def __rtruediv__(self, x: float) -> float: ... def __rmod__(self, x: float) -> float: ... def __rpow__(self, x: float) -> float: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: float) -> bool: ... def __le__(self, x: float) -> bool: ... def __gt__(self, x: float) -> bool: ... def __ge__(self, x: float) -> bool: ... def __neg__(self) -> float: ... def __pos__(self) -> float: ... def __str__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __abs__(self) -> float: ... def __hash__(self) -> int: ... def __nonzero__(self) -> bool: ... class complex: @overload def __init__(self, re: float = ..., im: float = ...) -> None: ... @overload def __init__(self, s: str) -> None: ... @property def real(self) -> float: ... @property def imag(self) -> float: ... def conjugate(self) -> complex: ... def __add__(self, x: complex) -> complex: ... def __sub__(self, x: complex) -> complex: ... def __mul__(self, x: complex) -> complex: ... def __pow__(self, x: complex) -> complex: ... def __div__(self, x: complex) -> complex: ... def __truediv__(self, x: complex) -> complex: ... def __radd__(self, x: complex) -> complex: ... def __rsub__(self, x: complex) -> complex: ... def __rmul__(self, x: complex) -> complex: ... def __rpow__(self, x: complex) -> complex: ... def __rdiv__(self, x: complex) -> complex: ... def __rtruediv__(self, x: complex) -> complex: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __neg__(self) -> complex: ... def __pos__(self) -> complex: ... def __str__(self) -> str: ... def __abs__(self) -> float: ... def __hash__(self) -> int: ... def __nonzero__(self) -> bool: ... class super(object): @overload def __init__(self, t: Any, obj: Any) -> None: ... @overload def __init__(self, t: Any) -> None: ... class basestring(metaclass=ABCMeta): ... class unicode(basestring, Sequence[unicode]): @overload def __init__(self) -> None: ... @overload def __init__(self, o: object) -> None: ... @overload def __init__(self, o: str, encoding: unicode = ..., errors: unicode = ...) -> None: ... def capitalize(self) -> unicode: ... def center(self, width: int, fillchar: unicode = ...) -> unicode: ... def count(self, x: unicode) -> int: ... def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ... def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ... def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]], start: int = ..., end: int = ...) -> bool: ... def expandtabs(self, tabsize: int = ...) -> unicode: ... def find(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... 
def format(self, *args: Any, **kwargs: Any) -> unicode: ... def format_map(self, map: Mapping[unicode, Any]) -> unicode: ... def index(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... def isdecimal(self) -> bool: ... def isdigit(self) -> bool: ... def isidentifier(self) -> bool: ... def islower(self) -> bool: ... def isnumeric(self) -> bool: ... def isprintable(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, iterable: Iterable[unicode]) -> unicode: ... def ljust(self, width: int, fillchar: unicode = ...) -> unicode: ... def lower(self) -> unicode: ... def lstrip(self, chars: unicode = ...) -> unicode: ... def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... def replace(self, old: unicode, new: unicode, count: int = ...) -> unicode: ... def rfind(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rindex(self, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rjust(self, width: int, fillchar: unicode = ...) -> unicode: ... def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... def rsplit(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ... def rstrip(self, chars: unicode = ...) -> unicode: ... def split(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ... def splitlines(self, keepends: bool = ...) -> List[unicode]: ... def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]], start: int = ..., end: int = ...) -> bool: ... def strip(self, chars: unicode = ...) -> unicode: ... def swapcase(self) -> unicode: ... def title(self) -> unicode: ... def translate(self, table: Union[Dict[int, Any], unicode]) -> unicode: ... def upper(self) -> unicode: ... def zfill(self, width: int) -> unicode: ... @overload def __getitem__(self, i: int) -> unicode: ... @overload def __getitem__(self, s: slice) -> unicode: ... def __getslice__(self, start: int, stop: int) -> unicode: ... def __add__(self, s: unicode) -> unicode: ... def __mul__(self, n: int) -> unicode: ... def __rmul__(self, n: int) -> unicode: ... def __mod__(self, x: Any) -> unicode: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: unicode) -> bool: ... def __le__(self, x: unicode) -> bool: ... def __gt__(self, x: unicode) -> bool: ... def __ge__(self, x: unicode) -> bool: ... def __len__(self) -> int: ... def __contains__(self, s: object) -> bool: ... def __iter__(self) -> Iterator[unicode]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __hash__(self) -> int: ... class str(basestring, Sequence[str]): def __init__(self, object: object = ...) -> None: ... def capitalize(self) -> str: ... def center(self, width: int, fillchar: str = ...) -> str: ... def count(self, x: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ... def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ... def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]]) -> bool: ... def expandtabs(self, tabsize: int = ...) -> str: ... def find(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def format(self, *args: Any, **kwargs: Any) -> str: ... 
def index(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... def isdigit(self) -> bool: ... def islower(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, iterable: Iterable[AnyStr]) -> AnyStr: ... def ljust(self, width: int, fillchar: str = ...) -> str: ... def lower(self) -> str: ... @overload def lstrip(self, chars: str = ...) -> str: ... @overload def lstrip(self, chars: unicode) -> unicode: ... @overload def partition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ... @overload def partition(self, sep: str) -> Tuple[str, str, str]: ... @overload def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... def replace(self, old: AnyStr, new: AnyStr, count: int = ...) -> AnyStr: ... def rfind(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def rindex(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def rjust(self, width: int, fillchar: str = ...) -> str: ... @overload def rpartition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ... @overload def rpartition(self, sep: str) -> Tuple[str, str, str]: ... @overload def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ... @overload def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... @overload def rsplit(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ... @overload def rstrip(self, chars: str = ...) -> str: ... @overload def rstrip(self, chars: unicode) -> unicode: ... @overload def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... @overload def split(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ... def splitlines(self, keepends: bool = ...) -> List[str]: ... def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]]) -> bool: ... @overload def strip(self, chars: str = ...) -> str: ... @overload def strip(self, chars: unicode) -> unicode: ... def swapcase(self) -> str: ... def title(self) -> str: ... def translate(self, table: Optional[AnyStr], deletechars: AnyStr = ...) -> AnyStr: ... def upper(self) -> str: ... def zfill(self, width: int) -> str: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[str]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __hash__(self) -> int: ... @overload def __getitem__(self, i: int) -> str: ... @overload def __getitem__(self, s: slice) -> str: ... def __getslice__(self, start: int, stop: int) -> str: ... def __add__(self, s: AnyStr) -> AnyStr: ... def __mul__(self, n: int) -> str: ... def __rmul__(self, n: int) -> str: ... def __contains__(self, o: object) -> bool: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: unicode) -> bool: ... def __le__(self, x: unicode) -> bool: ... def __gt__(self, x: unicode) -> bool: ... def __ge__(self, x: unicode) -> bool: ... def __mod__(self, x: Any) -> str: ... class bytearray(MutableSequence[int]): @overload def __init__(self) -> None: ... @overload def __init__(self, x: Union[Iterable[int], str]) -> None: ... @overload def __init__(self, x: unicode, encoding: unicode, errors: unicode = ...) -> None: ... @overload def __init__(self, length: int) -> None: ... def capitalize(self) -> bytearray: ... 
def center(self, width: int, fillchar: str = ...) -> bytearray: ... def count(self, x: str) -> int: ... def decode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ... def endswith(self, suffix: Union[str, Tuple[str, ...]]) -> bool: ... def expandtabs(self, tabsize: int = ...) -> bytearray: ... def find(self, sub: str, start: int = ..., end: int = ...) -> int: ... def index(self, sub: str, start: int = ..., end: int = ...) -> int: ... def insert(self, index: int, object: int) -> None: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... def isdigit(self) -> bool: ... def islower(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, iterable: Iterable[str]) -> bytearray: ... def ljust(self, width: int, fillchar: str = ...) -> bytearray: ... def lower(self) -> bytearray: ... def lstrip(self, chars: str = ...) -> bytearray: ... def partition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ... def replace(self, old: str, new: str, count: int = ...) -> bytearray: ... def rfind(self, sub: str, start: int = ..., end: int = ...) -> int: ... def rindex(self, sub: str, start: int = ..., end: int = ...) -> int: ... def rjust(self, width: int, fillchar: str = ...) -> bytearray: ... def rpartition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ... def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[bytearray]: ... def rstrip(self, chars: str = ...) -> bytearray: ... def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[bytearray]: ... def splitlines(self, keepends: bool = ...) -> List[bytearray]: ... def startswith(self, prefix: Union[str, Tuple[str, ...]]) -> bool: ... def strip(self, chars: str = ...) -> bytearray: ... def swapcase(self) -> bytearray: ... def title(self) -> bytearray: ... def translate(self, table: str) -> bytearray: ... def upper(self) -> bytearray: ... def zfill(self, width: int) -> bytearray: ... @staticmethod def fromhex(x: str) -> bytearray: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __hash__(self) -> int: ... @overload def __getitem__(self, i: int) -> int: ... @overload def __getitem__(self, s: slice) -> bytearray: ... def __getslice__(self, start: int, stop: int) -> bytearray: ... @overload def __setitem__(self, i: int, x: int) -> None: ... @overload def __setitem__(self, s: slice, x: Union[Iterable[int], str]) -> None: ... def __setslice__(self, start: int, stop: int, x: Union[Sequence[int], str]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... def __delslice__(self, start: int, stop: int) -> None: ... def __add__(self, s: str) -> bytearray: ... def __mul__(self, n: int) -> bytearray: ... def __contains__(self, o: object) -> bool: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: str) -> bool: ... def __le__(self, x: str) -> bool: ... def __gt__(self, x: str) -> bool: ... def __ge__(self, x: str) -> bool: ... class bool(int): def __init__(self, o: object = ...) -> None: ... class slice(object): start = ... # type: Optional[int] step = ... # type: Optional[int] stop = ... # type: Optional[int] @overload def __init__(self, stop: Optional[int]) -> None: ... @overload def __init__(self, start: Optional[int], stop: Optional[int], step: Optional[int] = ...) -> None: ... 
def indices(self, len: int) -> Tuple[int, int, int]: ... class tuple(Sequence[_T_co], Generic[_T_co]): def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ... def __len__(self) -> int: ... def __contains__(self, x: object) -> bool: ... @overload def __getitem__(self, x: int) -> _T_co: ... @overload def __getitem__(self, x: slice) -> Tuple[_T_co, ...]: ... def __iter__(self) -> Iterator[_T_co]: ... def __lt__(self, x: Tuple[_T_co, ...]) -> bool: ... def __le__(self, x: Tuple[_T_co, ...]) -> bool: ... def __gt__(self, x: Tuple[_T_co, ...]) -> bool: ... def __ge__(self, x: Tuple[_T_co, ...]) -> bool: ... def __add__(self, x: Tuple[_T_co, ...]) -> Tuple[_T_co, ...]: ... def __mul__(self, n: int) -> Tuple[_T_co, ...]: ... def __rmul__(self, n: int) -> Tuple[_T_co, ...]: ... def count(self, x: Any) -> int: ... def index(self, x: Any) -> int: ... class function: # TODO name of the class (corresponds to Python 'function' class) __name__ = ... # type: str __module__ = ... # type: str class list(MutableSequence[_T], Generic[_T]): @overload def __init__(self) -> None: ... @overload def __init__(self, iterable: Iterable[_T]) -> None: ... def append(self, object: _T) -> None: ... def extend(self, iterable: Iterable[_T]) -> None: ... def pop(self, index: int = ...) -> _T: ... def index(self, object: _T, start: int = ..., stop: int = ...) -> int: ... def count(self, object: _T) -> int: ... def insert(self, index: int, object: _T) -> None: ... def remove(self, object: _T) -> None: ... def reverse(self) -> None: ... def sort(self, cmp: Callable[[_T, _T], Any] = ..., key: Callable[[_T], Any] = ..., reverse: bool = ...) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __hash__(self) -> int: ... @overload def __getitem__(self, i: int) -> _T: ... @overload def __getitem__(self, s: slice) -> List[_T]: ... def __getslice__(self, start: int, stop: int) -> List[_T]: ... @overload def __setitem__(self, i: int, o: _T) -> None: ... @overload def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... def __setslice__(self, start: int, stop: int, o: Sequence[_T]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... def __delslice__(self, start: int, stop: int) -> None: ... def __add__(self, x: List[_T]) -> List[_T]: ... def __iadd__(self, x: Iterable[_T]) -> List[_T]: ... def __mul__(self, n: int) -> List[_T]: ... def __rmul__(self, n: int) -> List[_T]: ... def __contains__(self, o: object) -> bool: ... def __reversed__(self) -> Iterator[_T]: ... def __gt__(self, x: List[_T]) -> bool: ... def __ge__(self, x: List[_T]) -> bool: ... def __lt__(self, x: List[_T]) -> bool: ... def __le__(self, x: List[_T]) -> bool: ... class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): # NOTE: Keyword arguments are special. If they are used, _KT must include # str, but we have no way of enforcing it here. @overload def __init__(self, **kwargs: _VT) -> None: ... @overload def __init__(self, map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... def __new__(cls: Type[_T1], *args: Any, **kwargs: Any) -> _T1: ... def has_key(self, k: _KT) -> bool: ... def clear(self) -> None: ... def copy(self) -> Dict[_KT, _VT]: ... def popitem(self) -> Tuple[_KT, _VT]: ... def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ... @overload def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... 
@overload def update(self, __m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... @overload def update(self, **kwargs: _VT) -> None: ... def iterkeys(self) -> Iterator[_KT]: ... def itervalues(self) -> Iterator[_VT]: ... def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ... def viewkeys(self) -> KeysView[_KT]: ... def viewvalues(self) -> ValuesView[_VT]: ... def viewitems(self) -> ItemsView[_KT, _VT]: ... @staticmethod @overload def fromkeys(seq: Sequence[_T]) -> Dict[_T, Any]: ... # TODO: Actually a class method (mypy/issues#328) @staticmethod @overload def fromkeys(seq: Sequence[_T], value: _S) -> Dict[_T, _S]: ... def __len__(self) -> int: ... def __getitem__(self, k: _KT) -> _VT: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... def __delitem__(self, v: _KT) -> None: ... def __iter__(self) -> Iterator[_KT]: ... def __str__(self) -> str: ... class set(MutableSet[_T], Generic[_T]): def __init__(self, iterable: Iterable[_T] = ...) -> None: ... def add(self, element: _T) -> None: ... def clear(self) -> None: ... def copy(self) -> Set[_T]: ... def difference(self, *s: Iterable[Any]) -> Set[_T]: ... def difference_update(self, *s: Iterable[Any]) -> None: ... def discard(self, element: _T) -> None: ... def intersection(self, *s: Iterable[Any]) -> Set[_T]: ... def intersection_update(self, *s: Iterable[Any]) -> None: ... def isdisjoint(self, s: Iterable[object]) -> bool: ... def issubset(self, s: Iterable[object]) -> bool: ... def issuperset(self, s: Iterable[object]) -> bool: ... def pop(self) -> _T: ... def remove(self, element: _T) -> None: ... def symmetric_difference(self, s: Iterable[_T]) -> Set[_T]: ... def symmetric_difference_update(self, s: Iterable[_T]) -> None: ... def union(self, *s: Iterable[_T]) -> Set[_T]: ... def update(self, *s: Iterable[_T]) -> None: ... def __len__(self) -> int: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __and__(self, s: AbstractSet[object]) -> Set[_T]: ... def __iand__(self, s: AbstractSet[object]) -> Set[_T]: ... def __or__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __ior__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __sub__(self, s: AbstractSet[object]) -> Set[_T]: ... def __isub__(self, s: AbstractSet[object]) -> Set[_T]: ... def __xor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __ixor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __le__(self, s: AbstractSet[object]) -> bool: ... def __lt__(self, s: AbstractSet[object]) -> bool: ... def __ge__(self, s: AbstractSet[object]) -> bool: ... def __gt__(self, s: AbstractSet[object]) -> bool: ... # TODO more set operations class frozenset(AbstractSet[_T], Generic[_T]): @overload def __init__(self) -> None: ... @overload def __init__(self, iterable: Iterable[_T]) -> None: ... def copy(self) -> FrozenSet[_T]: ... def difference(self, *s: Iterable[object]) -> FrozenSet[_T]: ... def intersection(self, *s: Iterable[object]) -> FrozenSet[_T]: ... def isdisjoint(self, s: Iterable[_T]) -> bool: ... def issubset(self, s: Iterable[object]) -> bool: ... def issuperset(self, s: Iterable[object]) -> bool: ... def symmetric_difference(self, s: Iterable[_T]) -> FrozenSet[_T]: ... def union(self, *s: Iterable[_T]) -> FrozenSet[_T]: ... def __len__(self) -> int: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __and__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ... 
def __or__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ... def __sub__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ... def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ... def __le__(self, s: AbstractSet[object]) -> bool: ... def __lt__(self, s: AbstractSet[object]) -> bool: ... def __ge__(self, s: AbstractSet[object]) -> bool: ... def __gt__(self, s: AbstractSet[object]) -> bool: ... class enumerate(Iterator[Tuple[int, _T]], Generic[_T]): def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ... def __iter__(self) -> Iterator[Tuple[int, _T]]: ... def next(self) -> Tuple[int, _T]: ... # TODO __getattribute__ class xrange(Sized, Iterable[int], Reversible[int]): @overload def __init__(self, stop: int) -> None: ... @overload def __init__(self, start: int, stop: int, step: int = ...) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... def __getitem__(self, i: int) -> int: ... def __reversed__(self) -> Iterator[int]: ... class property(object): def __init__(self, fget: Optional[Callable[[Any], Any]] = ..., fset: Optional[Callable[[Any, Any], None]] = ..., fdel: Optional[Callable[[Any], None]] = ..., doc: Optional[str] = ...) -> None: ... def getter(self, fget: Callable[[Any], Any]) -> property: ... def setter(self, fset: Callable[[Any, Any], None]) -> property: ... def deleter(self, fdel: Callable[[Any], None]) -> property: ... def __get__(self, obj: Any, type: Optional[type] = ...) -> Any: ... def __set__(self, obj: Any, value: Any) -> None: ... def __delete__(self, obj: Any) -> None: ... def fget(self) -> Any: ... def fset(self, value: Any) -> None: ... def fdel(self) -> None: ... long = int bytes = str NotImplemented = ... # type: Any def abs(n: SupportsAbs[_T]) -> _T: ... def all(i: Iterable[object]) -> bool: ... def any(i: Iterable[object]) -> bool: ... def bin(number: int) -> str: ... def callable(o: object) -> bool: ... def chr(code: int) -> str: ... def compile(source: Any, filename: unicode, mode: str, flags: int = ..., dont_inherit: int = ...) -> Any: ... def delattr(o: Any, name: unicode) -> None: ... def dir(o: object = ...) -> List[str]: ... @overload def divmod(a: int, b: int) -> Tuple[int, int]: ... @overload def divmod(a: float, b: float) -> Tuple[float, float]: ... def exit(code: Any = ...) -> NoReturn: ... @overload def filter(function: Callable[[_T], Any], iterable: Iterable[_T]) -> List[_T]: ... @overload def filter(function: None, iterable: Iterable[Optional[_T]]) -> List[_T]: ... def format(o: object, format_spec: str = ...) -> str: ... # TODO unicode def getattr(o: Any, name: unicode, default: Optional[Any] = ...) -> Any: ... def hasattr(o: Any, name: unicode) -> bool: ... def hash(o: object) -> int: ... def hex(i: int) -> str: ... # TODO __index__ def id(o: object) -> int: ... def input(prompt: Any = ...) -> Any: ... def intern(string: str) -> str: ... @overload def iter(iterable: Iterable[_T]) -> Iterator[_T]: ... @overload def iter(function: Callable[[], _T], sentinel: _T) -> Iterator[_T]: ... def isinstance(o: object, t: Union[type, Tuple[Union[type, Tuple], ...]]) -> bool: ... def issubclass(cls: type, classinfo: Union[type, Tuple[Union[type, Tuple], ...]]) -> bool: ... def len(o: Sized) -> int: ... @overload def map(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> List[_S]: ... @overload def map(func: Callable[[_T1, _T2], _S], iter1: Iterable[_T1], iter2: Iterable[_T2]) -> List[_S]: ... # TODO more than two iterables @overload def map(func: None, iter1: Iterable[_T1]) -> List[_T1]: ... 
@overload def map(func: None, iter1: Iterable[_T1], iter2: Iterable[_T2]) -> List[Tuple[_T1, _T2]]: ... # TODO more than two iterables @overload def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def max(iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> _T: ... @overload def min(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def min(iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> _T: ... @overload def next(i: Iterator[_T]) -> _T: ... @overload def next(i: Iterator[_T], default: _VT) -> Union[_T, _VT]: ... def oct(i: int) -> str: ... # TODO __index__ @overload def open(file: str, mode: unicode = ..., buffering: int = ...) -> BinaryIO: ... @overload def open(file: unicode, mode: unicode = ..., buffering: int = ...) -> BinaryIO: ... @overload def open(file: int, mode: unicode = ..., buffering: int = ...) -> BinaryIO: ... def ord(c: unicode) -> int: ... # This is only available after from __future__ import print_function. def print(*values: Any, sep: unicode = ..., end: unicode = ..., file: IO[Any] = ...) -> None: ... @overload def pow(x: int, y: int) -> Any: ... # The return type can be int or float, depending on y. @overload def pow(x: int, y: int, z: int) -> Any: ... @overload def pow(x: float, y: float) -> float: ... @overload def pow(x: float, y: float, z: float) -> float: ... def quit(code: int = ...) -> None: ... def range(x: int, y: int = ..., step: int = ...) -> List[int]: ... def raw_input(prompt: Any = ...) -> str: ... @overload def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], initializer: _T) -> _T: ... @overload def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T]) -> _T: ... def reload(module: Any) -> Any: ... @overload def reversed(object: Reversible[_T]) -> Iterator[_T]: ... @overload def reversed(object: Sequence[_T]) -> Iterator[_T]: ... def repr(o: object) -> str: ... @overload def round(number: float) -> float: ... @overload def round(number: float, ndigits: int) -> float: ... # Always return a float if given ndigits. @overload def round(number: SupportsRound[_T]) -> _T: ... @overload def round(number: SupportsRound[_T], ndigits: int) -> _T: ... def setattr(object: Any, name: unicode, value: Any) -> None: ... def sorted(iterable: Iterable[_T], *, cmp: Callable[[_T, _T], int] = ..., key: Callable[[_T], Any] = ..., reverse: bool = ...) -> List[_T]: ... @overload def sum(iterable: Iterable[_T]) -> Union[_T, int]: ... @overload def sum(iterable: Iterable[_T], start: _S) -> Union[_T, _S]: ... def unichr(i: int) -> unicode: ... def vars(object: Any = ...) -> Dict[str, Any]: ... @overload def zip(iter1: Iterable[_T1]) -> List[Tuple[_T1]]: ... @overload def zip(iter1: Iterable[_T1], iter2: Iterable[_T2]) -> List[Tuple[_T1, _T2]]: ... @overload def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3]) -> List[Tuple[_T1, _T2, _T3]]: ... @overload def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4]) -> List[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5]) -> List[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def zip(iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any], iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any], *iterables: Iterable[Any]) -> List[Tuple[Any, ...]]: ... 
def __import__(name: unicode, globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ..., fromlist: List[str] = ..., level: int = ...) -> Any: ... def globals() -> Dict[str, Any]: ... def locals() -> Dict[str, Any]: ... # Actually the type of Ellipsis is , but since it's # not exposed anywhere under that name, we make it private here. class ellipsis: ... Ellipsis = ... # type: ellipsis # TODO: buffer support is incomplete; e.g. some_string.startswith(some_buffer) doesn't type check. _AnyBuffer = TypeVar('_AnyBuffer', str, unicode, bytearray, buffer) class buffer(Sized): def __init__(self, object: _AnyBuffer, offset: int = ..., size: int = ...) -> None: ... def __add__(self, other: _AnyBuffer) -> str: ... def __cmp__(self, other: _AnyBuffer) -> bool: ... def __getitem__(self, key: Union[int, slice]) -> str: ... def __getslice__(self, i: int, j: int) -> str: ... def __len__(self) -> int: ... def __mul__(self, x: int) -> str: ... class memoryview(Sized, Container[bytes]): format = ... # type: str itemsize = ... # type: int shape = ... # type: Optional[Tuple[int, ...]] strides = ... # type: Optional[Tuple[int, ...]] suboffsets = ... # type: Optional[Tuple[int, ...]] readonly = ... # type: bool ndim = ... # type: int def __init__(self, obj: Union[str, bytearray, buffer, memoryview]) -> None: ... @overload def __getitem__(self, i: int) -> bytes: ... @overload def __getitem__(self, s: slice) -> memoryview: ... def __contains__(self, x: object) -> bool: ... def __iter__(self) -> Iterator[bytes]: ... def __len__(self) -> int: ... @overload def __setitem__(self, i: int, o: bytes) -> None: ... @overload def __setitem__(self, s: slice, o: Sequence[bytes]) -> None: ... @overload def __setitem__(self, s: slice, o: memoryview) -> None: ... def tobytes(self) -> bytes: ... def tolist(self) -> List[int]: ... class BaseException(object): args = ... # type: Tuple[Any, ...] message = ... # type: str def __init__(self, *args: object, **kwargs: object) -> None: ... def __getitem__(self, i: int) -> Any: ... def __getslice__(self, start: int, stop: int) -> Tuple[Any, ...]: ... class GeneratorExit(BaseException): ... class KeyboardInterrupt(BaseException): ... class SystemExit(BaseException): code = 0 class Exception(BaseException): ... class StopIteration(Exception): ... class StandardError(Exception): ... class ArithmeticError(StandardError): ... class BufferError(StandardError): ... class EnvironmentError(StandardError): errno = 0 strerror = ... # type: str # TODO can this be unicode? filename = ... # type: str class LookupError(StandardError): ... class RuntimeError(StandardError): ... class ValueError(StandardError): ... class AssertionError(StandardError): ... class AttributeError(StandardError): ... class EOFError(StandardError): ... class FloatingPointError(ArithmeticError): ... class IOError(EnvironmentError): ... class ImportError(StandardError): ... class IndexError(LookupError): ... class KeyError(LookupError): ... class MemoryError(StandardError): ... class NameError(StandardError): ... class NotImplementedError(RuntimeError): ... class OSError(EnvironmentError): ... class WindowsError(OSError): winerror = ... # type: int class OverflowError(ArithmeticError): ... class ReferenceError(StandardError): ... class SyntaxError(StandardError): msg = ... # type: str lineno = ... # type: int offset = ... # type: int text = ... # type: str filename = ... # type: str class IndentationError(SyntaxError): ... class TabError(IndentationError): ... class SystemError(StandardError): ... 
class TypeError(StandardError): ... class UnboundLocalError(NameError): ... class UnicodeError(ValueError): ... class UnicodeDecodeError(UnicodeError): ... class UnicodeEncodeError(UnicodeError): ... class UnicodeTranslateError(UnicodeError): ... class ZeroDivisionError(ArithmeticError): ... class Warning(Exception): ... class UserWarning(Warning): ... class DeprecationWarning(Warning): ... class SyntaxWarning(Warning): ... class RuntimeWarning(Warning): ... class FutureWarning(Warning): ... class PendingDeprecationWarning(Warning): ... class ImportWarning(Warning): ... class UnicodeWarning(Warning): ... class BytesWarning(Warning): ... class ResourceWarning(Warning): ... def eval(s: Union[str, unicode], globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ...) -> Any: ... def exec(object: str, globals: Optional[Dict[str, Any]] = ..., locals: Optional[Dict[str, Any]] = ...) -> Any: ... # TODO code object as source def cmp(x: Any, y: Any) -> int: ... def execfile(filename: str, globals: Optional[Dict[str, Any]] = ..., locals: Optional[Dict[str, Any]] = ...) -> None: ... class file(BinaryIO): @overload def __init__(self, file: str, mode: str = ..., buffering: int = ...) -> None: ... @overload def __init__(self, file: unicode, mode: str = ..., buffering: int = ...) -> None: ... @overload def __init__(self, file: int, mode: str = ..., buffering: int = ...) -> None: ... def __iter__(self) -> Iterator[str]: ... def read(self, n: int = ...) -> str: ... def __enter__(self) -> BinaryIO: ... def __exit__(self, t: Optional[type] = ..., exc: Optional[BaseException] = ..., tb: Optional[Any] = ...) -> bool: ... def flush(self) -> None: ... def fileno(self) -> int: ... def isatty(self) -> bool: ... def close(self) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... def seekable(self) -> bool: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... def readline(self, limit: int = ...) -> str: ... def readlines(self, hint: int = ...) -> List[str]: ... def write(self, data: str) -> int: ... def writelines(self, data: Iterable[str]) -> None: ... def truncate(self, pos: Optional[int] = ...) -> int: ... # Very old builtins def apply(func: Callable[..., _T], args: Optional[Sequence[Any]] = ..., kwds: Optional[Mapping[str, Any]] = ...) -> _T: ... _N = TypeVar('_N', bool, int, float, complex) def coerce(x: _N, y: _N) -> Tuple[_N, _N]: ... mypy-0.560/typeshed/stdlib/2/collections.pyi0000644€tŠÔÚ€2›s®0000001066513215007212025205 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for collections # Based on http://docs.python.org/2.7/library/collections.html # These are not exported. import typing from typing import Dict, Generic, TypeVar, Tuple, overload, Type, Optional, List, Union, Reversible # These are exported. from typing import ( Callable as Callable, Container as Container, Hashable as Hashable, ItemsView as ItemsView, Iterable as Iterable, Iterator as Iterator, KeysView as KeysView, Mapping as Mapping, MappingView as MappingView, MutableMapping as MutableMapping, MutableSequence as MutableSequence, MutableSet as MutableSet, Sequence as Sequence, AbstractSet as Set, Sized as Sized, ValuesView as ValuesView, ) _T = TypeVar('_T') _KT = TypeVar('_KT') _VT = TypeVar('_VT') # namedtuple is special-cased in the type checker; the initializer is ignored. def namedtuple(typename: Union[str, unicode], field_names: Union[str, unicode, Iterable[Union[str, unicode]]], *, verbose: bool = ..., rename: bool = ...) -> Type[tuple]: ... 
class deque(Sized, Iterable[_T], Reversible[_T], Generic[_T]): def __init__(self, iterable: Iterable[_T] = ..., maxlen: int = ...) -> None: ... @property def maxlen(self) -> Optional[int]: ... def append(self, x: _T) -> None: ... def appendleft(self, x: _T) -> None: ... def clear(self) -> None: ... def count(self, x: _T) -> int: ... def extend(self, iterable: Iterable[_T]) -> None: ... def extendleft(self, iterable: Iterable[_T]) -> None: ... def pop(self) -> _T: ... def popleft(self) -> _T: ... def remove(self, value: _T) -> None: ... def reverse(self) -> None: ... def rotate(self, n: int) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __hash__(self) -> int: ... def __getitem__(self, i: int) -> _T: ... def __setitem__(self, i: int, x: _T) -> None: ... def __contains__(self, o: _T) -> bool: ... def __reversed__(self) -> Iterator[_T]: ... class Counter(Dict[_T, int], Generic[_T]): @overload def __init__(self, **kwargs: int) -> None: ... @overload def __init__(self, mapping: Mapping[_T, int]) -> None: ... @overload def __init__(self, iterable: Iterable[_T]) -> None: ... def elements(self) -> Iterator[_T]: ... def most_common(self, n: Optional[int] = ...) -> List[Tuple[_T, int]]: ... @overload def subtract(self, __mapping: Mapping[_T, int]) -> None: ... @overload def subtract(self, iterable: Iterable[_T]) -> None: ... # The Iterable[Tuple[...]] argument type is not actually desirable # (the tuples will be added as keys, breaking type safety) but # it's included so that the signature is compatible with # Dict.update. Not sure if we should use '# type: ignore' instead # and omit the type from the union. @overload def update(self, __m: Mapping[_T, int], **kwargs: int) -> None: ... @overload def update(self, __m: Union[Iterable[_T], Iterable[Tuple[_T, int]]], **kwargs: int) -> None: ... @overload def update(self, **kwargs: int) -> None: ... def __add__(self, other: Counter[_T]) -> Counter[_T]: ... def __sub__(self, other: Counter[_T]) -> Counter[_T]: ... def __and__(self, other: Counter[_T]) -> Counter[_T]: ... def __or__(self, other: Counter[_T]) -> Counter[_T]: ... def __iadd__(self, other: Counter[_T]) -> Counter[_T]: ... def __isub__(self, other: Counter[_T]) -> Counter[_T]: ... def __iand__(self, other: Counter[_T]) -> Counter[_T]: ... def __ior__(self, other: Counter[_T]) -> Counter[_T]: ... class OrderedDict(Dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ... def __reversed__(self) -> Iterator[_KT]: ... def __copy__(self) -> OrderedDict[_KT, _VT]: ... class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]): default_factory = ... # type: Callable[[], _VT] @overload def __init__(self, **kwargs: _VT) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]]) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], map: Mapping[_KT, _VT]) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], iterable: Iterable[Tuple[_KT, _VT]]) -> None: ... def __missing__(self, key: _KT) -> _VT: ... mypy-0.560/typeshed/stdlib/2/commands.pyi0000644€tŠÔÚ€2›s®0000000051313215007212024457 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import overload, AnyStr, Text, Tuple def getstatus(file: Text) -> str: ... def getoutput(cmd: Text) -> str: ... def getstatusoutput(cmd: Text) -> Tuple[int, str]: ... @overload def mk2arg(head: bytes, x: bytes) -> bytes: ... 
@overload def mk2arg(head: Text, x: Text) -> Text: ... def mkarg(x: AnyStr) -> AnyStr: ... mypy-0.560/typeshed/stdlib/2/compileall.pyi0000644€tŠÔÚ€2›s®0000000114313215007212024777 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for compileall (Python 2) from typing import Optional, Pattern, Union _Path = Union[str, bytes] # rx can be any object with a 'search' method; once we have Protocols we can change the type def compile_dir(dir: _Path, maxlevels: int = ..., ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern] = ..., quiet: int = ...) -> int: ... def compile_file(fullname: _Path, ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern] = ..., quiet: int = ...) -> int: ... def compile_path(skip_curdir: bool = ..., maxlevels: int = ..., force: bool = ..., quiet: int = ...) -> int: ... mypy-0.560/typeshed/stdlib/2/ConfigParser.pyi0000644€tŠÔÚ€2›s®0000000777313215007212025257 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, Sequence, Tuple, Union, List, Dict __all__ = ... # type: List[str] DEFAULTSECT = ... # type: str MAX_INTERPOLATION_DEPTH = ... # type: int class Error(Exception): message = ... # type: Any def __init__(self, msg: str = ...) -> None: ... def _get_message(self) -> None: ... def _set_message(self, value: str) -> None: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... class NoSectionError(Error): section = ... # type: str def __init__(self, section: str) -> None: ... class DuplicateSectionError(Error): section = ... # type: str def __init__(self, section: str) -> None: ... class NoOptionError(Error): section = ... # type: str option = ... # type: str def __init__(self, option: str, section: str) -> None: ... class InterpolationError(Error): section = ... # type: str option = ... # type: str msg = ... # type: str def __init__(self, option: str, section: str, msg: str) -> None: ... class InterpolationMissingOptionError(InterpolationError): reference = ... # type: str def __init__(self, option: str, section: str, rawval: str, reference: str) -> None: ... class InterpolationSyntaxError(InterpolationError): ... class InterpolationDepthError(InterpolationError): def __init__(self, option: str, section: str, rawval: str) -> None: ... class ParsingError(Error): filename = ... # type: str errors = ... # type: List[Tuple[Any, Any]] def __init__(self, filename: str) -> None: ... def append(self, lineno: Any, line: Any) -> None: ... class MissingSectionHeaderError(ParsingError): lineno = ... # type: Any line = ... # type: Any def __init__(self, filename: str, lineno: Any, line: Any) -> None: ... class RawConfigParser: _dict = ... # type: Any _sections = ... # type: dict _defaults = ... # type: dict _optcre = ... # type: Any SECTCRE = ... # type: Any OPTCRE = ... # type: Any OPTCRE_NV = ... # type: Any def __init__(self, defaults: Dict[Any, Any] = ..., dict_type: Any = ..., allow_no_value: bool = ...) -> None: ... def defaults(self) -> Dict[Any, Any]: ... def sections(self) -> List[str]: ... def add_section(self, section: str) -> None: ... def has_section(self, section: str) -> bool: ... def options(self, section: str) -> List[str]: ... def read(self, filenames: Union[str, Sequence[str]]) -> List[str]: ... def readfp(self, fp: IO[str], filename: str = ...) -> None: ... def get(self, section: str, option: str) -> str: ... def items(self, section: str) -> List[Tuple[Any, Any]]: ... def _get(self, section: str, conv: type, option: str) -> Any: ... def getint(self, section: str, option: str) -> int: ... 
def getfloat(self, section: str, option: str) -> float: ... _boolean_states = ... # type: Dict[str, bool] def getboolean(self, section: str, option: str) -> bool: ... def optionxform(self, optionstr: str) -> str: ... def has_option(self, section: str, option: str) -> bool: ... def set(self, section: str, option: str, value: Any = ...) -> None: ... def write(self, fp: IO[str]) -> None: ... def remove_option(self, section: str, option: Any) -> bool: ... def remove_section(self, section: str) -> bool: ... def _read(self, fp: IO[str], fpname: str) -> None: ... class ConfigParser(RawConfigParser): _KEYCRE = ... # type: Any def get(self, section: str, option: str, raw: bool = ..., vars: dict = ...) -> Any: ... def items(self, section: str, raw: bool = ..., vars: dict = ...) -> List[Tuple[str, Any]]: ... def _interpolate(self, section: str, option: str, rawval: Any, vars: Any) -> str: ... def _interpolation_replace(self, match: Any) -> str: ... class SafeConfigParser(ConfigParser): _interpvar_re = ... # type: Any def _interpolate(self, section: str, option: str, rawval: Any, vars: Any) -> str: ... def _interpolate_some(self, option: str, accum: list, rest: str, section: str, map: dict, depth: int) -> None: ... mypy-0.560/typeshed/stdlib/2/Cookie.pyi0000644€tŠÔÚ€2›s®0000000232213215007212024067 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class CookieError(Exception): ... class Morsel(dict): key = ... # type: Any def __init__(self): ... def __setitem__(self, K, V): ... def isReservedKey(self, K): ... value = ... # type: Any coded_value = ... # type: Any def set(self, key, val, coded_val, LegalChars=..., idmap=..., translate=...): ... def output(self, attrs=None, header=...): ... def js_output(self, attrs=None): ... def OutputString(self, attrs=None): ... class BaseCookie(dict): def value_decode(self, val): ... def value_encode(self, val): ... def __init__(self, input=None): ... def __setitem__(self, key, value): ... def output(self, attrs=None, header=..., sep=...): ... def js_output(self, attrs=None): ... def load(self, rawdata): ... class SimpleCookie(BaseCookie): def value_decode(self, val): ... def value_encode(self, val): ... class SerialCookie(BaseCookie): def __init__(self, input=None): ... def value_decode(self, val): ... def value_encode(self, val): ... class SmartCookie(BaseCookie): def __init__(self, input=None): ... def value_decode(self, val): ... def value_encode(self, val): ... Cookie = ... # type: Any mypy-0.560/typeshed/stdlib/2/cookielib.pyi0000644€tŠÔÚ€2›s®0000001070713215007212024624 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class Cookie: version = ... # type: Any name = ... # type: Any value = ... # type: Any port = ... # type: Any port_specified = ... # type: Any domain = ... # type: Any domain_specified = ... # type: Any domain_initial_dot = ... # type: Any path = ... # type: Any path_specified = ... # type: Any secure = ... # type: Any expires = ... # type: Any discard = ... # type: Any comment = ... # type: Any comment_url = ... # type: Any rfc2109 = ... # type: Any def __init__(self, version, name, value, port, port_specified, domain, domain_specified, domain_initial_dot, path, path_specified, secure, expires, discard, comment, comment_url, rest, rfc2109=False): ... def has_nonstandard_attr(self, name): ... def get_nonstandard_attr(self, name, default=None): ... def set_nonstandard_attr(self, name, value): ... def is_expired(self, now=None): ... class CookiePolicy: def set_ok(self, cookie, request): ... 
def return_ok(self, cookie, request): ... def domain_return_ok(self, domain, request): ... def path_return_ok(self, path, request): ... class DefaultCookiePolicy(CookiePolicy): DomainStrictNoDots = ... # type: Any DomainStrictNonDomain = ... # type: Any DomainRFC2965Match = ... # type: Any DomainLiberal = ... # type: Any DomainStrict = ... # type: Any netscape = ... # type: Any rfc2965 = ... # type: Any rfc2109_as_netscape = ... # type: Any hide_cookie2 = ... # type: Any strict_domain = ... # type: Any strict_rfc2965_unverifiable = ... # type: Any strict_ns_unverifiable = ... # type: Any strict_ns_domain = ... # type: Any strict_ns_set_initial_dollar = ... # type: Any strict_ns_set_path = ... # type: Any def __init__(self, blocked_domains=None, allowed_domains=None, netscape=True, rfc2965=False, rfc2109_as_netscape=None, hide_cookie2=False, strict_domain=False, strict_rfc2965_unverifiable=True, strict_ns_unverifiable=False, strict_ns_domain=..., strict_ns_set_initial_dollar=False, strict_ns_set_path=False): ... def blocked_domains(self): ... def set_blocked_domains(self, blocked_domains): ... def is_blocked(self, domain): ... def allowed_domains(self): ... def set_allowed_domains(self, allowed_domains): ... def is_not_allowed(self, domain): ... def set_ok(self, cookie, request): ... def set_ok_version(self, cookie, request): ... def set_ok_verifiability(self, cookie, request): ... def set_ok_name(self, cookie, request): ... def set_ok_path(self, cookie, request): ... def set_ok_domain(self, cookie, request): ... def set_ok_port(self, cookie, request): ... def return_ok(self, cookie, request): ... def return_ok_version(self, cookie, request): ... def return_ok_verifiability(self, cookie, request): ... def return_ok_secure(self, cookie, request): ... def return_ok_expires(self, cookie, request): ... def return_ok_port(self, cookie, request): ... def return_ok_domain(self, cookie, request): ... def domain_return_ok(self, domain, request): ... def path_return_ok(self, path, request): ... class Absent: ... class CookieJar: non_word_re = ... # type: Any quote_re = ... # type: Any strict_domain_re = ... # type: Any domain_re = ... # type: Any dots_re = ... # type: Any magic_re = ... # type: Any def __init__(self, policy=None): ... def set_policy(self, policy): ... def add_cookie_header(self, request): ... def make_cookies(self, response, request): ... def set_cookie_if_ok(self, cookie, request): ... def set_cookie(self, cookie): ... def extract_cookies(self, response, request): ... def clear(self, domain=None, path=None, name=None): ... def clear_session_cookies(self): ... def clear_expired_cookies(self): ... def __iter__(self): ... def __len__(self): ... class LoadError(IOError): ... class FileCookieJar(CookieJar): filename = ... # type: Any delayload = ... # type: Any def __init__(self, filename=None, delayload=False, policy=None): ... def save(self, filename=None, ignore_discard=False, ignore_expires=False): ... def load(self, filename=None, ignore_discard=False, ignore_expires=False): ... def revert(self, filename=None, ignore_discard=False, ignore_expires=False): ... MozillaCookieJar = FileCookieJar LWPCookieJar = FileCookieJar def lwp_cookie_str(cookie: Cookie) -> str: ... mypy-0.560/typeshed/stdlib/2/cPickle.pyi0000644€tŠÔÚ€2›s®0000000151313215007212024231 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, List HIGHEST_PROTOCOL = ... # type: int compatible_formats = ... # type: List[str] format_version = ... 
# type: str class Pickler: def __init__(self, file: IO[str], protocol: int = ...) -> None: ... def dump(self, obj: Any) -> None: ... def clear_memo(self) -> None: ... class Unpickler: def __init__(self, file: IO[str]) -> None: ... def load(self) -> Any: ... def noload(self) -> Any: ... def dump(obj: Any, file: IO[str], protocol: int = ...) -> None: ... def dumps(obj: Any, protocol: int = ...) -> str: ... def load(file: IO[str]) -> Any: ... def loads(str: str) -> Any: ... class PickleError(Exception): ... class UnpicklingError(PickleError): ... class BadPickleGet(UnpicklingError): ... class PicklingError(PickleError): ... class UnpickleableError(PicklingError): ... mypy-0.560/typeshed/stdlib/2/cStringIO.pyi0000644€tŠÔÚ€2›s®0000000346113215007212024524 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for cStringIO (Python 2.7) # See https://docs.python.org/2/library/stringio.html from typing import overload, IO, List, Iterable, Iterator, Optional, Union from types import TracebackType # TODO the typing.IO[] generics should be split into input and output. class InputType(IO[str], Iterator[str]): def getvalue(self) -> str: ... def close(self) -> None: ... @property def closed(self) -> bool: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def read(self, size: int = ...) -> str: ... def readline(self, size: int = ...) -> str: ... def readlines(self, hint: int = ...) -> List[str]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def __iter__(self) -> 'InputType': ... def next(self) -> str: ... def reset(self) -> None: ... class OutputType(IO[str], Iterator[str]): @property def softspace(self) -> int: ... def getvalue(self) -> str: ... def close(self) -> None: ... @property def closed(self) -> bool: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def read(self, size: int = ...) -> str: ... def readline(self, size: int = ...) -> str: ... def readlines(self, hint: int = ...) -> List[str]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def __iter__(self) -> 'OutputType': ... def next(self) -> str: ... def reset(self) -> None: ... def write(self, b: Union[str, unicode]) -> int: ... def writelines(self, lines: Iterable[Union[str, unicode]]) -> None: ... @overload def StringIO() -> OutputType: ... @overload def StringIO(s: str) -> InputType: ... mypy-0.560/typeshed/stdlib/2/datetime.pyi0000644€tŠÔÚ€2›s®0000001677413215007212024472 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for datetime # NOTE: These are incomplete! from time import struct_time from typing import AnyStr, Optional, SupportsAbs, Tuple, Union, overload MINYEAR = 0 MAXYEAR = 0 class tzinfo(object): def tzname(self, dt: Optional[datetime]) -> str: ... def utcoffset(self, dt: Optional[datetime]) -> Optional[timedelta]: ... def dst(self, dt: Optional[datetime]) -> Optional[timedelta]: ... def fromutc(self, dt: datetime) -> datetime: ... _tzinfo = tzinfo class date(object): min = ... # type: date max = ... # type: date resolution = ... # type: timedelta def __init__(self, year: int, month: int, day: int) -> None: ... @classmethod def fromtimestamp(cls, t: float) -> date: ... @classmethod def today(cls) -> date: ... @classmethod def fromordinal(cls, n: int) -> date: ... @property def year(self) -> int: ... @property def month(self) -> int: ... @property def day(self) -> int: ... 
def ctime(self) -> str: ... def strftime(self, fmt: Union[str, unicode]) -> str: ... def __format__(self, fmt: AnyStr) -> AnyStr: ... def isoformat(self) -> str: ... def timetuple(self) -> struct_time: ... def toordinal(self) -> int: ... def replace(self, year: int = ..., month: int = ..., day: int = ...) -> date: ... def __le__(self, other: date) -> bool: ... def __lt__(self, other: date) -> bool: ... def __ge__(self, other: date) -> bool: ... def __gt__(self, other: date) -> bool: ... def __add__(self, other: timedelta) -> date: ... @overload def __sub__(self, other: timedelta) -> date: ... @overload def __sub__(self, other: date) -> timedelta: ... def __hash__(self) -> int: ... def weekday(self) -> int: ... def isoweekday(self) -> int: ... def isocalendar(self) -> Tuple[int, int, int]: ... class time: min = ... # type: time max = ... # type: time resolution = ... # type: timedelta def __init__(self, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: tzinfo = ...) -> None: ... @property def hour(self) -> int: ... @property def minute(self) -> int: ... @property def second(self) -> int: ... @property def microsecond(self) -> int: ... @property def tzinfo(self) -> _tzinfo: ... def __le__(self, other: time) -> bool: ... def __lt__(self, other: time) -> bool: ... def __ge__(self, other: time) -> bool: ... def __gt__(self, other: time) -> bool: ... def __hash__(self) -> int: ... def isoformat(self) -> str: ... def strftime(self, fmt: Union[str, unicode]) -> str: ... def __format__(self, fmt: AnyStr) -> AnyStr: ... def utcoffset(self) -> Optional[timedelta]: ... def tzname(self) -> Optional[str]: ... def dst(self) -> Optional[int]: ... def replace(self, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[_tzinfo] = ...) -> time: ... _date = date _time = time class timedelta(SupportsAbs[timedelta]): min = ... # type: timedelta max = ... # type: timedelta resolution = ... # type: timedelta def __init__(self, days: float = ..., seconds: float = ..., microseconds: float = ..., milliseconds: float = ..., minutes: float = ..., hours: float = ..., weeks: float = ...) -> None: ... @property def days(self) -> int: ... @property def seconds(self) -> int: ... @property def microseconds(self) -> int: ... def total_seconds(self) -> float: ... def __add__(self, other: timedelta) -> timedelta: ... def __radd__(self, other: timedelta) -> timedelta: ... def __sub__(self, other: timedelta) -> timedelta: ... def __rsub__(self, other: timedelta) -> timedelta: ... def __neg__(self) -> timedelta: ... def __pos__(self) -> timedelta: ... def __abs__(self) -> timedelta: ... def __mul__(self, other: float) -> timedelta: ... def __rmul__(self, other: float) -> timedelta: ... @overload def __floordiv__(self, other: timedelta) -> int: ... @overload def __floordiv__(self, other: int) -> timedelta: ... @overload def __div__(self, other: timedelta) -> float: ... @overload def __div__(self, other: float) -> timedelta: ... def __le__(self, other: timedelta) -> bool: ... def __lt__(self, other: timedelta) -> bool: ... def __ge__(self, other: timedelta) -> bool: ... def __gt__(self, other: timedelta) -> bool: ... def __hash__(self) -> int: ... class datetime(object): # TODO: is actually subclass of date, but __le__, __lt__, __ge__, __gt__, __sub__ don't work with date. min = ... # type: datetime max = ... # type: datetime resolution = ... 
# type: timedelta def __init__(self, year: int, month: int, day: int, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: tzinfo = ...) -> None: ... def __new__(cls, year: int, month: int, day: int, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: tzinfo = ...) -> datetime: ... @property def year(self) -> int: ... @property def month(self) -> int: ... @property def day(self) -> int: ... @property def hour(self) -> int: ... @property def minute(self) -> int: ... @property def second(self) -> int: ... @property def microsecond(self) -> int: ... @property def tzinfo(self) -> Optional[_tzinfo]: ... @classmethod def fromtimestamp(cls, t: float, tz: _tzinfo = ...) -> datetime: ... @classmethod def utcfromtimestamp(cls, t: float) -> datetime: ... @classmethod def today(cls) -> datetime: ... @classmethod def fromordinal(cls, n: int) -> datetime: ... @classmethod def now(cls, tz: _tzinfo = ...) -> datetime: ... @classmethod def utcnow(cls) -> datetime: ... @classmethod def combine(cls, date: date, time: time) -> datetime: ... def strftime(self, fmt: Union[str, unicode]) -> str: ... def __format__(self, fmt: AnyStr) -> AnyStr: ... def toordinal(self) -> int: ... def timetuple(self) -> struct_time: ... def utctimetuple(self) -> struct_time: ... def date(self) -> _date: ... def time(self) -> _time: ... def timetz(self) -> _time: ... def replace(self, year: int = ..., month: int = ..., day: int = ..., hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[_tzinfo] = ...) -> datetime: ... def astimezone(self, tz: _tzinfo) -> datetime: ... def ctime(self) -> str: ... def isoformat(self, sep: str = ...) -> str: ... @classmethod def strptime(cls, date_string: Union[str, unicode], format: Union[str, unicode]) -> datetime: ... def utcoffset(self) -> Optional[timedelta]: ... def tzname(self) -> Optional[str]: ... def dst(self) -> Optional[int]: ... def __le__(self, other: datetime) -> bool: ... def __lt__(self, other: datetime) -> bool: ... def __ge__(self, other: datetime) -> bool: ... def __gt__(self, other: datetime) -> bool: ... def __add__(self, other: timedelta) -> datetime: ... @overload def __sub__(self, other: datetime) -> timedelta: ... @overload def __sub__(self, other: timedelta) -> datetime: ... def __hash__(self) -> int: ... def weekday(self) -> int: ... def isoweekday(self) -> int: ... def isocalendar(self) -> Tuple[int, int, int]: ... mypy-0.560/typeshed/stdlib/2/decimal.pyi0000644€tŠÔÚ€2›s®0000002350213215007212024257 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for decimal (Python 2) from typing import ( Any, Dict, NamedTuple, Optional, Sequence, Tuple, Union, SupportsAbs, SupportsFloat, SupportsInt, ) _Decimal = Union[Decimal, int] _ComparableNum = Union[Decimal, int, float] DecimalTuple = NamedTuple('DecimalTuple', [('sign', int), ('digits', Sequence[int]), # TODO: Use Tuple[int, ...] ('exponent', int)]) ROUND_DOWN = ... # type: str ROUND_HALF_UP = ... # type: str ROUND_HALF_EVEN = ... # type: str ROUND_CEILING = ... # type: str ROUND_FLOOR = ... # type: str ROUND_UP = ... # type: str ROUND_HALF_DOWN = ... # type: str ROUND_05UP = ... # type: str class DecimalException(ArithmeticError): def handle(self, context, *args): ... class Clamped(DecimalException): ... class InvalidOperation(DecimalException): ... class ConversionSyntax(InvalidOperation): ... class DivisionByZero(DecimalException, ZeroDivisionError): ... class DivisionImpossible(InvalidOperation): ... 
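# Illustrative example (not part of the typeshed stub): DecimalTuple above is
# the shape returned by Decimal.as_tuple(), declared further below. A minimal
# runnable sketch against the real decimal module (example-only alias):
import decimal as _decimal_example
_parts = _decimal_example.Decimal('-1.23').as_tuple()
assert _parts == (1, (1, 2, 3), -2)   # (sign, digits, exponent); sign 1 means negative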
class DivisionUndefined(InvalidOperation, ZeroDivisionError): ... class Inexact(DecimalException): ... class InvalidContext(InvalidOperation): ... class Rounded(DecimalException): ... class Subnormal(DecimalException): ... class Overflow(Inexact, Rounded): ... class Underflow(Inexact, Rounded, Subnormal): ... def setcontext(context: Context): ... def getcontext() -> Context: ... def localcontext(ctx: Optional[Context] = ...) -> _ContextManager: ... class Decimal(SupportsAbs[Decimal], SupportsFloat, SupportsInt): def __init__(cls, value: Union[_Decimal, float, str, unicode, Tuple[int, Sequence[int], int]] = ..., context: Context = ...) -> None: ... @classmethod def from_float(cls, f: float) -> Decimal: ... def __nonzero__(self) -> bool: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, other: object) -> bool: ... def __lt__(self, other: _ComparableNum) -> bool: ... def __le__(self, other: _ComparableNum) -> bool: ... def __gt__(self, other: _ComparableNum) -> bool: ... def __ge__(self, other: _ComparableNum) -> bool: ... def compare(self, other: _Decimal) -> Decimal: ... def __hash__(self) -> int: ... def as_tuple(self) -> DecimalTuple: ... def to_eng_string(self, context: Context = ...) -> str: ... def __neg__(self) -> Decimal: ... def __pos__(self) -> Decimal: ... def __abs__(self, round: bool = ...) -> Decimal: ... def __add__(self, other: _Decimal) -> Decimal: ... def __radd__(self, other: int) -> Decimal: ... def __sub__(self, other: _Decimal) -> Decimal: ... def __rsub__(self, other: int) -> Decimal: ... def __mul__(self, other: _Decimal) -> Decimal: ... def __rmul__(self, other: int) -> Decimal: ... def __truediv__(self, other: _Decimal) -> Decimal: ... def __rtruediv__(self, other: int) -> Decimal: ... def __div__(self, other: _Decimal) -> Decimal: ... def __rdiv__(self, other: int) -> Decimal: ... def __divmod__(self, other: _Decimal) -> Tuple[Decimal, Decimal]: ... def __rdivmod__(self, other: int) -> Tuple[Decimal, Decimal]: ... def __mod__(self, other: _Decimal) -> Decimal: ... def __rmod__(self, other: int) -> Decimal: ... def remainder_near(self, other: _Decimal, context: Context = ...) -> Decimal: ... def __floordiv__(self, other: _Decimal) -> Decimal: ... def __rfloordiv__(self, other: int) -> Decimal: ... def __float__(self) -> float: ... def __int__(self) -> int: ... def __trunc__(self) -> int: ... @property def imag(self) -> Decimal: ... @property def real(self) -> Decimal: ... def conjugate(self) -> Decimal: ... def __complex__(self) -> complex: ... def __long__(self) -> long: ... def fma(self, other: _Decimal, third: _Decimal, context: Context = ...) -> Decimal: ... def __pow__(self, other: _Decimal) -> Decimal: ... def __rpow__(self, other: int) -> Decimal: ... def normalize(self, context: Context = ...) -> Decimal: ... def quantize(self, exp: _Decimal, rounding: str = ..., context: Context = ...) -> Decimal: ... def same_quantum(self, other: Decimal) -> bool: ... def to_integral(self, rounding: str = ..., context: Context = ...) -> Decimal: ... def to_integral_exact(self, rounding: str = ..., context: Context = ...) -> Decimal: ... def to_integral_value(self, rounding: str = ..., context: Context = ...) -> Decimal: ... def sqrt(self, context: Context = ...) -> Decimal: ... def max(self, other: _Decimal, context: Context = ...) -> Decimal: ... def min(self, other: _Decimal, context: Context = ...) -> Decimal: ... def adjusted(self) -> int: ... def canonical(self, context: Context = ...) -> Decimal: ... 
def compare_signal(self, other: _Decimal, context: Context = ...) -> Decimal: ... def compare_total(self, other: _Decimal) -> Decimal: ... def compare_total_mag(self, other: _Decimal) -> Decimal: ... def copy_abs(self) -> Decimal: ... def copy_negate(self) -> Decimal: ... def copy_sign(self, other: _Decimal) -> Decimal: ... def exp(self, context: Context = ...) -> Decimal: ... def is_canonical(self) -> bool: ... def is_finite(self) -> bool: ... def is_infinite(self) -> bool: ... def is_nan(self) -> bool: ... def is_normal(self, context: Context = ...) -> bool: ... def is_qnan(self) -> bool: ... def is_signed(self) -> bool: ... def is_snan(self) -> bool: ... def is_subnormal(self, context: Context = ...) -> bool: ... def is_zero(self) -> bool: ... def ln(self, context: Context = ...) -> Decimal: ... def log10(self, context: Context = ...) -> Decimal: ... def logb(self, context: Context = ...) -> Decimal: ... def logical_and(self, other: _Decimal, context: Context = ...) -> Decimal: ... def logical_invert(self, context: Context = ...) -> Decimal: ... def logical_or(self, other: _Decimal, context: Context = ...) -> Decimal: ... def logical_xor(self, other: _Decimal, context: Context = ...) -> Decimal: ... def max_mag(self, other: _Decimal, context: Context = ...) -> Decimal: ... def min_mag(self, other: _Decimal, context: Context = ...) -> Decimal: ... def next_minus(self, context: Context = ...) -> Decimal: ... def next_plus(self, context: Context = ...) -> Decimal: ... def next_toward(self, other: _Decimal, context: Context = ...) -> Decimal: ... def number_class(self, context: Context = ...) -> str: ... def radix(self) -> Decimal: ... def rotate(self, other: _Decimal, context: Context = ...) -> Decimal: ... def scaleb(self, other: _Decimal, context: Context = ...) -> Decimal: ... def shift(self, other: _Decimal, context: Context = ...) -> Decimal: ... def __reduce__(self): ... def __copy__(self): ... def __deepcopy__(self, memo): ... def __format__(self, specifier, context=None, _localeconv=None) -> str: ... class _ContextManager: new_context = ... # type: Context saved_context = ... # type: Context def __init__(self, new_context: Context) -> None: ... def __enter__(self): ... def __exit__(self, t, v, tb): ... class Context: prec = ... # type: int rounding = ... # type: str Emin = ... # type: int Emax = ... # type: int capitals = ... # type: int traps = ... # type: Dict[type, bool] flags = ... # type: Any def __init__(self, prec=None, rounding=None, traps=None, flags=None, Emin=None, Emax=None, capitals=None, _clamp=0, _ignored_flags=None): ... def clear_flags(self): ... def copy(self): ... __copy__ = ... # type: Any __hash__ = ... # type: Any def Etiny(self): ... def Etop(self): ... def create_decimal(self, num=...): ... def create_decimal_from_float(self, f): ... def abs(self, a): ... def add(self, a, b): ... def canonical(self, a): ... def compare(self, a, b): ... def compare_signal(self, a, b): ... def compare_total(self, a, b): ... def compare_total_mag(self, a, b): ... def copy_abs(self, a): ... def copy_decimal(self, a): ... def copy_negate(self, a): ... def copy_sign(self, a, b): ... def divide(self, a, b): ... def divide_int(self, a, b): ... def divmod(self, a, b): ... def exp(self, a): ... def fma(self, a, b, c): ... def is_canonical(self, a): ... def is_finite(self, a): ... def is_infinite(self, a): ... def is_nan(self, a): ... def is_normal(self, a): ... def is_qnan(self, a): ... def is_signed(self, a): ... def is_snan(self, a): ... def is_subnormal(self, a): ... 
def is_zero(self, a): ... def ln(self, a): ... def log10(self, a): ... def logb(self, a): ... def logical_and(self, a, b): ... def logical_invert(self, a): ... def logical_or(self, a, b): ... def logical_xor(self, a, b): ... def max(self, a, b): ... def max_mag(self, a, b): ... def min(self, a, b): ... def min_mag(self, a, b): ... def minus(self, a): ... def multiply(self, a, b): ... def next_minus(self, a): ... def next_plus(self, a): ... def next_toward(self, a, b): ... def normalize(self, a): ... def number_class(self, a): ... def plus(self, a): ... def power(self, a, b, modulo=None): ... def quantize(self, a, b): ... def radix(self): ... def remainder(self, a, b): ... def remainder_near(self, a, b): ... def rotate(self, a, b): ... def same_quantum(self, a, b): ... def scaleb(self, a, b): ... def shift(self, a, b): ... def sqrt(self, a): ... def subtract(self, a, b): ... def to_eng_string(self, a): ... def to_sci_string(self, a): ... def to_integral_exact(self, a): ... def to_integral_value(self, a): ... def to_integral(self, a): ... DefaultContext = ... # type: Context BasicContext = ... # type: Context ExtendedContext = ... # type: Context mypy-0.560/typeshed/stdlib/2/distutils/0000755€tŠÔÚ€2›s®0000000000013215007244024165 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2/distutils/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212026430 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2/distutils/emxccompiler.pyi0000644€tŠÔÚ€2›s®0000000016413215007212027373 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for emxccompiler from distutils.unixccompiler import UnixCCompiler class EMXCCompiler(UnixCCompiler): ... mypy-0.560/typeshed/stdlib/2/dummy_thread.pyi0000644€tŠÔÚ€2›s®0000000146513215007212025347 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy_extensions import NoReturn from typing import Any, Callable, Dict, Optional, Tuple class error(Exception): def __init__(self, *args: Any) -> None: ... def start_new_thread(function: Callable[..., Any], args: Tuple[Any, ...], kwargs: Dict[str, Any] = ...) -> None: ... def exit() -> NoReturn: ... def get_ident() -> int: ... def allocate_lock() -> LockType: ... def stack_size(size: Optional[int] = ...) -> int: ... class LockType(object): locked_status: bool def __init__(self) -> None: ... def acquire(self, waitflag: Optional[bool] = ...) -> bool: ... def __enter__(self, waitflag: Optional[bool] = ...) -> bool: ... def __exit__(self, typ: Any, val: Any, tb: Any) -> None: ... def release(self) -> bool: ... def locked(self) -> bool: ... def interrupt_main() -> None: ... mypy-0.560/typeshed/stdlib/2/email/0000755€tŠÔÚ€2›s®0000000000013215007244023230 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2/email/__init__.pyi0000644€tŠÔÚ€2›s®0000000041613215007212025506 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import IO, Any, AnyStr def message_from_string(s: AnyStr, *args, **kwargs): ... def message_from_bytes(s: str, *args, **kwargs): ... def message_from_file(fp: IO[AnyStr], *args, **kwargs): ... def message_from_binary_file(fp: IO[str], *args, **kwargs): ... mypy-0.560/typeshed/stdlib/2/email/_parseaddr.pyi0000644€tŠÔÚ€2›s®0000000223313215007212026052 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def parsedate_tz(data): ... def parsedate(data): ... def mktime_tz(data): ... def quote(str): ... class AddrlistClass: specials = ... # type: Any pos = ... # type: Any LWS = ... # type: Any CR = ... # type: Any FWS = ... 
# type: Any atomends = ... # type: Any phraseends = ... # type: Any field = ... # type: Any commentlist = ... # type: Any def __init__(self, field): ... def gotonext(self): ... def getaddrlist(self): ... def getaddress(self): ... def getrouteaddr(self): ... def getaddrspec(self): ... def getdomain(self): ... def getdelimited(self, beginchar, endchars, allowcomments=True): ... def getquote(self): ... def getcomment(self): ... def getdomainliteral(self): ... def getatom(self, atomends=None): ... def getphraselist(self): ... class AddressList(AddrlistClass): addresslist = ... # type: Any def __init__(self, field): ... def __len__(self): ... def __add__(self, other): ... def __iadd__(self, other): ... def __sub__(self, other): ... def __isub__(self, other): ... def __getitem__(self, index): ... mypy-0.560/typeshed/stdlib/2/email/mime/0000755€tŠÔÚ€2›s®0000000000013215007244024157 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2/email/mime/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212026422 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2/email/mime/application.pyi0000644€tŠÔÚ€2›s®0000000065313215007212027204 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.application from typing import Callable, Optional, Tuple, Union from email.mime.nonmultipart import MIMENonMultipart _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] class MIMEApplication(MIMENonMultipart): def __init__(self, _data: bytes, _subtype: str = ..., _encoder: Callable[[MIMEApplication], None] = ..., **_params: _ParamsType) -> None: ... mypy-0.560/typeshed/stdlib/2/email/mime/base.pyi0000644€tŠÔÚ€2›s®0000000026213215007212025607 0ustar jukkaDROPBOX\Domain Users00000000000000# NOTE: This stub is incomplete. # import message # TODO # class MIMEBase(message.Message): class MIMEBase: def __init__(self, _maintype, _subtype, **_params) -> None: ... mypy-0.560/typeshed/stdlib/2/email/mime/multipart.pyi0000644€tŠÔÚ€2›s®0000000023713215007212026720 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.base import MIMEBase class MIMEMultipart(MIMEBase): def __init__(self, _subtype=..., boundary=..., _subparts=..., **_params) -> None: ... mypy-0.560/typeshed/stdlib/2/email/mime/nonmultipart.pyi0000644€tŠÔÚ€2›s®0000000015313215007212027430 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.base import MIMEBase class MIMENonMultipart(MIMEBase): def attach(self, payload): ... mypy-0.560/typeshed/stdlib/2/email/mime/text.pyi0000644€tŠÔÚ€2›s®0000000023713215007212025663 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.nonmultipart import MIMENonMultipart class MIMEText(MIMENonMultipart): def __init__(self, _text, _subtype=..., _charset=...) -> None: ... mypy-0.560/typeshed/stdlib/2/email/MIMEText.pyi0000644€tŠÔÚ€2›s®0000000023713215007212025344 0ustar jukkaDROPBOX\Domain Users00000000000000from email.mime.nonmultipart import MIMENonMultipart class MIMEText(MIMENonMultipart): def __init__(self, _text, _subtype=..., _charset=...) -> None: ... mypy-0.560/typeshed/stdlib/2/email/utils.pyi0000644€tŠÔÚ€2›s®0000000131213215007212025103 0ustar jukkaDROPBOX\Domain Users00000000000000from email._parseaddr import AddressList as _AddressList from email._parseaddr import mktime_tz as mktime_tz from email._parseaddr import parsedate as _parsedate from email._parseaddr import parsedate_tz as _parsedate_tz from quopri import decodestring as _qdecode def formataddr(pair): ... def getaddresses(fieldvalues): ... 
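# Illustrative usage (not part of the typeshed stub): formataddr() and
# getaddresses() above convert between (realname, address) pairs and RFC 2822
# address headers. A minimal sketch against the real email.utils module
# (example-only aliases, illustrative addresses):
from email.utils import formataddr as _fmt_example, getaddresses as _gets_example
assert _fmt_example(('Mypy Docs', 'docs@example.com')) == 'Mypy Docs <docs@example.com>'
assert _gets_example(['Mypy Docs <docs@example.com>']) == [('Mypy Docs', 'docs@example.com')]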
def formatdate(timeval=None, localtime=False, usegmt=False): ... def make_msgid(idstring=None): ... def parsedate(data): ... def parsedate_tz(data): ... def parseaddr(addr): ... def unquote(str): ... def decode_rfc2231(s): ... def encode_rfc2231(s, charset=None, language=None): ... def decode_params(params): ... def collapse_rfc2231_value(value, errors=..., fallback_charset=...): ... mypy-0.560/typeshed/stdlib/2/encodings/0000755€tŠÔÚ€2›s®0000000000013215007244024112 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2/encodings/__init__.pyi0000644€tŠÔÚ€2›s®0000000013613215007212026367 0ustar jukkaDROPBOX\Domain Users00000000000000import codecs import typing def search_function(encoding: str) -> codecs.CodecInfo: ... mypy-0.560/typeshed/stdlib/2/encodings/utf_8.pyi0000644€tŠÔÚ€2›s®0000000107513215007212025660 0ustar jukkaDROPBOX\Domain Users00000000000000import codecs from typing import Text, Tuple class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input: Text, final: bool = ...) -> bytes: ... class IncrementalDecoder(codecs.BufferedIncrementalDecoder): def _buffer_decode(self, input: bytes, errors: str, final: bool) -> Tuple[Text, int]: ... class StreamWriter(codecs.StreamWriter): ... class StreamReader(codecs.StreamReader): ... def getregentry() -> codecs.CodecInfo: ... def encode(input: Text, errors: Text = ...) -> bytes: ... def decode(input: bytes, errors: Text = ...) -> Text: ... mypy-0.560/typeshed/stdlib/2/exceptions.pyi0000644€tŠÔÚ€2›s®0000000512013215007212025036 0ustar jukkaDROPBOX\Domain Users00000000000000from __builtin__ import ArithmeticError as ArithmeticError from __builtin__ import AssertionError as AssertionError from __builtin__ import AttributeError as AttributeError from __builtin__ import BaseException as BaseException from __builtin__ import BufferError as BufferError from __builtin__ import BytesWarning as BytesWarning from __builtin__ import DeprecationWarning as DeprecationWarning from __builtin__ import EOFError as EOFError from __builtin__ import EnvironmentError as EnvironmentError from __builtin__ import Exception as Exception from __builtin__ import FloatingPointError as FloatingPointError from __builtin__ import FutureWarning as FutureWarning from __builtin__ import GeneratorExit as GeneratorExit from __builtin__ import IOError as IOError from __builtin__ import ImportError as ImportError from __builtin__ import ImportWarning as ImportWarning from __builtin__ import IndentationError as IndentationError from __builtin__ import IndexError as IndexError from __builtin__ import KeyError as KeyError from __builtin__ import KeyboardInterrupt as KeyboardInterrupt from __builtin__ import LookupError as LookupError from __builtin__ import MemoryError as MemoryError from __builtin__ import NameError as NameError from __builtin__ import NotImplementedError as NotImplementedError from __builtin__ import OSError as OSError from __builtin__ import OverflowError as OverflowError from __builtin__ import PendingDeprecationWarning as PendingDeprecationWarning from __builtin__ import ReferenceError as ReferenceError from __builtin__ import RuntimeError as RuntimeError from __builtin__ import RuntimeWarning as RuntimeWarning from __builtin__ import StandardError as StandardError from __builtin__ import StopIteration as StopIteration from __builtin__ import SyntaxError as SyntaxError from __builtin__ import SyntaxWarning as SyntaxWarning from __builtin__ import SystemError as SystemError from __builtin__ import SystemExit as SystemExit 
from __builtin__ import TabError as TabError from __builtin__ import TypeError as TypeError from __builtin__ import UnboundLocalError as UnboundLocalError from __builtin__ import UnicodeError as UnicodeError from __builtin__ import UnicodeDecodeError as UnicodeDecodeError from __builtin__ import UnicodeEncodeError as UnicodeEncodeError from __builtin__ import UnicodeTranslateError as UnicodeTranslateError from __builtin__ import UnicodeWarning as UnicodeWarning from __builtin__ import UserWarning as UserWarning from __builtin__ import ValueError as ValueError from __builtin__ import Warning as Warning from __builtin__ import ZeroDivisionError as ZeroDivisionError mypy-0.560/typeshed/stdlib/2/fcntl.pyi0000644€tŠÔÚ€2›s®0000000473713215007212024000 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, IO import io FASYNC = ... # type: int FD_CLOEXEC = ... # type: int DN_ACCESS = ... # type: int DN_ATTRIB = ... # type: int DN_CREATE = ... # type: int DN_DELETE = ... # type: int DN_MODIFY = ... # type: int DN_MULTISHOT = ... # type: int DN_RENAME = ... # type: int F_DUPFD = ... # type: int F_EXLCK = ... # type: int F_GETFD = ... # type: int F_GETFL = ... # type: int F_GETLEASE = ... # type: int F_GETLK = ... # type: int F_GETLK64 = ... # type: int F_GETOWN = ... # type: int F_GETSIG = ... # type: int F_NOTIFY = ... # type: int F_RDLCK = ... # type: int F_SETFD = ... # type: int F_SETFL = ... # type: int F_SETLEASE = ... # type: int F_SETLK = ... # type: int F_SETLK64 = ... # type: int F_SETLKW = ... # type: int F_SETLKW64 = ... # type: int F_SETOWN = ... # type: int F_SETSIG = ... # type: int F_SHLCK = ... # type: int F_UNLCK = ... # type: int F_WRLCK = ... # type: int I_ATMARK = ... # type: int I_CANPUT = ... # type: int I_CKBAND = ... # type: int I_FDINSERT = ... # type: int I_FIND = ... # type: int I_FLUSH = ... # type: int I_FLUSHBAND = ... # type: int I_GETBAND = ... # type: int I_GETCLTIME = ... # type: int I_GETSIG = ... # type: int I_GRDOPT = ... # type: int I_GWROPT = ... # type: int I_LINK = ... # type: int I_LIST = ... # type: int I_LOOK = ... # type: int I_NREAD = ... # type: int I_PEEK = ... # type: int I_PLINK = ... # type: int I_POP = ... # type: int I_PUNLINK = ... # type: int I_PUSH = ... # type: int I_RECVFD = ... # type: int I_SENDFD = ... # type: int I_SETCLTIME = ... # type: int I_SETSIG = ... # type: int I_SRDOPT = ... # type: int I_STR = ... # type: int I_SWROPT = ... # type: int I_UNLINK = ... # type: int LOCK_EX = ... # type: int LOCK_MAND = ... # type: int LOCK_NB = ... # type: int LOCK_READ = ... # type: int LOCK_RW = ... # type: int LOCK_SH = ... # type: int LOCK_UN = ... # type: int LOCK_WRITE = ... # type: int _ANYFILE = Union[int, IO] # TODO All these return either int or bytes depending on the value of # cmd (not on the type of arg). def fcntl(fd: _ANYFILE, op: int, arg: Union[int, bytes] = ...) -> Any: ... # TODO: arg: int or read-only buffer interface or read-write buffer interface def ioctl(fd: _ANYFILE, op: int, arg: Union[int, bytes] = ..., mutate_flag: bool = ...) -> Any: ... def flock(fd: _ANYFILE, op: int) -> None: ... def lockf(fd: _ANYFILE, op: int, length: int = ..., start: int = ..., whence: int = ...) -> Any: ... mypy-0.560/typeshed/stdlib/2/fnmatch.pyi0000644€tŠÔÚ€2›s®0000000053413215007212024301 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import AnyStr, Iterable, List, Union _EitherStr = Union[str, unicode] def fnmatch(filename: _EitherStr, pattern: _EitherStr) -> bool: ... 
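# Illustrative usage (not part of the typeshed stub): fnmatch() above applies
# case-normalised shell-style matching, while fnmatchcase() below matches the
# pattern exactly as written. A minimal sketch against the real fnmatch module
# (example-only alias):
import fnmatch as _fnmatch_example
assert _fnmatch_example.fnmatch('make.bat', '*.bat')
assert not _fnmatch_example.fnmatchcase('MAKE.BAT', '*.bat')   # case-sensitive variant
assert _fnmatch_example.filter(['a.py', 'b.pyc'], '*.py') == ['a.py']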
def fnmatchcase(filename: _EitherStr, pattern: _EitherStr) -> bool: ... def filter(names: Iterable[AnyStr], pattern: _EitherStr) -> List[AnyStr]: ... def translate(pattern: AnyStr) -> AnyStr: ... mypy-0.560/typeshed/stdlib/2/functools.pyi0000644€tŠÔÚ€2›s®0000000251113215007212024672 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for functools (Python 2.7) # NOTE: These are incomplete! from abc import ABCMeta, abstractmethod from typing import Any, Callable, Generic, Dict, Iterable, Optional, Sequence, Tuple, TypeVar, overload from collections import namedtuple _AnyCallable = Callable[..., Any] _T = TypeVar("_T") _S = TypeVar("_S") @overload def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T]) -> _T: ... @overload def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ... WRAPPER_ASSIGNMENTS = ... # type: Sequence[str] WRAPPER_UPDATES = ... # type: Sequence[str] def update_wrapper(wrapper: _AnyCallable, wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> _AnyCallable: ... def wraps(wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> Callable[[_AnyCallable], _AnyCallable]: ... def total_ordering(cls: type) -> type: ... def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], Any]: ... class partial(Generic[_T]): func = ... # Callable[..., _T] args = ... # type: Tuple[Any, ...] keywords = ... # type: Dict[str, Any] def __init__(self, func: Callable[..., _T], *args: Any, **kwargs: Any) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> _T: ... mypy-0.560/typeshed/stdlib/2/future_builtins.pyi0000644€tŠÔÚ€2›s®0000000034213215007212026101 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from itertools import ifilter as filter from itertools import imap as map from itertools import izip as zip def ascii(obj: Any) -> str: ... def hex(x: int) -> str: ... def oct(x: int) -> str: ... mypy-0.560/typeshed/stdlib/2/gc.pyi0000644€tŠÔÚ€2›s®0000000160313215007212023250 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for gc from typing import Any, List, Tuple def enable() -> None: ... def disable() -> None: ... def isenabled() -> bool: ... def collect(generation: int = ...) -> int: ... def set_debug(flags: int) -> None: ... def get_debug() -> int: ... def get_objects() -> List[Any]: ... def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ...) -> None: ... def get_count() -> Tuple[int, int, int]: ... def get_threshold() -> Tuple[int, int, int]: ... def get_referrers(*objs: Any) -> List[Any]: ... def get_referents(*objs: Any) -> List[Any]: ... def is_tracked(obj: Any) -> bool: ... garbage = ... # type: List[Any] DEBUG_STATS = ... # type: int DEBUG_COLLECTABLE = ... # type: int DEBUG_UNCOLLECTABLE = ... # type: int DEBUG_INSTANCES = ... # type: int DEBUG_OBJECTS = ... # type: int DEBUG_SAVEALL = ... # type: int DEBUG_LEAK = ... # type: int mypy-0.560/typeshed/stdlib/2/genericpath.pyi0000644€tŠÔÚ€2›s®0000000060313215007212025147 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import AnyStr, List class _unicode: ... def commonprefix(list: List[AnyStr]) -> AnyStr: ... def exists(path: unicode) -> bool: ... def getatime(path: unicode) -> float: ... def getmtime(path: unicode) -> float: ... def getctime(path: unicode) -> float: ... def getsize(path: unicode) -> int: ... def isfile(path: unicode) -> bool: ... def isdir(path: unicode) -> bool: ... 
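# Illustrative usage (not part of the stubs above): functools.partial() binds
# leading arguments of a callable and functools.reduce() folds an iterable, as
# the generic signatures above describe. A minimal sketch against the real
# functools module (example-only alias; _hex_to_int is a hypothetical helper):
import functools as _functools_example
_hex_to_int = _functools_example.partial(int, base=16)
assert _hex_to_int('ff') == 255
assert _functools_example.reduce(lambda a, b: a + b, [1, 2, 3], 0) == 6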
mypy-0.560/typeshed/stdlib/2/getopt.pyi0000644€tŠÔÚ€2›s®0000000110713215007212024160 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Tuple class GetoptError(Exception): opt = ... # type: str msg = ... # type: str def __init__(self, msg: str, opt: str=...) -> None: ... def __str__(self) -> str: ... error = GetoptError def getopt(args: List[str], shortopts: str, longopts: List[str]=...) -> Tuple[List[Tuple[str, str]], List[str]]: ... def gnu_getopt(args: List[str], shortopts: str, longopts: List[str]=...) -> Tuple[List[Tuple[str, str]], List[str]]: ... mypy-0.560/typeshed/stdlib/2/getpass.pyi0000644€tŠÔÚ€2›s®0000000030013215007212024316 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for getpass (Python 2) from typing import Any, IO class GetPassWarning(UserWarning): ... def getpass(prompt: str = ..., stream: IO[Any] = ...) -> str: ... def getuser() -> str: ... mypy-0.560/typeshed/stdlib/2/gettext.pyi0000644€tŠÔÚ€2›s®0000000414013215007212024342 0ustar jukkaDROPBOX\Domain Users00000000000000# TODO(MichalPokorny): better types from typing import Any, IO, List, Optional, Union def bindtextdomain(domain: str, localedir: str = ...) -> str: ... def bind_textdomain_codeset(domain: str, codeset: str = ...) -> str: ... def textdomain(domain: str = ...) -> str: ... def gettext(message: str) -> str: ... def lgettext(message: str) -> str: ... def dgettext(domain: str, message: str) -> str: ... def ldgettext(domain: str, message: str) -> str: ... def ngettext(singular: str, plural: str, n: int) -> str: ... def lngettext(singular: str, plural: str, n: int) -> str: ... def dngettext(domain: str, singular: str, plural: str, n: int) -> str: ... def ldngettext(domain: str, singular: str, plural: str, n: int) -> str: ... class NullTranslations(object): def __init__(self, fp: IO[str] = ...) -> None: ... def _parse(self, fp: IO[str]) -> None: ... def add_fallback(self, fallback: NullTranslations) -> None: ... def gettext(self, message: str) -> str: ... def lgettext(self, message: str) -> str: ... def ugettext(self, message: Union[str, unicode]) -> unicode: ... def ngettext(self, singular: str, plural: str, n: int) -> str: ... def lngettext(self, singular: str, plural: str, n: int) -> str: ... def ungettext(self, singular: Union[str, unicode], plural: Union[str, unicode], n: int) -> unicode: ... def info(self) -> Any: ... def charset(self) -> Any: ... def output_charset(self) -> Any: ... def set_output_charset(self, charset: Any) -> None: ... def install(self, unicode: bool = ..., names: Any = ...) -> None: ... class GNUTranslations(NullTranslations): LE_MAGIC = ... # type: int BE_MAGIC = ... # type: int def find(domain: str, localedir: str = ..., languages: List[str] = ..., all: Any = ...) -> Optional[Union[str, List[str]]]: ... def translation(domain: str, localedir: str = ..., languages: List[str] = ..., class_: Any = ..., fallback: Any = ..., codeset: Any = ...) -> NullTranslations: ... def install(domain: str, localedir: str = ..., unicode: Any = ..., codeset: Any = ..., names: Any = ...) -> None: ... mypy-0.560/typeshed/stdlib/2/glob.pyi0000644€tŠÔÚ€2›s®0000000046413215007212023606 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Iterator, Union, AnyStr def glob(pathname: AnyStr) -> List[AnyStr]: ... def iglob(pathname: AnyStr) -> Iterator[AnyStr]: ... def glob1(dirname: Union[str, unicode], pattern: AnyStr) -> List[AnyStr]: ... def glob0(dirname: Union[str, unicode], basename: AnyStr) -> List[AnyStr]: ... 
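# Illustrative usage (not part of the stubs above): getopt() returns a pair of
# (option, value) tuples plus the remaining positional arguments, as the
# Tuple[List[Tuple[str, str]], List[str]] return type indicates. A minimal
# sketch against the real getopt module (example-only alias):
import getopt as _getopt_example
_opts, _rest = _getopt_example.getopt(['-o', 'out.txt', 'input.txt'], 'o:v', ['verbose'])
assert _opts == [('-o', 'out.txt')]
assert _rest == ['input.txt']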
mypy-0.560/typeshed/stdlib/2/gzip.pyi0000644€tŠÔÚ€2›s®0000000225413215007212023633 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, Text import io class GzipFile(io.BufferedIOBase): myfileobj = ... # type: Any max_read_chunk = ... # type: Any mode = ... # type: Any extrabuf = ... # type: Any extrasize = ... # type: Any extrastart = ... # type: Any name = ... # type: Any min_readsize = ... # type: Any compress = ... # type: Any fileobj = ... # type: Any offset = ... # type: Any mtime = ... # type: Any def __init__(self, filename: str = ..., mode: Text = ..., compresslevel: int = ..., fileobj: IO[str] = ..., mtime: float = ...) -> None: ... @property def filename(self): ... size = ... # type: Any crc = ... # type: Any def write(self, data): ... def read(self, size=...): ... @property def closed(self): ... def close(self): ... def flush(self, zlib_mode=...): ... def fileno(self): ... def rewind(self): ... def readable(self): ... def writable(self): ... def seekable(self): ... def seek(self, offset, whence=...): ... def readline(self, size=...): ... def open(filename: str, mode: Text = ..., compresslevel: int = ...) -> GzipFile: ... mypy-0.560/typeshed/stdlib/2/hashlib.pyi0000644€tŠÔÚ€2›s®0000000204113215007212024266 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for hashlib (Python 2) from typing import Tuple, Union _DataType = Union[str, unicode, bytearray, buffer, memoryview] class _hash(object): # This is not actually in the module namespace. name = ... # type: str block_size = 0 digest_size = 0 digestsize = 0 def __init__(self, arg: _DataType = ...) -> None: ... def update(self, arg: _DataType) -> None: ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def copy(self) -> _hash: ... def new(name: str, data: str = ...) -> _hash: ... def md5(s: _DataType = ...) -> _hash: ... def sha1(s: _DataType = ...) -> _hash: ... def sha224(s: _DataType = ...) -> _hash: ... def sha256(s: _DataType = ...) -> _hash: ... def sha384(s: _DataType = ...) -> _hash: ... def sha512(s: _DataType = ...) -> _hash: ... algorithms = ... # type: Tuple[str, ...] algorithms_guaranteed = ... # type: Tuple[str, ...] algorithms_available = ... # type: Tuple[str, ...] def pbkdf2_hmac(name: str, password: str, salt: str, rounds: int, dklen: int = ...) -> str: ... mypy-0.560/typeshed/stdlib/2/heapq.pyi0000644€tŠÔÚ€2›s®0000000123513215007212023756 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar, List, Iterable, Any, Callable, Optional _T = TypeVar('_T') def cmp_lt(x, y) -> bool: ... def heappush(heap: List[_T], item: _T) -> None: ... def heappop(heap: List[_T]) -> _T: raise IndexError() # if heap is empty def heappushpop(heap: List[_T], item: _T) -> _T: ... def heapify(x: List[_T]) -> None: ... def heapreplace(heap: List[_T], item: _T) -> _T: raise IndexError() # if heap is empty def merge(*iterables: Iterable[_T]) -> Iterable[_T]: ... def nlargest(n: int, iterable: Iterable[_T], key: Optional[Callable[[_T], Any]] = ...) -> List[_T]: ... def nsmallest(n: int, iterable: Iterable[_T]) -> List[_T]: ... mypy-0.560/typeshed/stdlib/2/htmlentitydefs.pyi0000644€tŠÔÚ€2›s®0000000025513215007212025724 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Mapping name2codepoint = ... # type: Mapping[str, int] codepoint2name = ... # type: Mapping[int, str] entitydefs = ... 
# type: Mapping[str, str] mypy-0.560/typeshed/stdlib/2/HTMLParser.pyi0000644€tŠÔÚ€2›s®0000000212513215007212024600 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Tuple, AnyStr from markupbase import ParserBase class HTMLParser(ParserBase): def __init__(self) -> None: ... def feed(self, feed: AnyStr) -> None: ... def close(self) -> None: ... def reset(self) -> None: ... def get_starttag_text(self) -> AnyStr: ... def set_cdata_mode(self, AnyStr) -> None: ... def clear_cdata_mode(self) -> None: ... def handle_startendtag(self, tag: AnyStr, attrs: List[Tuple[AnyStr, AnyStr]]): ... def handle_starttag(self, tag: AnyStr, attrs: List[Tuple[AnyStr, AnyStr]]): ... def handle_endtag(self, tag: AnyStr): ... def handle_charref(self, name: AnyStr): ... def handle_entityref(self, name: AnyStr): ... def handle_data(self, data: AnyStr): ... def handle_comment(self, data: AnyStr): ... def handle_decl(self, decl: AnyStr): ... def handle_pi(self, data: AnyStr): ... def unknown_decl(self, data: AnyStr): ... def unescape(self, s: AnyStr) -> AnyStr: ... class HTMLParseError(Exception): msg = ... # type: str lineno = ... # type: int offset = ... # type: int mypy-0.560/typeshed/stdlib/2/httplib.pyi0000644€tŠÔÚ€2›s®0000001364513215007212024336 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for httplib (Python 2) # # Generated by stubgen and manually massaged a bit. # Needs lots more work! from typing import Any, Dict import mimetools class HTTPMessage(mimetools.Message): def addheader(self, key: str, value: str) -> None: ... def addcontinue(self, key: str, more: str) -> None: ... dict = ... # type: Dict[str, str] unixfrom = ... # type: str headers = ... # type: Any status = ... # type: str seekable = ... # type: bool def readheaders(self) -> None: ... class HTTPResponse: fp = ... # type: Any debuglevel = ... # type: Any strict = ... # type: Any msg = ... # type: Any version = ... # type: Any status = ... # type: Any reason = ... # type: Any chunked = ... # type: Any chunk_left = ... # type: Any length = ... # type: Any will_close = ... # type: Any def __init__(self, sock, debuglevel=0, strict=0, method=None, buffering: bool=...) -> None: ... def begin(self): ... def close(self): ... def isclosed(self): ... def read(self, amt=None): ... def fileno(self): ... def getheader(self, name, default=None): ... def getheaders(self): ... class HTTPConnection: response_class = ... # type: Any default_port = ... # type: Any auto_open = ... # type: Any debuglevel = ... # type: Any strict = ... # type: Any timeout = ... # type: Any source_address = ... # type: Any sock = ... # type: Any def __init__(self, host, port=None, strict=None, timeout=..., source_address=None) -> None: ... def set_tunnel(self, host, port=None, headers=None): ... def set_debuglevel(self, level): ... def connect(self): ... def close(self): ... def send(self, data): ... def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0): ... def putheader(self, header, *values): ... def endheaders(self, message_body=None): ... def request(self, method, url, body=None, headers=...): ... def getresponse(self, buffering: bool=...): ... class HTTP: debuglevel = ... # type: Any def __init__(self, host: str=..., port=None, strict=None) -> None: ... def connect(self, host=None, port=None): ... def getfile(self): ... file = ... # type: Any headers = ... # type: Any def getreply(self, buffering: bool=...): ... def close(self): ... class HTTPSConnection(HTTPConnection): default_port = ... # type: Any key_file = ... 
# type: Any cert_file = ... # type: Any def __init__(self, host, port=None, key_file=None, cert_file=None, strict=None, timeout=..., source_address=None, context=None) -> None: ... sock = ... # type: Any def connect(self): ... class HTTPS(HTTP): key_file = ... # type: Any cert_file = ... # type: Any def __init__(self, host: str=..., port=None, key_file=None, cert_file=None, strict=None, context=None) -> None: ... class HTTPException(Exception): ... class NotConnected(HTTPException): ... class InvalidURL(HTTPException): ... class UnknownProtocol(HTTPException): args = ... # type: Any version = ... # type: Any def __init__(self, version) -> None: ... class UnknownTransferEncoding(HTTPException): ... class UnimplementedFileMode(HTTPException): ... class IncompleteRead(HTTPException): args = ... # type: Any partial = ... # type: Any expected = ... # type: Any def __init__(self, partial, expected=None) -> None: ... class ImproperConnectionState(HTTPException): ... class CannotSendRequest(ImproperConnectionState): ... class CannotSendHeader(ImproperConnectionState): ... class ResponseNotReady(ImproperConnectionState): ... class BadStatusLine(HTTPException): args = ... # type: Any line = ... # type: Any def __init__(self, line) -> None: ... class LineTooLong(HTTPException): def __init__(self, line_type) -> None: ... error = ... # type: Any class LineAndFileWrapper: def __init__(self, line, file) -> None: ... def __getattr__(self, attr): ... def read(self, amt=None): ... def readline(self): ... def readlines(self, size=None): ... # Constants responses = ... # type: Dict[int, str] HTTP_PORT = ... # type: int HTTPS_PORT = ... # type: int # status codes # informational CONTINUE = ... # type: int SWITCHING_PROTOCOLS = ... # type: int PROCESSING = ... # type: int # successful OK = ... # type: int CREATED = ... # type: int ACCEPTED = ... # type: int NON_AUTHORITATIVE_INFORMATION = ... # type: int NO_CONTENT = ... # type: int RESET_CONTENT = ... # type: int PARTIAL_CONTENT = ... # type: int MULTI_STATUS = ... # type: int IM_USED = ... # type: int # redirection MULTIPLE_CHOICES = ... # type: int MOVED_PERMANENTLY = ... # type: int FOUND = ... # type: int SEE_OTHER = ... # type: int NOT_MODIFIED = ... # type: int USE_PROXY = ... # type: int TEMPORARY_REDIRECT = ... # type: int # client error BAD_REQUEST = ... # type: int UNAUTHORIZED = ... # type: int PAYMENT_REQUIRED = ... # type: int FORBIDDEN = ... # type: int NOT_FOUND = ... # type: int METHOD_NOT_ALLOWED = ... # type: int NOT_ACCEPTABLE = ... # type: int PROXY_AUTHENTICATION_REQUIRED = ... # type: int REQUEST_TIMEOUT = ... # type: int CONFLICT = ... # type: int GONE = ... # type: int LENGTH_REQUIRED = ... # type: int PRECONDITION_FAILED = ... # type: int REQUEST_ENTITY_TOO_LARGE = ... # type: int REQUEST_URI_TOO_LONG = ... # type: int UNSUPPORTED_MEDIA_TYPE = ... # type: int REQUESTED_RANGE_NOT_SATISFIABLE = ... # type: int EXPECTATION_FAILED = ... # type: int UNPROCESSABLE_ENTITY = ... # type: int LOCKED = ... # type: int FAILED_DEPENDENCY = ... # type: int UPGRADE_REQUIRED = ... # type: int # server error INTERNAL_SERVER_ERROR = ... # type: int NOT_IMPLEMENTED = ... # type: int BAD_GATEWAY = ... # type: int SERVICE_UNAVAILABLE = ... # type: int GATEWAY_TIMEOUT = ... # type: int HTTP_VERSION_NOT_SUPPORTED = ... # type: int INSUFFICIENT_STORAGE = ... # type: int NOT_EXTENDED = ... 
# type: int mypy-0.560/typeshed/stdlib/2/imp.pyi0000644€tŠÔÚ€2›s®0000000266513215007212023455 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stubs for the 'imp' module.""" from typing import List, Optional, Tuple, Iterable, IO, Any import types C_BUILTIN = ... # type: int C_EXTENSION = ... # type: int IMP_HOOK = ... # type: int PKG_DIRECTORY = ... # type: int PY_CODERESOURCE = ... # type: int PY_COMPILED = ... # type: int PY_FROZEN = ... # type: int PY_RESOURCE = ... # type: int PY_SOURCE = ... # type: int SEARCH_ERROR = ... # type: int def acquire_lock() -> None: ... def find_module(name: str, path: Iterable[str] = ...) -> Optional[Tuple[str, str, Tuple[str, str, int]]]: ... def get_magic() -> str: ... def get_suffixes() -> List[Tuple[str, str, int]]: ... def init_builtin(name: str) -> types.ModuleType: ... def init_frozen(name: str) -> types.ModuleType: ... def is_builtin(name: str) -> int: ... def is_frozen(name: str) -> bool: ... def load_compiled(name: str, pathname: str, file: IO[Any] = ...) -> types.ModuleType: ... def load_dynamic(name: str, pathname: str, file: IO[Any] = ...) -> types.ModuleType: ... def load_module(name: str, file: str, pathname: str, description: Tuple[str, str, int]) -> types.ModuleType: ... def load_source(name: str, pathname: str, file: IO[Any] = ...) -> types.ModuleType: ... def lock_held() -> bool: ... def new_module(name: str) -> types.ModuleType: ... def release_lock() -> None: ... class NullImporter: def __init__(self, path_string: str) -> None: ... def find_module(self, fullname: str, path: str = ...) -> None: ... mypy-0.560/typeshed/stdlib/2/importlib.pyi0000644€tŠÔÚ€2›s®0000000013013215007212024652 0ustar jukkaDROPBOX\Domain Users00000000000000import types def import_module(name: str, package: str = ...) -> types.ModuleType: ... mypy-0.560/typeshed/stdlib/2/inspect.pyi0000644€tŠÔÚ€2›s®0000000746313215007212024336 0ustar jukkaDROPBOX\Domain Users00000000000000from types import TracebackType, FrameType, ModuleType from typing import Any, Dict, Callable, List, Optional, Tuple, Union, NamedTuple, Type # Types and members ModuleInfo = NamedTuple('ModuleInfo', [('name', str), ('suffix', str), ('mode', str), ('module_type', int), ]) def getmembers( object: object, predicate: Callable[[Any], bool] = ... ) -> List[Tuple[str, Any]]: ... def getmoduleinfo(path: str) -> Optional[ModuleInfo]: ... def getmodulename(path: str) -> Optional[str]: ... def ismodule(object: object) -> bool: ... def isclass(object: object) -> bool: ... def ismethod(object: object) -> bool: ... def isfunction(object: object) -> bool: ... def isgeneratorfunction(object: object) -> bool: ... def isgenerator(object: object) -> bool: ... def istraceback(object: object) -> bool: ... def isframe(object: object) -> bool: ... def iscode(object: object) -> bool: ... def isbuiltin(object: object) -> bool: ... def isroutine(object: object) -> bool: ... def isabstract(object: object) -> bool: ... def ismethoddescriptor(object: object) -> bool: ... def isdatadescriptor(object: object) -> bool: ... def isgetsetdescriptor(object: object) -> bool: ... def ismemberdescriptor(object: object) -> bool: ... # Retrieving source code def getdoc(object: object) -> str: ... def getcomments(object: object) -> str: ... def getfile(object: object) -> str: ... def getmodule(object: object) -> ModuleType: ... def getsourcefile(object: object) -> str: ... # TODO restrict to "module, class, method, function, traceback, frame, # or code object" def getsourcelines(object: object) -> Tuple[List[str], int]: ... 
# TODO restrict to "a module, class, method, function, traceback, frame, # or code object" def getsource(object: object) -> str: ... def cleandoc(doc: str) -> str: ... # Classes and functions def getclasstree(classes: List[type], unique: bool = ...) -> List[ Union[Tuple[type, Tuple[type, ...]], list]]: ... ArgSpec = NamedTuple('ArgSpec', [('args', List[str]), ('varargs', Optional[str]), ('keywords', Optional[str]), ('defaults', tuple), ]) ArgInfo = NamedTuple('ArgInfo', [('args', List[str]), ('varargs', Optional[str]), ('keywords', Optional[str]), ('locals', Dict[str, Any]), ]) def getargspec(func: object) -> ArgSpec: ... def getargvalues(frame: FrameType) -> ArgInfo: ... def formatargspec(args, varargs=..., varkw=..., defaults=..., formatarg=..., formatvarargs=..., formatvarkw=..., formatvalue=..., join=...) -> str: ... def formatargvalues(args, varargs=..., varkw=..., defaults=..., formatarg=..., formatvarargs=..., formatvarkw=..., formatvalue=..., join=...) -> str: ... def getmro(cls: type) -> Tuple[type, ...]: ... def getcallargs(func, *args, **kwds) -> Dict[str, Any]: ... # The interpreter stack Traceback = NamedTuple( 'Traceback', [ ('filename', str), ('lineno', int), ('function', str), ('code_context', List[str]), ('index', int), ] ) _FrameInfo = Tuple[FrameType, str, int, str, List[str], int] def getouterframes(frame: FrameType, context: int = ...) -> List[_FrameInfo]: ... def getframeinfo(frame: Union[FrameType, TracebackType], context: int = ...) -> Traceback: ... def getinnerframes(traceback: TracebackType, context: int = ...) -> List[_FrameInfo]: ... def currentframe(depth: int = ...) -> FrameType: ... def stack(context: int = ...) -> List[_FrameInfo]: ... def trace(context: int = ...) -> List[_FrameInfo]: ... mypy-0.560/typeshed/stdlib/2/io.pyi0000644€tŠÔÚ€2›s®0000000273013215007212023270 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for io # Based on https://docs.python.org/2/library/io.html # Only a subset of functionality is included. from typing import List, BinaryIO, TextIO, IO, overload, Iterator, Iterable, Any, Union, Optional import _io from _io import BlockingIOError as BlockingIOError from _io import BufferedRWPair as BufferedRWPair from _io import BufferedRandom as BufferedRandom from _io import BufferedReader as BufferedReader from _io import BufferedWriter as BufferedWriter from _io import BytesIO as BytesIO from _io import DEFAULT_BUFFER_SIZE as DEFAULT_BUFFER_SIZE from _io import FileIO as FileIO from _io import IncrementalNewlineDecoder as IncrementalNewlineDecoder from _io import StringIO as StringIO from _io import TextIOWrapper as TextIOWrapper from _io import UnsupportedOperation as UnsupportedOperation from _io import open as open def _OpenWrapper(file: Union[str, unicode, int], mode: unicode = ..., buffering: int = ..., encoding: unicode = ..., errors: unicode = ..., newline: unicode = ..., closefd: bool = ...) -> IO[Any]: ... SEEK_SET = ... # type: int SEEK_CUR = ... # type: int SEEK_END = ... # type: int class IOBase(_io._IOBase): ... class RawIOBase(_io._RawIOBase, IOBase): pass class BufferedIOBase(_io._BufferedIOBase, IOBase): pass # Note: In the actual io.py, TextIOBase subclasses IOBase. # (Which we don't do here because we don't want to subclass both TextIO and BinaryIO.) 
class TextIOBase(_io._TextIOBase): pass mypy-0.560/typeshed/stdlib/2/itertools.pyi0000644€tŠÔÚ€2›s®0000001247713215007212024716 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for itertools # Based on https://docs.python.org/2/library/itertools.html from typing import (Iterator, TypeVar, Iterable, overload, Any, Callable, Tuple, Union, Sequence, Generic, Optional) _T = TypeVar('_T') _S = TypeVar('_S') def count(start: int = ..., step: int = ...) -> Iterator[int]: ... # more general types? def cycle(iterable: Iterable[_T]) -> Iterator[_T]: ... def repeat(object: _T, times: int = ...) -> Iterator[_T]: ... def accumulate(iterable: Iterable[_T]) -> Iterator[_T]: ... class chain(Iterator[_T], Generic[_T]): def __init__(self, *iterables: Iterable[_T]) -> None: ... def next(self) -> _T: ... def __iter__(self) -> Iterator[_T]: ... @staticmethod def from_iterable(iterable: Iterable[Iterable[_S]]) -> Iterator[_S]: ... def compress(data: Iterable[_T], selectors: Iterable[Any]) -> Iterator[_T]: ... def dropwhile(predicate: Callable[[_T], Any], iterable: Iterable[_T]) -> Iterator[_T]: ... def ifilter(predicate: Optional[Callable[[_T], Any]], iterable: Iterable[_T]) -> Iterator[_T]: ... def ifilterfalse(predicate: Optional[Callable[[_T], Any]], iterable: Iterable[_T]) -> Iterator[_T]: ... @overload def groupby(iterable: Iterable[_T]) -> Iterator[Tuple[_T, Iterator[_T]]]: ... @overload def groupby(iterable: Iterable[_T], key: Callable[[_T], _S]) -> Iterator[Tuple[_S, Iterator[_T]]]: ... @overload def islice(iterable: Iterable[_T], stop: int) -> Iterator[_T]: ... @overload def islice(iterable: Iterable[_T], start: Optional[int], stop: Optional[int], step: int = ...) -> Iterator[_T]: ... _T1 = TypeVar('_T1') _T2 = TypeVar('_T2') _T3 = TypeVar('_T3') _T4 = TypeVar('_T4') _T5 = TypeVar('_T5') _T6 = TypeVar('_T6') @overload def imap(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> Iterator[_S]: ... @overload def imap(func: Callable[[_T1, _T2], _S], iter1: Iterable[_T1], iter2: Iterable[_T2]) -> Iterator[_S]: ... # TODO more than two iterables def starmap(func: Any, iterable: Iterable[Any]) -> Iterator[Any]: ... def takewhile(predicate: Callable[[_T], Any], iterable: Iterable[_T]) -> Iterator[_T]: ... def tee(iterable: Iterable[_T], n: int = ...) -> Tuple[Iterator[_T], ...]: ... @overload def izip(iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ... @overload def izip(iter1: Iterable[_T1], iter2: Iterable[_T2]) -> Iterator[Tuple[_T1, _T2]]: ... @overload def izip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3]) -> Iterator[Tuple[_T1, _T2, _T3]]: ... @overload def izip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4]) -> Iterator[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def izip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5]) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def izip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5], iter6: Iterable[_T6]) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload def izip(iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any], iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any], iter7: Iterable[Any], *iterables: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ... def izip_longest(*p: Iterable[Any], fillvalue: Any = ...) -> Iterator[Any]: ... @overload def product(iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ... 
@overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2]) -> Iterator[Tuple[_T1, _T2]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3]) -> Iterator[Tuple[_T1, _T2, _T3]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4]) -> Iterator[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5]) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5], iter6: Iterable[_T6]) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload def product(iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any], iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any], iter7: Iterable[Any], *iterables: Iterable) -> Iterator[Tuple]: ... @overload def product(*iter: Iterable[_T], repeat: int) -> Iterator[Tuple[_T, ...]]: ... def permutations(iterable: Iterable[_T], r: int = ...) -> Iterator[Sequence[_T]]: ... def combinations(iterable: Iterable[_T], r: int) -> Iterator[Sequence[_T]]: ... def combinations_with_replacement(iterable: Iterable[_T], r: int) -> Iterator[Sequence[_T]]: ... mypy-0.560/typeshed/stdlib/2/json.pyi0000644€tŠÔÚ€2›s®0000000635413215007212023640 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, Optional, Tuple, Callable, Dict, List, Union, Text class JSONDecodeError(ValueError): def dumps(self, obj: Any) -> str: ... def dump(self, obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ... def loads(self, s: str) -> Any: ... def load(self, fp: IO[str]) -> Any: ... def dumps(obj: Any, skipkeys: bool = ..., ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., cls: Any = ..., indent: Optional[int] = ..., separators: Optional[Tuple[str, str]] = ..., encoding: str = ..., default: Optional[Callable[[Any], Any]] = ..., sort_keys: bool = ..., **kwds: Any) -> str: ... def dump(obj: Any, fp: IO[str], skipkeys: bool = ..., ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., cls: Any = ..., indent: Optional[int] = ..., separators: Optional[Tuple[str, str]] = ..., encoding: str = ..., default: Optional[Callable[[Any], Any]] = ..., sort_keys: bool = ..., **kwds: Any) -> None: ... def loads(s: Union[Text, bytes], encoding: Any = ..., cls: Any = ..., object_hook: Optional[Callable[[Dict], Any]] = ..., parse_float: Optional[Callable[[str], Any]] = ..., parse_int: Optional[Callable[[str], Any]] = ..., parse_constant: Optional[Callable[[str], Any]] = ..., object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ..., **kwds: Any) -> Any: ... def load(fp: IO[str], encoding: Optional[str] = ..., cls: Any = ..., object_hook: Optional[Callable[[Dict], Any]] = ..., parse_float: Optional[Callable[[str], Any]] = ..., parse_int: Optional[Callable[[str], Any]] = ..., parse_constant: Optional[Callable[[str], Any]] = ..., object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ..., **kwds: Any) -> Any: ... class JSONDecoder(object): def __init__(self, encoding: Union[Text, bytes] = ..., object_hook: Callable[..., Any] = ..., parse_float: Callable[[str], float] = ..., parse_int: Callable[[str], int] = ..., parse_constant: Callable[[str], Any] = ..., strict: bool = ..., object_pairs_hook: Callable[..., Any] = ...) -> None: ... 
def decode(self, s: Union[Text, bytes], _w: Any = ...) -> Any: ... def raw_decode(self, s: Union[Text, bytes], idx: int = ...) -> Tuple[Any, Any]: ... class JSONEncoder(object): item_separator = ... # type: str key_separator = ... # type: str skipkeys = ... # type: bool ensure_ascii = ... # type: bool check_circular = ... # type: bool allow_nan = ... # type: bool sort_keys = ... # type: bool indent = ... # type: int def __init__(self, skipkeys: bool = ..., ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., sort_keys: bool = ..., indent: int = ..., separators: Tuple[Union[Text, bytes], Union[Text, bytes]] = ..., encoding: Union[Text, bytes] = ..., default: Callable[..., Any] = ...) -> None: ... def default(self, o: Any) -> Any: ... def encode(self, o: Any) -> str: ... def iterencode(self, o: Any, _one_shot: bool = ...) -> str: ... mypy-0.560/typeshed/stdlib/2/macpath.pyi0000644€tŠÔÚ€2›s®0000000147313215007212024301 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from genericpath import * # noqa: F403 curdir = ... # type: Any pardir = ... # type: Any extsep = ... # type: Any sep = ... # type: Any pathsep = ... # type: Any defpath = ... # type: Any altsep = ... # type: Any devnull = ... # type: Any def normcase(s): ... def isabs(s): ... def join(a, *p): ... def split(p): ... def splitext(p): ... def splitdrive(p): ... def basename(p): ... def dirname(p): ... def islink(path): ... def lexists(path): ... def samefile(f1, f2): ... def sameopenfile(fp1, fp2): ... def samestat(s1, s2): ... def ismount(path): ... def walk(top, func, arg): ... def expanduser(path): ... def expandvars(path): ... def normpath(path): ... def abspath(path): ... def realpath(filename): ... supports_unicode_filenames = ... # type: Any def relpath(path, start=...): ... mypy-0.560/typeshed/stdlib/2/markupbase.pyi0000644€tŠÔÚ€2›s®0000000041013215007212025004 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple class ParserBase(object): def __init__(self) -> None: ... def error(self, message: str) -> None: ... def reset(self) -> None: ... def getpos(self) -> Tuple[int, int]: ... def unkown_decl(self, data: str) -> None: ... mypy-0.560/typeshed/stdlib/2/md5.pyi0000644€tŠÔÚ€2›s®0000000016413215007212023345 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for Python 2.7 md5 stdlib module from hashlib import md5 as md5, md5 as new blocksize = 0 digest_size = 0 mypy-0.560/typeshed/stdlib/2/mimetools.pyi0000644€tŠÔÚ€2›s®0000000143013215007212024665 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any import rfc822 class Message(rfc822.Message): encodingheader = ... # type: Any typeheader = ... # type: Any def __init__(self, fp, seekable=1): ... plisttext = ... # type: Any type = ... # type: Any maintype = ... # type: Any subtype = ... # type: Any def parsetype(self): ... plist = ... # type: Any def parseplist(self): ... def getplist(self): ... def getparam(self, name): ... def getparamnames(self): ... def getencoding(self): ... def gettype(self): ... def getmaintype(self): ... def getsubtype(self): ... def choose_boundary(): ... def decode(input, output, encoding): ... def encode(input, output, encoding): ... def copyliteral(input, output): ... def copybinary(input, output): ... 
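# Illustrative usage sketch (not part of typeshed): the md5 stub above simply
# re-exports hashlib.md5, so both spellings below resolve to the same _hash
# type from the hashlib stub, whose hexdigest() is declared to return str.
import md5

digest = md5.new("payload").hexdigest()   # via the re-exported constructor alias
same = md5.md5("payload").hexdigest()     # direct re-export of hashlib.md5
print(digest == same)                      # True; both are plain str digests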
mypy-0.560/typeshed/stdlib/2/multiprocessing/0000755€tŠÔÚ€2›s®0000000000013215007244025370 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2/multiprocessing/__init__.pyi0000644€tŠÔÚ€2›s®0000000204313215007212027644 0ustar jukkaDROPBOX\Domain Users00000000000000from multiprocessing.process import Process as Process, current_process as current_process, active_children as active_children from multiprocessing.util import SUBDEBUG as SUBDEBUG, SUBWARNING as SUBWARNING class ProcessError(Exception): ... class BufferTooShort(ProcessError): ... class TimeoutError(ProcessError): ... class AuthenticationError(ProcessError): ... def Manager(): ... def Pipe(duplex=True): ... def cpu_count() -> int: ... def freeze_support(): ... def get_logger(): ... def log_to_stderr(level=None): ... def allow_connection_pickling(): ... def Lock(): ... def RLock(): ... def Condition(lock=None): ... def Semaphore(value=1): ... def BoundedSemaphore(value=1): ... def Event(): ... def Queue(maxsize=0): ... def JoinableQueue(maxsize=0): ... def Pool(processes=None, initializer=None, initargs=..., maxtasksperchild=None): ... def RawValue(typecode_or_type, *args): ... def RawArray(typecode_or_type, size_or_initializer): ... def Value(typecode_or_type, *args, **kwds): ... def Array(typecode_or_type, size_or_initializer, **kwds): ... mypy-0.560/typeshed/stdlib/2/multiprocessing/process.pyi0000644€tŠÔÚ€2›s®0000000150013215007212027560 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def current_process(): ... def active_children(): ... class Process: def __init__(self, group=None, target=None, name=None, args=..., kwargs=...): ... def run(self): ... def start(self): ... def terminate(self): ... def join(self, timeout=None): ... def is_alive(self): ... @property def name(self): ... @name.setter def name(self, name): ... @property def daemon(self): ... @daemon.setter def daemon(self, daemonic): ... @property def authkey(self): ... @authkey.setter def authkey(self, authkey): ... @property def exitcode(self): ... @property def ident(self): ... pid = ... # type: Any class AuthenticationString(bytes): def __reduce__(self): ... class _MainProcess(Process): def __init__(self): ... mypy-0.560/typeshed/stdlib/2/multiprocessing/util.pyi0000644€tŠÔÚ€2›s®0000000131013215007212027056 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any import threading SUBDEBUG = ... # type: Any SUBWARNING = ... # type: Any def sub_debug(msg, *args): ... def debug(msg, *args): ... def info(msg, *args): ... def sub_warning(msg, *args): ... def get_logger(): ... def log_to_stderr(level=None): ... def get_temp_dir(): ... def register_after_fork(obj, func): ... class Finalize: def __init__(self, obj, callback, args=..., kwargs=None, exitpriority=None): ... def __call__(self, wr=None): ... def cancel(self): ... def still_active(self): ... def is_exiting(): ... class ForkAwareThreadLock: def __init__(self): ... class ForkAwareLocal(threading.local): def __init__(self): ... def __reduce__(self): ... mypy-0.560/typeshed/stdlib/2/mutex.pyi0000644€tŠÔÚ€2›s®0000000073013215007212024021 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/mutex.py from collections import deque from typing import Any, Callable, TypeVar _ArgType = TypeVar('_ArgType') class mutex: locked = ... # type: bool queue = ... # type: deque def __init__(self) -> None: ... def test(self) -> bool: ... def testandset(self) -> bool: ... 
def lock(self, function: Callable[[_ArgType], Any], argument: _ArgType) -> None: ... def unlock(self) -> None: ... mypy-0.560/typeshed/stdlib/2/ntpath.pyi0000644€tŠÔÚ€2›s®0000000147313215007212024162 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from genericpath import * # noqa: F403 curdir = ... # type: Any pardir = ... # type: Any extsep = ... # type: Any sep = ... # type: Any pathsep = ... # type: Any defpath = ... # type: Any altsep = ... # type: Any devnull = ... # type: Any def normcase(s): ... def isabs(s): ... def join(a, *p): ... def split(p): ... def splitext(p): ... def splitdrive(p): ... def basename(p): ... def dirname(p): ... def islink(path): ... def lexists(path): ... def samefile(f1, f2): ... def sameopenfile(fp1, fp2): ... def samestat(s1, s2): ... def ismount(path): ... def walk(top, func, arg): ... def expanduser(path): ... def expandvars(path): ... def normpath(path): ... def abspath(path): ... def realpath(filename): ... supports_unicode_filenames = ... # type: Any def relpath(path, start=...): ... mypy-0.560/typeshed/stdlib/2/nturl2path.pyi0000644€tŠÔÚ€2›s®0000000016313215007212024762 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import AnyStr def url2pathname(url: AnyStr) -> AnyStr: ... def pathname2url(p: AnyStr) -> AnyStr: ... mypy-0.560/typeshed/stdlib/2/os/0000755€tŠÔÚ€2›s®0000000000013215007244022562 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2/os/__init__.pyi0000644€tŠÔÚ€2›s®0000003362413215007212025047 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for os # Ron Murawski from builtins import OSError as error from io import TextIOWrapper as _TextIOWrapper from posix import stat_result as stat_result # TODO: use this, see https://github.com/python/mypy/issues/3078 import sys from typing import ( Mapping, MutableMapping, Dict, List, Any, Tuple, Iterator, overload, Union, AnyStr, Optional, Generic, Set, Callable, Text, Sequence, IO, NamedTuple, TypeVar ) from . import path as path from mypy_extensions import NoReturn _T = TypeVar('_T') # ----- os variables ----- if sys.version_info >= (3, 2): supports_bytes_environ: bool if sys.version_info >= (3, 3): supports_dir_fd: Set[Callable[..., Any]] supports_fd: Set[Callable[..., Any]] supports_effective_ids: Set[Callable[..., Any]] supports_follow_symlinks: Set[Callable[..., Any]] SEEK_SET: int SEEK_CUR: int SEEK_END: int O_RDONLY: int O_WRONLY: int O_RDWR: int O_APPEND: int O_CREAT: int O_EXCL: int O_TRUNC: int O_DSYNC: int # Unix only O_RSYNC: int # Unix only O_SYNC: int # Unix only O_NDELAY: int # Unix only O_NONBLOCK: int # Unix only O_NOCTTY: int # Unix only O_SHLOCK: int # Unix only O_EXLOCK: int # Unix only O_BINARY: int # Windows only O_NOINHERIT: int # Windows only O_SHORT_LIVED: int # Windows only O_TEMPORARY: int # Windows only O_RANDOM: int # Windows only O_SEQUENTIAL: int # Windows only O_TEXT: int # Windows only O_ASYNC: int # Gnu extension if in C library O_DIRECT: int # Gnu extension if in C library O_DIRECTORY: int # Gnu extension if in C library O_NOFOLLOW: int # Gnu extension if in C library O_NOATIME: int # Gnu extension if in C library O_LARGEFILE: int # Gnu extension if in C library curdir: str pardir: str sep: str altsep: str extsep: str pathsep: str defpath: str linesep: str devnull: str name: str F_OK: int R_OK: int W_OK: int X_OK: int class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]): def copy(self) -> Dict[AnyStr, AnyStr]: ... 
environ: _Environ[str] if sys.version_info >= (3, 2): environb: _Environ[bytes] confstr_names: Dict[str, int] # Unix only pathconf_names: Dict[str, int] # Unix only sysconf_names: Dict[str, int] # Unix only EX_OK: int # Unix only EX_USAGE: int # Unix only EX_DATAERR: int # Unix only EX_NOINPUT: int # Unix only EX_NOUSER: int # Unix only EX_NOHOST: int # Unix only EX_UNAVAILABLE: int # Unix only EX_SOFTWARE: int # Unix only EX_OSERR: int # Unix only EX_OSFILE: int # Unix only EX_CANTCREAT: int # Unix only EX_IOERR: int # Unix only EX_TEMPFAIL: int # Unix only EX_PROTOCOL: int # Unix only EX_NOPERM: int # Unix only EX_CONFIG: int # Unix only EX_NOTFOUND: int # Unix only P_NOWAIT: int P_NOWAITO: int P_WAIT: int if sys.platform == 'win32': P_DETACH: int # Windows only P_OVERLAY: int # Windows only # wait()/waitpid() options WNOHANG: int # Unix only WCONTINUED: int # some Unix systems WUNTRACED: int # Unix only TMP_MAX: int # Undocumented, but used by tempfile # ----- os classes (structures) ----- if sys.version_info >= (3, 6): from builtins import _PathLike as PathLike # See comment in builtins _PathType = path._PathType _StatVFS = NamedTuple('_StatVFS', [('f_bsize', int), ('f_frsize', int), ('f_blocks', int), ('f_bfree', int), ('f_bavail', int), ('f_files', int), ('f_ffree', int), ('f_favail', int), ('f_flag', int), ('f_namemax', int)]) def ctermid() -> str: ... # Unix only def getegid() -> int: ... # Unix only def geteuid() -> int: ... # Unix only def getgid() -> int: ... # Unix only def getgroups() -> List[int]: ... # Unix only, behaves differently on Mac def initgroups(username: str, gid: int) -> None: ... # Unix only def getlogin() -> str: ... def getpgid(pid: int) -> int: ... # Unix only def getpgrp() -> int: ... # Unix only def getpid() -> int: ... def getppid() -> int: ... def getresuid() -> Tuple[int, int, int]: ... # Unix only def getresgid() -> Tuple[int, int, int]: ... # Unix only def getuid() -> int: ... # Unix only def setegid(egid: int) -> None: ... # Unix only def seteuid(euid: int) -> None: ... # Unix only def setgid(gid: int) -> None: ... # Unix only def setgroups(groups: Sequence[int]) -> None: ... # Unix only def setpgrp() -> None: ... # Unix only def setpgid(pid: int, pgrp: int) -> None: ... # Unix only def setregid(rgid: int, egid: int) -> None: ... # Unix only def setresgid(rgid: int, egid: int, sgid: int) -> None: ... # Unix only def setresuid(ruid: int, euid: int, suid: int) -> None: ... # Unix only def setreuid(ruid: int, euid: int) -> None: ... # Unix only def getsid(pid: int) -> int: ... # Unix only def setsid() -> None: ... # Unix only def setuid(uid: int) -> None: ... # Unix only def strerror(code: int) -> str: ... def umask(mask: int) -> int: ... def uname() -> Tuple[str, str, str, str, str]: ... # Unix only @overload def getenv(key: Text) -> Optional[str]: ... @overload def getenv(key: Text, default: _T) -> Union[str, _T]: ... def putenv(key: Union[bytes, Text], value: Union[bytes, Text]) -> None: ... def unsetenv(key: Union[bytes, Text]) -> None: ... def fdopen(fd: int, *args, **kwargs) -> IO[Any]: ... def close(fd: int) -> None: ... def closerange(fd_low: int, fd_high: int) -> None: ... def dup(fd: int) -> int: ... def dup2(fd: int, fd2: int) -> None: ... def fchmod(fd: int, mode: int) -> None: ... # Unix only def fchown(fd: int, uid: int, gid: int) -> None: ... # Unix only def fdatasync(fd: int) -> None: ... # Unix only, not Mac def fpathconf(fd: int, name: Union[str, int]) -> int: ... # Unix only def fstat(fd: int) -> Any: ... 
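# Illustrative usage sketch (not part of the stub above): the two getenv
# overloads mean the no-default form is Optional[str] and must be narrowed,
# while passing a default of type _T yields Union[str, _T].
import os

home = os.getenv("HOME")          # Optional[str] under the first overload
if home is not None:
    print(home.upper())           # narrowed to str inside the check
port = os.getenv("PORT", "8080")  # second overload with a str default: str
print(int(port))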
def fstatvfs(fd: int) -> _StatVFS: ... # Unix only def fsync(fd: int) -> None: ... def ftruncate(fd: int, length: int) -> None: ... # Unix only def isatty(fd: int) -> bool: ... # Unix only def lseek(fd: int, pos: int, how: int) -> int: ... def open(file: _PathType, flags: int, mode: int = ...) -> int: ... def openpty() -> Tuple[int, int]: ... # some flavors of Unix def pipe() -> Tuple[int, int]: ... def read(fd: int, n: int) -> bytes: ... def tcgetpgrp(fd: int) -> int: ... # Unix only def tcsetpgrp(fd: int, pg: int) -> None: ... # Unix only def ttyname(fd: int) -> str: ... # Unix only def write(fd: int, string: bytes) -> int: ... def access(path: _PathType, mode: int) -> bool: ... def chdir(path: _PathType) -> None: ... def fchdir(fd: int) -> None: ... def getcwd() -> str: ... def getcwdu() -> unicode: ... def chflags(path: _PathType, flags: int) -> None: ... # Unix only def chroot(path: _PathType) -> None: ... # Unix only def chmod(path: _PathType, mode: int) -> None: ... def chown(path: _PathType, uid: int, gid: int) -> None: ... # Unix only def lchflags(path: _PathType, flags: int) -> None: ... # Unix only def lchmod(path: _PathType, mode: int) -> None: ... # Unix only def lchown(path: _PathType, uid: int, gid: int) -> None: ... # Unix only def link(src: _PathType, link_name: _PathType) -> None: ... def listdir(path: AnyStr) -> List[AnyStr]: ... def lstat(path: _PathType) -> Any: ... def mkfifo(path: _PathType, mode: int = ...) -> None: ... # Unix only def mknod(filename: _PathType, mode: int = ..., device: int = ...) -> None: ... def major(device: int) -> int: ... def minor(device: int) -> int: ... def makedev(major: int, minor: int) -> int: ... def mkdir(path: _PathType, mode: int = ...) -> None: ... def makedirs(path: _PathType, mode: int = ...) -> None: ... def pathconf(path: _PathType, name: Union[str, int]) -> int: ... # Unix only def readlink(path: AnyStr) -> AnyStr: ... def remove(path: _PathType) -> None: ... def removedirs(path: _PathType) -> None: ... def rename(src: _PathType, dst: _PathType) -> None: ... def renames(old: _PathType, new: _PathType) -> None: ... def rmdir(path: _PathType) -> None: ... def stat(path: _PathType) -> Any: ... @overload def stat_float_times(newvalue: bool = ...) -> None: ... @overload def stat_float_times() -> bool: ... def statvfs(path: _PathType) -> _StatVFS: ... # Unix only def symlink(source: _PathType, link_name: _PathType) -> None: ... def unlink(path: _PathType) -> None: ... # TODO: add ns, dir_fd, follow_symlinks argument if sys.version_info >= (3, 0): def utime(path: _PathType, times: Optional[Tuple[float, float]] = ...) -> None: ... else: def utime(path: _PathType, times: Optional[Tuple[float, float]]) -> None: ... if sys.version_info >= (3, 6): def walk(top: Union[AnyStr, PathLike[AnyStr]], topdown: bool = ..., onerror: Optional[Callable[[OSError], Any]] = ..., followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr], List[AnyStr]]]: ... else: def walk(top: AnyStr, topdown: bool = ..., onerror: Optional[Callable[[OSError], Any]] = ..., followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr], List[AnyStr]]]: ... def abort() -> NoReturn: ... # These are defined as execl(file, *args) but the first *arg is mandatory. def execl(file: _PathType, __arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> NoReturn: ... def execlp(file: _PathType, __arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> NoReturn: ... # These are: execle(file, *args, env) but env is pulled from the last element of the args. 
def execle(file: _PathType, __arg0: Union[bytes, Text], *args: Any) -> NoReturn: ... def execlpe(file: _PathType, __arg0: Union[bytes, Text], *args: Any) -> NoReturn: ... # The docs say `args: tuple or list of strings` # The implementation enforces tuple or list so we can't use Sequence. _ExecVArgs = Union[Tuple[Union[bytes, Text], ...], List[bytes], List[Text], List[Union[bytes, Text]]] def execv(path: _PathType, args: _ExecVArgs) -> None: ... def execve(path: _PathType, args: _ExecVArgs, env: Mapping[str, str]) -> None: ... def execvp(file: _PathType, args: _ExecVArgs) -> None: ... def execvpe(file: _PathType, args: _ExecVArgs, env: Mapping[str, str]) -> None: ... def _exit(n: int) -> NoReturn: ... def fork() -> int: ... # Unix only def forkpty() -> Tuple[int, int]: ... # some flavors of Unix def kill(pid: int, sig: int) -> None: ... def killpg(pgid: int, sig: int) -> None: ... # Unix only def nice(increment: int) -> int: ... # Unix only def plock(op: int) -> None: ... # Unix only ???op is int? if sys.version_info >= (3, 0): class popen(_TextIOWrapper): # TODO 'b' modes or bytes command not accepted? def __init__(self, command: str, mode: str = ..., bufsize: int = ...) -> None: ... def close(self) -> Any: ... # may return int else: def popen(command: str, *args, **kwargs) -> Optional[IO[Any]]: ... def popen2(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ... def popen3(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any], IO[Any]]: ... def popen4(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ... def spawnl(mode: int, path: _PathType, arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> int: ... def spawnle(mode: int, path: _PathType, arg0: Union[bytes, Text], *args: Any) -> int: ... # Imprecise sig def spawnlp(mode: int, file: _PathType, arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> int: ... # Unix only TODO def spawnlpe(mode: int, file: _PathType, arg0: Union[bytes, Text], *args: Any) -> int: ... # Imprecise signature; Unix only TODO def spawnv(mode: int, path: _PathType, args: List[Union[bytes, Text]]) -> int: ... def spawnve(mode: int, path: _PathType, args: List[Union[bytes, Text]], env: Mapping[str, str]) -> int: ... def spawnvp(mode: int, file: _PathType, args: List[Union[bytes, Text]]) -> int: ... # Unix only def spawnvpe(mode: int, file: _PathType, args: List[Union[bytes, Text]], env: Mapping[str, str]) -> int: ... # Unix only def startfile(path: _PathType, operation: Optional[str] = ...) -> None: ... # Windows only def system(command: _PathType) -> int: ... def times() -> Tuple[float, float, float, float, float]: ... def wait() -> Tuple[int, int]: ... # Unix only def waitpid(pid: int, options: int) -> Tuple[int, int]: ... def wait3(options: int) -> Tuple[int, int, Any]: ... # Unix only def wait4(pid: int, options: int) -> Tuple[int, int, Any]: ... # Unix only def WCOREDUMP(status: int) -> bool: ... # Unix only def WIFCONTINUED(status: int) -> bool: ... # Unix only def WIFSTOPPED(status: int) -> bool: ... # Unix only def WIFSIGNALED(status: int) -> bool: ... # Unix only def WIFEXITED(status: int) -> bool: ... # Unix only def WEXITSTATUS(status: int) -> int: ... # Unix only def WSTOPSIG(status: int) -> int: ... # Unix only def WTERMSIG(status: int) -> int: ... # Unix only def confstr(name: Union[str, int]) -> Optional[str]: ... # Unix only def getloadavg() -> Tuple[float, float, float]: ... # Unix only def sysconf(name: Union[str, int]) -> int: ... # Unix only def urandom(n: int) -> bytes: ... 
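# Illustrative sketch (not part of the stub): how the exec* signatures above
# are meant to be called. The first positional after the path is mandatory for
# execl/execlp, execle takes the environment mapping as the last positional
# argument, and execv takes an argv list matching _ExecVArgs. The calls are
# wrapped in a function that is never invoked, since exec* would replace the
# current process.
import os

def exec_examples():
    # type: () -> None
    os.execl("/bin/echo", "echo", "hello")                # argv[0] is required
    os.execle("/bin/echo", "echo", "hi", {"LANG": "C"})   # env is the last arg
    os.execv("/bin/echo", ["echo", "typed argv list"])    # list matches _ExecVArgs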
if sys.version_info >= (3, 0): def sched_getaffinity(id: int) -> Set[int]: ... if sys.version_info >= (3, 3): class waitresult: si_pid: int def waitid(idtype: int, id: int, options: int) -> waitresult: ... if sys.version_info < (3, 0): def tmpfile() -> IO[Any]: ... def tmpnam() -> str: ... def tempnam(dir: str = ..., prefix: str = ...) -> str: ... P_ALL: int WEXITED: int WNOWAIT: int if sys.version_info >= (3, 3): def sync() -> None: ... # Unix only def truncate(path: Union[_PathType, int], length: int) -> None: ... # Unix only up to version 3.4 def fwalk(top: AnyStr = ..., topdown: bool = ..., onerror: Callable = ..., *, follow_symlinks: bool = ..., dir_fd: int = ...) -> Iterator[Tuple[AnyStr, List[AnyStr], List[AnyStr], int]]: ... # Unix only terminal_size = NamedTuple('terminal_size', [('columns', int), ('lines', int)]) def get_terminal_size(fd: int = ...) -> terminal_size: ... if sys.version_info >= (3, 4): def cpu_count() -> Optional[int]: ... mypy-0.560/typeshed/stdlib/2/os/path.pyi0000644€tŠÔÚ€2›s®0000000665713215007212024252 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for os.path # Ron Murawski # based on http://docs.python.org/3.2/library/os.path.html # adapted for 2.7 by Michal Pokorny import sys from typing import ( overload, List, Any, AnyStr, Sequence, Tuple, BinaryIO, TextIO, TypeVar, Union, Text, Callable ) _T = TypeVar('_T') if sys.version_info >= (3, 6): from builtins import _PathLike _PathType = Union[bytes, Text, _PathLike] else: _PathType = Union[bytes, Text] # ----- os.path variables ----- supports_unicode_filenames = False # aliases (also in os) curdir = ... # type: str pardir = ... # type: str sep = ... # type: str altsep = ... # type: str extsep = ... # type: str pathsep = ... # type: str defpath = ... # type: str devnull = ... # type: str # ----- os.path function stubs ----- def abspath(path: AnyStr) -> AnyStr: ... def basename(path: AnyStr) -> AnyStr: ... if sys.version_info >= (3, 5): def commonpath(paths: Sequence[AnyStr]) -> AnyStr: ... # NOTE: Empty lists results in '' (str) regardless of contained type. # Also, in Python 2 mixed sequences of Text and bytes results in either Text or bytes # So, fall back to Any def commonprefix(list: Sequence[AnyStr]) -> Any: ... def dirname(path: AnyStr) -> AnyStr: ... def exists(path: _PathType) -> bool: ... def lexists(path: _PathType) -> bool: ... def expanduser(path: AnyStr) -> AnyStr: ... def expandvars(path: AnyStr) -> AnyStr: ... # These return float if os.stat_float_times() == True, # but int is a subclass of float. def getatime(path: _PathType) -> float: ... def getmtime(path: _PathType) -> float: ... def getctime(path: _PathType) -> float: ... def getsize(path: _PathType) -> int: ... def isabs(path: _PathType) -> bool: ... def isfile(path: _PathType) -> bool: ... def isdir(path: _PathType) -> bool: ... def islink(path: _PathType) -> bool: ... def ismount(path: _PathType) -> bool: ... if sys.version_info < (3, 0): # Make sure signatures are disjunct, and allow combinations of bytes and unicode. # (Since Python 2 allows that, too) # Note that e.g. os.path.join("a", "b", "c", "d", u"e") will still result in # a type error. @overload def join(__p1: bytes, *p: bytes) -> bytes: ... @overload def join(__p1: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: bytes, __p3: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: bytes, __p3: bytes, __p4: Text, *p: _PathType) -> Text: ... 
else: def join(path: AnyStr, *paths: AnyStr) -> AnyStr: ... def normcase(path: AnyStr) -> AnyStr: ... def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': def realpath(path: AnyStr) -> AnyStr: ... else: def realpath(filename: AnyStr) -> AnyStr: ... def relpath(path: AnyStr, start: _PathType = ...) -> AnyStr: ... def samefile(path1: _PathType, path2: _PathType) -> bool: ... def sameopenfile(fp1: int, fp2: int) -> bool: ... # TODO # def samestat(stat1: stat_result, # stat2: stat_result) -> bool: ... # Unix only def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitunc(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... # Windows only, deprecated if sys.version_info < (3,): def walk(path: AnyStr, visit: Callable[[_T, AnyStr, List[AnyStr]], Any], arg: _T) -> None: ... mypy-0.560/typeshed/stdlib/2/os2emxpath.pyi0000644€tŠÔÚ€2›s®0000000147313215007212024756 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from genericpath import * # noqa: F403 curdir = ... # type: Any pardir = ... # type: Any extsep = ... # type: Any sep = ... # type: Any pathsep = ... # type: Any defpath = ... # type: Any altsep = ... # type: Any devnull = ... # type: Any def normcase(s): ... def isabs(s): ... def join(a, *p): ... def split(p): ... def splitext(p): ... def splitdrive(p): ... def basename(p): ... def dirname(p): ... def islink(path): ... def lexists(path): ... def samefile(f1, f2): ... def sameopenfile(fp1, fp2): ... def samestat(s1, s2): ... def ismount(path): ... def walk(top, func, arg): ... def expanduser(path): ... def expandvars(path): ... def normpath(path): ... def abspath(path): ... def realpath(filename): ... supports_unicode_filenames = ... # type: Any def relpath(path, start=...): ... mypy-0.560/typeshed/stdlib/2/pipes.pyi0000644€tŠÔÚ€2›s®0000000070513215007212024001 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO class Template: def __init__(self) -> None: ... def reset(self) -> None: ... def clone(self) -> Template: ... def debug(self, flag: bool) -> None: ... def append(self, cmd: str, kind: str) -> None: ... def prepend(self, cmd: str, kind: str) -> None: ... def open(self, file: str, mode: str) -> IO[Any]: ... def copy(self, infile: str, outfile: str) -> None: ... def quote(s: str) -> str: ... mypy-0.560/typeshed/stdlib/2/platform.pyi0000644€tŠÔÚ€2›s®0000000264013215007212024505 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for platform (Python 2) # # Based on stub generated by stubgen. from typing import Any __copyright__ = ... # type: Any DEV_NULL = ... # type: Any def libc_ver(executable=..., lib=..., version=..., chunksize=2048): ... def linux_distribution(distname=..., version=..., id=..., supported_dists=..., full_distribution_name=1): ... def dist(distname=..., version=..., id=..., supported_dists=...): ... class _popen: tmpfile = ... # type: Any pipe = ... # type: Any bufsize = ... # type: Any mode = ... # type: Any def __init__(self, cmd, mode=..., bufsize=None): ... def read(self): ... def readlines(self): ... def close(self, remove=..., error=...): ... __del__ = ... # type: Any def popen(cmd, mode=..., bufsize=None): ... def win32_ver(release=..., version=..., csd=..., ptype=...): ... def mac_ver(release=..., versioninfo=..., machine=...): ... def java_ver(release=..., vendor=..., vminfo=..., osinfo=...): ... def system_alias(system, release, version): ... 
def architecture(executable=..., bits=..., linkage=...): ... def uname(): ... def system(): ... def node(): ... def release(): ... def version(): ... def machine(): ... def processor(): ... def python_implementation(): ... def python_version(): ... def python_version_tuple(): ... def python_branch(): ... def python_revision(): ... def python_build(): ... def python_compiler(): ... def platform(aliased=0, terse=0): ... mypy-0.560/typeshed/stdlib/2/popen2.pyi0000644€tŠÔÚ€2›s®0000000220013215007212024054 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Iterable, List, Optional, Union, TextIO, Tuple, TypeVar _T = TypeVar('_T') __all__ = ... # type: List[str] class Popen3: sts = ... # type: int cmd = ... # type: Iterable pid = ... # type: int tochild = ... # type: TextIO fromchild = ... # type: TextIO childerr = ... # type: Optional[TextIO] def __init__(self, cmd: Iterable = ..., capturestderr: bool = ..., bufsize: int = ...) -> None: ... def __del__(self) -> None: ... def poll(self, _deadstate: _T = ...) -> Union[int, _T]: ... def wait(self) -> int: ... class Popen4(Popen3): childerr = ... # type: None cmd = ... # type: Iterable pid = ... # type: int tochild = ... # type: TextIO fromchild = ... # type: TextIO def __init__(self, cmd: Iterable = ..., bufsize: int = ...) -> None: ... def popen2(cmd: Iterable = ..., bufsize: int = ..., mode: str = ...) -> Tuple[TextIO, TextIO]: ... def popen3(cmd: Iterable = ..., bufsize: int = ..., mode: str = ...) -> Tuple[TextIO, TextIO, TextIO]: ... def popen4(cmd: Iterable = ..., bufsize: int = ..., mode: str = ...) -> Tuple[TextIO, TextIO]: ... mypy-0.560/typeshed/stdlib/2/posix.pyi0000644€tŠÔÚ€2›s®0000001644413215007212024032 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, List, Mapping, Tuple, Union, Sequence, IO, Optional, TypeVar error = OSError confstr_names = ... # type: Dict[str, int] environ = ... # type: Dict[str, str] pathconf_names = ... # type: Dict[str, int] sysconf_names = ... # type: Dict[str, int] EX_CANTCREAT = ... # type: int EX_CONFIG = ... # type: int EX_DATAERR = ... # type: int EX_IOERR = ... # type: int EX_NOHOST = ... # type: int EX_NOINPUT = ... # type: int EX_NOPERM = ... # type: int EX_NOUSER = ... # type: int EX_OK = ... # type: int EX_OSERR = ... # type: int EX_OSFILE = ... # type: int EX_PROTOCOL = ... # type: int EX_SOFTWARE = ... # type: int EX_TEMPFAIL = ... # type: int EX_UNAVAILABLE = ... # type: int EX_USAGE = ... # type: int F_OK = ... # type: int NGROUPS_MAX = ... # type: int O_APPEND = ... # type: int O_ASYNC = ... # type: int O_CREAT = ... # type: int O_DIRECT = ... # type: int O_DIRECTORY = ... # type: int O_DSYNC = ... # type: int O_EXCL = ... # type: int O_LARGEFILE = ... # type: int O_NDELAY = ... # type: int O_NOATIME = ... # type: int O_NOCTTY = ... # type: int O_NOFOLLOW = ... # type: int O_NONBLOCK = ... # type: int O_RDONLY = ... # type: int O_RDWR = ... # type: int O_RSYNC = ... # type: int O_SYNC = ... # type: int O_TRUNC = ... # type: int O_WRONLY = ... # type: int R_OK = ... # type: int TMP_MAX = ... # type: int WCONTINUED = ... # type: int WNOHANG = ... # type: int WUNTRACED = ... # type: int W_OK = ... # type: int X_OK = ... # type: int def WCOREDUMP(status: int) -> bool: ... def WEXITSTATUS(status: int) -> bool: ... def WIFCONTINUED(status: int) -> bool: ... def WIFEXITED(status: int) -> bool: ... def WIFSIGNALED(status: int) -> bool: ... def WIFSTOPPED(status: int) -> bool: ... def WSTOPSIG(status: int) -> bool: ... def WTERMSIG(status: int) -> bool: ... 
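# Illustrative sketch (not part of the stub): using the wait-status helpers
# declared above through the os module, which re-exports them. Unix-only, so
# the example is guarded with hasattr for portability.
import os
import sys

if hasattr(os, "fork"):
    pid = os.fork()
    if pid == 0:
        os._exit(7)                       # child exits with status 7
    _, status = os.waitpid(pid, 0)        # (pid, raw wait status)
    if os.WIFEXITED(status):
        sys.stdout.write("child exit code: %d\n" % os.WEXITSTATUS(status))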
class stat_result(object): n_fields = ... # type: int n_sequence_fields = ... # type: int n_unnamed_fields = ... # type: int st_mode = ... # type: int st_ino = ... # type: int st_dev = ... # type: int st_nlink = ... # type: int st_uid = ... # type: int st_gid = ... # type: int st_size = ... # type: int st_atime = ... # type: int st_mtime = ... # type: int st_ctime = ... # type: int class statvfs_result(object): n_fields = ... # type: int n_sequence_fields = ... # type: int n_unnamed_fields = ... # type: int f_bsize = ... # type: int f_frsize = ... # type: int f_blocks = ... # type: int f_bfree = ... # type: int f_bavail = ... # type: int f_files = ... # type: int f_ffree = ... # type: int f_favail = ... # type: int f_flag = ... # type: int f_namemax = ... # type: int def _exit(status: int) -> None: ... def abort() -> None: ... def access(path: unicode, mode: int) -> bool: ... def chdir(path: unicode) -> None: ... def chmod(path: unicode, mode: int) -> None: ... def chown(path: unicode, uid: int, gid: int) -> None: ... def chroot(path: unicode) -> None: ... def close(fd: int) -> None: ... def closerange(fd_low: int, fd_high: int) -> None: ... def confstr(name: Union[str, int]) -> str: ... def ctermid() -> str: ... def dup(fd: int) -> int: ... def dup2(fd: int, fd2: int) -> None: ... def execv(path: str, args: Sequence[str], env: Mapping[str, str]) -> None: ... def execve(path: str, args: Sequence[str], env: Mapping[str, str]) -> None: ... def fchdir(fd: int) -> None: ... def fchmod(fd: int, mode: int) -> None: ... def fchown(fd: int, uid: int, gid: int) -> None: ... def fdatasync(fd: int) -> None: ... def fdopen(fd: int, mode: str = ..., bufsize: int = ...) -> IO[str]: ... def fork() -> int: raise OSError() def forkpty() -> Tuple[int, int]: raise OSError() def fpathconf(fd: int, name: str) -> None: ... def fstat(fd: int) -> stat_result: ... def fstatvfs(fd: int) -> statvfs_result: ... def fsync(fd: int) -> None: ... def ftruncate(fd: int, length: int) -> None: ... def getcwd() -> str: ... def getcwdu() -> unicode: ... def getegid() -> int: ... def geteuid() -> int: ... def getgid() -> int: ... def getgroups() -> List[int]: ... def getloadavg() -> Tuple[float, float, float]: raise OSError() def getlogin() -> str: ... def getpgid(pid: int) -> int: ... def getpgrp() -> int: ... def getpid() -> int: ... def getppid() -> int: ... def getresgid() -> Tuple[int, int, int]: ... def getresuid() -> Tuple[int, int, int]: ... def getsid(pid: int) -> int: ... def getuid() -> int: ... def initgroups(username: str, gid: int) -> None: ... def isatty(fd: int) -> bool: ... def kill(pid: int, sig: int) -> None: ... def killpg(pgid: int, sig: int) -> None: ... def lchown(path: unicode, uid: int, gid: int) -> None: ... def link(source: unicode, link_name: str) -> None: ... _T = TypeVar("_T") def listdir(path: _T) -> List[_T]: ... def lseek(fd: int, pos: int, how: int) -> None: ... def lstat(path: unicode) -> stat_result: ... def major(device: int) -> int: ... def makedev(major: int, minor: int) -> int: ... def minor(device: int) -> int: ... def mkdir(path: unicode, mode: int = ...) -> None: ... def mkfifo(path: unicode, mode: int = ...) -> None: ... def mknod(filename: unicode, mode: int = ..., device: int = ...) -> None: ... def nice(increment: int) -> int: ... def open(file: unicode, flags: int, mode: int = ...) -> int: ... def openpty() -> Tuple[int, int]: ... def pathconf(path: unicode, name: str) -> str: ... def pipe() -> Tuple[int, int]: ... def popen(command: str, mode: str = ..., bufsize: int = ...) 
-> IO[str]: ... def putenv(varname: str, value: str) -> None: ... def read(fd: int, n: int) -> str: ... def readlink(path: _T) -> _T: ... def remove(path: unicode) -> None: ... def rename(src: unicode, dst: unicode) -> None: ... def rmdir(path: unicode) -> None: ... def setegid(egid: int) -> None: ... def seteuid(euid: int) -> None: ... def setgid(gid: int) -> None: ... def setgroups(groups: Sequence[int]) -> None: ... def setpgid(pid: int, pgrp: int) -> None: ... def setpgrp() -> None: ... def setregid(rgid: int, egid: int) -> None: ... def setresgid(rgid: int, egid: int, sgid: int) -> None: ... def setresuid(ruid: int, euid: int, suid: int) -> None: ... def setreuid(ruid: int, euid: int) -> None: ... def setsid() -> None: ... def setuid(pid: int) -> None: ... def stat(path: unicode) -> stat_result: ... def statvfs(path: unicode) -> statvfs_result: ... def stat_float_times(fd: int) -> None: ... def strerror(code: int) -> str: ... def symlink(source: unicode, link_name: unicode) -> None: ... def sysconf(name: Union[str, int]) -> int: ... def system(command: unicode) -> int: ... def tcgetpgrp(fd: int) -> int: ... def tcsetpgrp(fd: int, pg: int) -> None: ... def times() -> Tuple[float, float, float, float, float]: ... def tmpfile() -> IO[str]: ... def ttyname(fd: int) -> str: ... def umask(mask: int) -> int: ... def uname() -> Tuple[str, str, str, str, str]: ... def unlink(path: unicode) -> None: ... def unsetenv(varname: str) -> None: ... def urandom(n: int) -> str: ... def utime(path: unicode, times: Optional[Tuple[int, int]]) -> None: raise OSError def wait() -> int: ... _r = Tuple[float, float, int, int, int, int, int, int, int, int, int, int, int, int, int, int] def wait3(options: int) -> Tuple[int, int, _r]: ... def wait4(pid: int, options: int) -> Tuple[int, int, _r]: ... def waitpid(pid: int, options: int) -> int: raise OSError() def write(fd: int, str: str) -> int: ... mypy-0.560/typeshed/stdlib/2/posixpath.pyi0000644€tŠÔÚ€2›s®0000000147313215007212024703 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from genericpath import * # noqa: F403 curdir = ... # type: Any pardir = ... # type: Any extsep = ... # type: Any sep = ... # type: Any pathsep = ... # type: Any defpath = ... # type: Any altsep = ... # type: Any devnull = ... # type: Any def normcase(s): ... def isabs(s): ... def join(a, *p): ... def split(p): ... def splitext(p): ... def splitdrive(p): ... def basename(p): ... def dirname(p): ... def islink(path): ... def lexists(path): ... def samefile(f1, f2): ... def sameopenfile(fp1, fp2): ... def samestat(s1, s2): ... def ismount(path): ... def walk(top, func, arg): ... def expanduser(path): ... def expandvars(path): ... def normpath(path): ... def abspath(path): ... def realpath(filename): ... supports_unicode_filenames = ... # type: Any def relpath(path, start=...): ... mypy-0.560/typeshed/stdlib/2/pydoc.pyi0000644€tŠÔÚ€2›s®0000002362013215007212024000 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, AnyStr, Callable, Container, Dict, IO, List, Mapping, MutableMapping, Optional, Tuple, Type, Union from mypy_extensions import NoReturn from repr import Repr from types import FunctionType, MethodType, ModuleType, TracebackType # the return type of sys.exc_info(), used by ErrorDuringImport.__init__ _Exc_Info = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] __author__ = ... # type: str __date__ = ... # type: str __version__ = ... # type: str __credits__ = ... 
# type: str def pathdirs() -> List[str]: ... def getdoc(object: object) -> Union[str, unicode]: ... def splitdoc(doc: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def classname(object: object, modname: str) -> str: ... def isdata(object: object) -> bool: ... def replace(text: AnyStr, *pairs: AnyStr) -> AnyStr: ... def cram(text: str, maxlen: int) -> str: ... def stripid(text: str) -> str: ... def allmethods(cl: type) -> MutableMapping[str, MethodType]: ... def visiblename(name: str, all: Optional[Container[str]] = ..., obj: Optional[object] = ...) -> bool: ... def classify_class_attrs(object: object) -> List[Tuple[str, str, type, str]]: ... def ispackage(path: str) -> bool: ... def source_synopsis(file: IO[AnyStr]) -> Optional[AnyStr]: ... def synopsis(filename: str, cache: MutableMapping[str, Tuple[int, str]] = ...) -> Optional[str]: ... class ErrorDuringImport(Exception): filename = ... # type: str exc = ... # type: Optional[Type[BaseException]] value = ... # type: Optional[BaseException] tb = ... # type: Optional[TracebackType] def __init__(self, filename: str, exc_info: _Exc_Info) -> None: ... def importfile(path: str) -> ModuleType: ... def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, ModuleType] = ...) -> ModuleType: ... class Doc: def document(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... def fail(self, object: object, name: Optional[str] = ..., *args: Any) -> NoReturn: ... def docmodule(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... def docclass(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... def docroutine(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... def docother(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... def docproperty(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... def docdata(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ... def getdocloc(self, object: object) -> Optional[str]: ... class HTMLRepr(Repr): maxlist = ... # type: int maxtuple = ... # type: int maxdict = ... # type: int maxstring = ... # type: int maxother = ... # type: int def __init__(self) -> None: ... def escape(self, text: str) -> str: ... def repr(self, object: object) -> str: ... def repr1(self, x: object, level: complex) -> str: ... def repr_string(self, x: Union[str, unicode], level: complex) -> str: ... def repr_str(self, x: Union[str, unicode], level: complex) -> str: ... def repr_instance(self, x: object, level: complex) -> str: ... def repr_unicode(self, x: AnyStr, level: complex) -> str: ... class HTMLDoc(Doc): def repr(self, object: object) -> str: ... def escape(self, test: str) -> str: ... def page(self, title: str, contents: str) -> str: ... def heading(self, title: str, fgcol: str, bgcol: str, extras: str = ...) -> str: ... def section(self, title: str, fgcol: str, bgcol: str, contents: str, width: int = ..., prelude: str = ..., marginalia: Optional[str] = ..., gap: str = ...) -> str: ... def bigsection(self, title: str, *args) -> str: ... def preformat(self, text: str) -> str: ... def multicolumn(self, list: List[Any], format: Callable[[Any], str], cols: int = ...) -> str: ... def grey(self, text: str) -> str: ... def namelink(self, name: str, *dicts: MutableMapping[str, str]) -> str: ... def classlink(self, object: object, modname: str) -> str: ... def modulelink(self, object: object) -> str: ... def modpkglink(self, data: Tuple[str, str, bool, bool]) -> str: ... 
def markup(self, text: str, escape: Optional[Callable[[str], str]] = ..., funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ...) -> str: ... def formattree(self, tree: List[Union[Tuple[type, Tuple[type, ...]], list]], modname: str, parent: Optional[type] = ...) -> str: ... def docmodule(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., *ignored) -> str: ... def docclass(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., *ignored) -> str: ... def formatvalue(self, object: object) -> str: ... def docroutine(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ..., cl: Optional[type] = ..., *ignored) -> str: ... def docproperty(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., cl: Optional[Any] = ..., *ignored) -> str: ... def docother(self, object: object, name: Optional[str] = ..., mod: Optional[Any] = ..., *ignored) -> str: ... def docdata(self, object: object, name: Optional[str] = ..., mod: Optional[Any] = ..., cl: Optional[Any] = ..., *ignored) -> str: ... def index(self, dir: str, shadowed: Optional[MutableMapping[str, bool]] = ...) -> str: ... class TextRepr(Repr): maxlist = ... # type: int maxtuple = ... # type: int maxdict = ... # type: int maxstring = ... # type: int maxother = ... # type: int def __init__(self) -> None: ... def repr1(self, x: object, level: complex) -> str: ... def repr_string(self, x: str, level: complex) -> str: ... def repr_str(self, x: str, level: complex) -> str: ... def repr_instance(self, x: object, level: complex) -> str: ... class TextDoc(Doc): def repr(self, object: object) -> str: ... def bold(self, text: str) -> str: ... def indent(self, text: str, prefix: str = ...) -> str: ... def section(self, title: str, contents: str) -> str: ... def formattree(self, tree: List[Union[Tuple[type, Tuple[type, ...]], list]], modname: str, parent: Optional[type] = ..., prefix: str = ...) -> str: ... def docmodule(self, object: object, name: Optional[str] = ..., mod: Optional[Any] = ..., *ignored) -> str: ... def docclass(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., *ignored) -> str: ... def formatvalue(self, object: object) -> str: ... def docroutine(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., cl: Optional[Any] = ..., *ignored) -> str: ... def docproperty(self, object: object, name: Optional[str] = ..., mod: Optional[Any] = ..., cl: Optional[Any] = ..., *ignored) -> str: ... def docdata(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., cl: Optional[Any] = ..., *ignored) -> str: ... def docother(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., parent: Optional[str] = ..., maxlen: Optional[int] = ..., doc: Optional[Any] = ..., *ignored) -> str: ... def pager(text: str) -> None: ... def getpager() -> Callable[[str], None]: ... def plain(text: str) -> str: ... def pipepager(text: str, cmd: str) -> None: ... def tempfilepager(text: str, cmd: str) -> None: ... def ttypager(text: str) -> None: ... def plainpager(text: str) -> None: ... def describe(thing: Any) -> str: ... def locate(path: str, forceload: bool = ...) -> object: ... text = ... # type: TextDoc html = ... # type: HTMLDoc class _OldStyleClass: ... 
def resolve(thing: Union[str, object], forceload: bool = ...) -> Optional[Tuple[object, str]]: ... def render_doc(thing: Union[str, object], title: str = ..., forceload: bool = ...) -> str: ... def doc(thing: Union[str, object], title: str = ..., forceload: bool = ...) -> None: ... def writedoc(thing: Union[str, object], forceload: bool = ...) -> None: ... def writedocs(dir: str, pkgpath: str = ..., done: Optional[Any] = ...) -> None: ... class Helper: keywords = ... # type: Dict[str, Union[str, Tuple[str, str]]] symbols = ... # type: Dict[str, str] topics = ... # type: Dict[str, Union[str, Tuple[str, ...]]] def __init__(self, input: Optional[IO[str]] = ..., output: Optional[IO[str]] = ...) -> None: ... input = ... # type: IO[str] output = ... # type: IO[str] def __call__(self, request: Union[str, Helper, object] = ...) -> None: ... def interact(self) -> None: ... def getline(self, prompt: str) -> str: ... def help(self, request: Any) -> None: ... def intro(self) -> None: ... def list(self, items: List[str], columns: int = ..., width: int = ...) -> None: ... def listkeywords(self) -> None: ... def listsymbols(self) -> None: ... def listtopics(self) -> None: ... def showtopic(self, topic: str, more_xrefs: str = ...) -> None: ... def showsymbol(self, symbol: str) -> None: ... def listmodules(self, key: str = ...) -> None: ... help = ... # type: Helper # See Python issue #11182: "remove the unused and undocumented pydoc.Scanner class" # class Scanner: # roots = ... # type: Any # state = ... # type: Any # children = ... # type: Any # descendp = ... # type: Any # def __init__(self, roots, children, descendp) -> None: ... # def next(self): ... class ModuleScanner: quit = ... # type: bool def run(self, callback: Callable[[Optional[str], str, str], None], key: Optional[Any] = ..., completer: Optional[Callable[[], None]] = ..., onerror: Optional[Callable] = ...) -> None: ... def apropos(key: str) -> None: ... def serve(port: int, callback: Optional[Callable[[Any], None]] = ..., completer: Optional[Callable[[], None]] = ...) -> None: ... def gui() -> None: ... def ispath(x: Any) -> bool: ... def cli() -> None: ... mypy-0.560/typeshed/stdlib/2/Queue.pyi0000644€tŠÔÚ€2›s®0000000163613215007212023751 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for Queue (Python 2) from typing import Any, TypeVar, Generic, Optional _T = TypeVar('_T') class Empty(Exception): ... class Full(Exception): ... class Queue(Generic[_T]): maxsize = ... # type: Any mutex = ... # type: Any not_empty = ... # type: Any not_full = ... # type: Any all_tasks_done = ... # type: Any unfinished_tasks = ... # type: Any def __init__(self, maxsize: int = ...) -> None: ... def task_done(self) -> None: ... def join(self) -> None: ... def qsize(self) -> int: ... def empty(self) -> bool: ... def full(self) -> bool: ... def put(self, item: _T, block: bool = ..., timeout: Optional[float] = ...) -> None: ... def put_nowait(self, item: _T) -> None: ... def get(self, block: bool = ..., timeout: Optional[float] = ...) -> _T: ... def get_nowait(self) -> _T: ... class PriorityQueue(Queue): ... class LifoQueue(Queue): ... 
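# A minimal usage sketch (illustrative only, not one of the archived stub files):
# how the Generic[_T] Queue stub above types a parameterised queue under Python 2.
# The variable names and values below are hypothetical.
from Queue import Queue, Empty

q = Queue(maxsize=2)  # type: Queue[int]
q.put(1)
q.put(2, block=True, timeout=0.1)
try:
    item = q.get_nowait()  # inferred as int from the Queue[int] annotation
except Empty:
    item = 0
print(item)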
mypy-0.560/typeshed/stdlib/2/random.pyi0000644€tŠÔÚ€2›s®0000000620713215007212024144 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for random # Ron Murawski # Updated by Jukka Lehtosalo # based on https://docs.python.org/2/library/random.html # ----- random classes ----- import _random from typing import ( Any, TypeVar, Sequence, List, Callable, AbstractSet, Union, overload ) _T = TypeVar('_T') class Random(_random.Random): def __init__(self, x: object = ...) -> None: ... def seed(self, x: object = ...) -> None: ... def getstate(self) -> _random._State: ... def setstate(self, state: _random._State) -> None: ... def jumpahead(self, n: int) -> None: ... def getrandbits(self, k: int) -> int: ... @overload def randrange(self, stop: int) -> int: ... @overload def randrange(self, start: int, stop: int, step: int = ...) -> int: ... def randint(self, a: int, b: int) -> int: ... def choice(self, seq: Sequence[_T]) -> _T: ... def shuffle(self, x: List[Any], random: Callable[[], None] = ...) -> None: ... def sample(self, population: Union[Sequence[_T], AbstractSet[_T]], k: int) -> List[_T]: ... def random(self) -> float: ... def uniform(self, a: float, b: float) -> float: ... def triangular(self, low: float = ..., high: float = ..., mode: float = ...) -> float: ... def betavariate(self, alpha: float, beta: float) -> float: ... def expovariate(self, lambd: float) -> float: ... def gammavariate(self, alpha: float, beta: float) -> float: ... def gauss(self, mu: float, sigma: float) -> float: ... def lognormvariate(self, mu: float, sigma: float) -> float: ... def normalvariate(self, mu: float, sigma: float) -> float: ... def vonmisesvariate(self, mu: float, kappa: float) -> float: ... def paretovariate(self, alpha: float) -> float: ... def weibullvariate(self, alpha: float, beta: float) -> float: ... # SystemRandom is not implemented for all OS's; good on Windows & Linux class SystemRandom(Random): ... # ----- random function stubs ----- def seed(x: object = ...) -> None: ... def getstate() -> object: ... def setstate(state: object) -> None: ... def jumpahead(n: int) -> None: ... def getrandbits(k: int) -> int: ... @overload def randrange(stop: int) -> int: ... @overload def randrange(start: int, stop: int, step: int = ...) -> int: ... def randint(a: int, b: int) -> int: ... def choice(seq: Sequence[_T]) -> _T: ... def shuffle(x: List[Any], random: Callable[[], float] = ...) -> None: ... def sample(population: Union[Sequence[_T], AbstractSet[_T]], k: int) -> List[_T]: ... def random() -> float: ... def uniform(a: float, b: float) -> float: ... def triangular(low: float = ..., high: float = ..., mode: float = ...) -> float: ... def betavariate(alpha: float, beta: float) -> float: ... def expovariate(lambd: float) -> float: ... def gammavariate(alpha: float, beta: float) -> float: ... def gauss(mu: float, sigma: float) -> float: ... def lognormvariate(mu: float, sigma: float) -> float: ... def normalvariate(mu: float, sigma: float) -> float: ... def vonmisesvariate(mu: float, kappa: float) -> float: ... def paretovariate(alpha: float) -> float: ... def weibullvariate(alpha: float, beta: float) -> float: ... 
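# A minimal usage sketch (illustrative only, not one of the archived stub files):
# exercising the overloaded randrange() and the TypeVar-based sample() from the
# random stub above. The seed and sample data are made up.
import random

random.seed(42)
print(random.randrange(10))         # one-argument overload: stop only
print(random.randrange(0, 10, 2))   # three-argument overload: start, stop, step
names = ['ham', 'spam', 'eggs']
print(random.sample(names, 2))      # typed as List[str] via _T
print(random.uniform(0.0, 1.0))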
mypy-0.560/typeshed/stdlib/2/re.pyi0000644€tŠÔÚ€2›s®0000000723313215007212023272 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for re # Ron Murawski # 'bytes' support added by Jukka Lehtosalo # based on: http: //docs.python.org/2.7/library/re.html from typing import ( List, Iterator, overload, Callable, Tuple, Sequence, Dict, Generic, AnyStr, Match, Pattern, Any, Union ) # ----- re variables and constants ----- DEBUG = 0 I = 0 IGNORECASE = 0 L = 0 LOCALE = 0 M = 0 MULTILINE = 0 S = 0 DOTALL = 0 X = 0 VERBOSE = 0 U = 0 UNICODE = 0 T = 0 TEMPLATE = 0 class error(Exception): ... @overload def compile(pattern: AnyStr, flags: int = ...) -> Pattern[AnyStr]: ... @overload def compile(pattern: Pattern[AnyStr], flags: int = ...) -> Pattern[AnyStr]: ... @overload def search(pattern: Union[str, unicode], string: AnyStr, flags: int = ...) -> Match[AnyStr]: ... @overload def search(pattern: Union[Pattern[str], Pattern[unicode]], string: AnyStr, flags: int = ...) -> Match[AnyStr]: ... @overload def match(pattern: Union[str, unicode], string: AnyStr, flags: int = ...) -> Match[AnyStr]: ... @overload def match(pattern: Union[Pattern[str], Pattern[unicode]], string: AnyStr, flags: int = ...) -> Match[AnyStr]: ... @overload def split(pattern: Union[str, unicode], string: AnyStr, maxsplit: int = ..., flags: int = ...) -> List[AnyStr]: ... @overload def split(pattern: Union[Pattern[str], Pattern[unicode]], string: AnyStr, maxsplit: int = ..., flags: int = ...) -> List[AnyStr]: ... @overload def findall(pattern: Union[str, unicode], string: AnyStr, flags: int = ...) -> List[Any]: ... @overload def findall(pattern: Union[Pattern[str], Pattern[unicode]], string: AnyStr, flags: int = ...) -> List[Any]: ... # Return an iterator yielding match objects over all non-overlapping matches # for the RE pattern in string. The string is scanned left-to-right, and # matches are returned in the order found. Empty matches are included in the # result unless they touch the beginning of another match. @overload def finditer(pattern: Union[str, unicode], string: AnyStr, flags: int = ...) -> Iterator[Match[AnyStr]]: ... @overload def finditer(pattern: Union[Pattern[str], Pattern[unicode]], string: AnyStr, flags: int = ...) -> Iterator[Match[AnyStr]]: ... @overload def sub(pattern: Union[str, unicode], repl: AnyStr, string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ... @overload def sub(pattern: Union[str, unicode], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ... @overload def sub(pattern: Union[Pattern[str], Pattern[unicode]], repl: AnyStr, string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ... @overload def sub(pattern: Union[Pattern[str], Pattern[unicode]], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ... @overload def subn(pattern: Union[str, unicode], repl: AnyStr, string: AnyStr, count: int = ..., flags: int = ...) -> Tuple[AnyStr, int]: ... @overload def subn(pattern: Union[str, unicode], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: int = ...) -> Tuple[AnyStr, int]: ... @overload def subn(pattern: Union[Pattern[str], Pattern[unicode]], repl: AnyStr, string: AnyStr, count: int = ..., flags: int = ...) -> Tuple[AnyStr, int]: ... @overload def subn(pattern: Union[Pattern[str], Pattern[unicode]], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: int = ...) -> Tuple[AnyStr, int]: ... 
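# A minimal usage sketch (illustrative only, not part of the re stub itself):
# finditer() yields Match objects left to right as described in the comment
# above, and subn() returns a (new_string, count) pair. The sample text and
# patterns are made up.
import re

text = 'cat bat hat'
for m in re.finditer(r'[cbh]at', text):   # Iterator[Match[str]] under the stub
    print(m.group(0))
updated, count = re.subn(r'hat', 'hog', text)  # Tuple[str, int] under the stub
print(updated)
print(count)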
def escape(string: AnyStr) -> AnyStr: ... def purge() -> None: ... def template(pattern: Union[AnyStr, Pattern[AnyStr]], flags: int = ...) -> Pattern[AnyStr]: ... mypy-0.560/typeshed/stdlib/2/repr.pyi0000644€tŠÔÚ€2›s®0000000231313215007212023626 0ustar jukkaDROPBOX\Domain Users00000000000000class Repr: maxarray = ... # type: int maxdeque = ... # type: int maxdict = ... # type: int maxfrozenset = ... # type: int maxlevel = ... # type: int maxlist = ... # type: int maxlong = ... # type: int maxother = ... # type: int maxset = ... # type: int maxstring = ... # type: int maxtuple = ... # type: int def __init__(self) -> None: ... def _repr_iterable(self, x, level: complex, left, right, maxiter, trail=...) -> str: ... def repr(self, x) -> str: ... def repr1(self, x, level: complex) -> str: ... def repr_array(self, x, level: complex) -> str: ... def repr_deque(self, x, level: complex) -> str: ... def repr_dict(self, x, level: complex) -> str: ... def repr_frozenset(self, x, level: complex) -> str: ... def repr_instance(self, x, level: complex) -> str: ... def repr_list(self, x, level: complex) -> str: ... def repr_long(self, x, level: complex) -> str: ... def repr_set(self, x, level: complex) -> str: ... def repr_str(self, x, level: complex) -> str: ... def repr_tuple(self, x, level: complex) -> str: ... def _possibly_sorted(x) -> list: ... aRepr = ... # type: Repr def repr(x) -> str: ... mypy-0.560/typeshed/stdlib/2/resource.pyi0000644€tŠÔÚ€2›s®0000000240713215007212024511 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple, NamedTuple class error(Exception): ... RLIM_INFINITY = ... # type: int def getrlimit(resource: int) -> Tuple[int, int]: ... def setrlimit(resource: int, limits: Tuple[int, int]) -> None: ... RLIMIT_CORE = ... # type: int RLIMIT_CPU = ... # type: int RLIMIT_FSIZE = ... # type: int RLIMIT_DATA = ... # type: int RLIMIT_STACK = ... # type: int RLIMIT_RSS = ... # type: int RLIMIT_NPROC = ... # type: int RLIMIT_NOFILE = ... # type: int RLIMIT_OFILE = ... # type: int RLIMIT_MEMLOCK = ... # type: int RLIMIT_VMEM = ... # type: int RLIMIT_AS = ... # type: int _RUsage = NamedTuple('_RUsage', [('ru_utime', float), ('ru_stime', float), ('ru_maxrss', int), ('ru_ixrss', int), ('ru_idrss', int), ('ru_isrss', int), ('ru_minflt', int), ('ru_majflt', int), ('ru_nswap', int), ('ru_inblock', int), ('ru_oublock', int), ('ru_msgsnd', int), ('ru_msgrcv', int), ('ru_nsignals', int), ('ru_nvcsw', int), ('ru_nivcsw', int)]) def getrusage(who: int) -> _RUsage: ... def getpagesize() -> int: ... RUSAGE_SELF = ... # type: int RUSAGE_CHILDREN = ... # type: int RUSAGE_BOTH = ... # type: int mypy-0.560/typeshed/stdlib/2/rfc822.pyi0000644€tŠÔÚ€2›s®0000000457213215007212023675 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for rfc822 (Python 2) # # Based on stub generated by stubgen. from typing import Any class Message: fp = ... # type: Any seekable = ... # type: Any startofheaders = ... # type: Any startofbody = ... # type: Any def __init__(self, fp, seekable=1): ... def rewindbody(self): ... dict = ... # type: Any unixfrom = ... # type: Any headers = ... # type: Any status = ... # type: Any def readheaders(self): ... def isheader(self, line): ... def islast(self, line): ... def iscomment(self, line): ... def getallmatchingheaders(self, name): ... def getfirstmatchingheader(self, name): ... def getrawheader(self, name): ... def getheader(self, name, default=None): ... get = ... # type: Any def getheaders(self, name): ... def getaddr(self, name): ... def getaddrlist(self, name): ... 
def getdate(self, name): ... def getdate_tz(self, name): ... def __len__(self): ... def __getitem__(self, name): ... def __setitem__(self, name, value): ... def __delitem__(self, name): ... def setdefault(self, name, default=...): ... def has_key(self, name): ... def __contains__(self, name): ... def __iter__(self): ... def keys(self): ... def values(self): ... def items(self): ... class AddrlistClass: specials = ... # type: Any pos = ... # type: Any LWS = ... # type: Any CR = ... # type: Any atomends = ... # type: Any phraseends = ... # type: Any field = ... # type: Any commentlist = ... # type: Any def __init__(self, field): ... def gotonext(self): ... def getaddrlist(self): ... def getaddress(self): ... def getrouteaddr(self): ... def getaddrspec(self): ... def getdomain(self): ... def getdelimited(self, beginchar, endchars, allowcomments=1): ... def getquote(self): ... def getcomment(self): ... def getdomainliteral(self): ... def getatom(self, atomends=None): ... def getphraselist(self): ... class AddressList(AddrlistClass): addresslist = ... # type: Any def __init__(self, field): ... def __len__(self): ... def __add__(self, other): ... def __iadd__(self, other): ... def __sub__(self, other): ... def __isub__(self, other): ... def __getitem__(self, index): ... def parsedate_tz(data): ... def parsedate(data): ... def mktime_tz(data): ... mypy-0.560/typeshed/stdlib/2/robotparser.pyi0000644€tŠÔÚ€2›s®0000000034613215007212025224 0ustar jukkaDROPBOX\Domain Users00000000000000class RobotFileParser: def set_url(self, url: str): ... def read(self): ... def parse(self, lines: str): ... def can_fetch(self, user_agent: str, url: str): ... def mtime(self): ... def modified(self): ... mypy-0.560/typeshed/stdlib/2/runpy.pyi0000644€tŠÔÚ€2›s®0000000076713215007212024046 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class _TempModule: mod_name = ... # type: Any module = ... # type: Any def __init__(self, mod_name): ... def __enter__(self): ... def __exit__(self, *args): ... class _ModifiedArgv0: value = ... # type: Any def __init__(self, value): ... def __enter__(self): ... def __exit__(self, *args): ... def run_module(mod_name, init_globals=None, run_name=None, alter_sys=False): ... def run_path(path_name, init_globals=None, run_name=None): ... mypy-0.560/typeshed/stdlib/2/sets.pyi0000644€tŠÔÚ€2›s®0000000555613215007212023650 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for sets (Python 2) from typing import Any, Callable, Hashable, Iterable, Iterator, MutableMapping, Optional, TypeVar, Union _T = TypeVar('_T') _Setlike = Union[BaseSet[_T], Iterable[_T]] _SelfT = TypeVar('_SelfT', bound=BaseSet) class BaseSet(Iterable[_T]): def __init__(self) -> None: ... def __len__(self) -> int: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... def __iter__(self) -> Iterator[_T]: ... def __cmp__(self, other: Any) -> int: ... def __eq__(self, other: Any) -> bool: ... def __ne__(self, other: Any) -> bool: ... def copy(self: _SelfT) -> _SelfT: ... def __copy__(self: _SelfT) -> _SelfT: ... def __deepcopy__(self: _SelfT, memo: MutableMapping[int, BaseSet[_T]]) -> _SelfT: ... def __or__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ... def union(self: _SelfT, other: _Setlike) -> _SelfT: ... def __and__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ... def intersection(self: _SelfT, other: _Setlike) -> _SelfT: ... def __xor__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ... def symmetric_difference(self: _SelfT, other: _Setlike) -> _SelfT: ... 
def __sub__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ... def difference(self: _SelfT, other: _Setlike) -> _SelfT: ... def __contains__(self, element: Any) -> bool: ... def issubset(self, other: BaseSet[_T]) -> bool: ... def issuperset(self, other: BaseSet[_T]) -> bool: ... def __le__(self, other: BaseSet[_T]) -> bool: ... def __ge__(self, other: BaseSet[_T]) -> bool: ... def __lt__(self, other: BaseSet[_T]) -> bool: ... def __gt__(self, other: BaseSet[_T]) -> bool: ... class ImmutableSet(BaseSet[_T], Hashable): def __init__(self, iterable: Optional[_Setlike] = ...) -> None: ... def __hash__(self) -> int: ... class Set(BaseSet[_T]): def __init__(self, iterable: Optional[_Setlike] = ...) -> None: ... def __ior__(self, other: BaseSet[_T]) -> Set: ... def union_update(self, other: _Setlike) -> None: ... def __iand__(self, other: BaseSet[_T]) -> Set: ... def intersection_update(self, other: _Setlike) -> None: ... def __ixor__(self, other: BaseSet[_T]) -> Set: ... def symmetric_difference_update(self, other: _Setlike) -> None: ... def __isub__(self, other: BaseSet[_T]) -> Set: ... def difference_update(self, other: _Setlike) -> None: ... def update(self, iterable: _Setlike) -> None: ... def clear(self) -> None: ... def add(self, element: _T) -> None: ... def remove(self, element: _T) -> None: ... def discard(self, element: _T) -> None: ... def pop(self) -> _T: ... def __as_immutable__(self) -> ImmutableSet[_T]: ... def __as_temporarily_immutable__(self) -> _TemporarilyImmutableSet[_T]: ... class _TemporarilyImmutableSet(BaseSet[_T]): def __init__(self, set: BaseSet[_T]) -> None: ... def __hash__(self) -> int: ... mypy-0.560/typeshed/stdlib/2/sha.pyi0000644€tŠÔÚ€2›s®0000000042313215007212023431 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for Python 2.7 sha stdlib module class sha(object): def update(self, arg: str) -> None: ... def digest(self) -> str: ... def hexdigest(self) -> str: ... def copy(self) -> sha: ... def new(string: str = ...) -> sha: ... blocksize = 0 digest_size = 0 mypy-0.560/typeshed/stdlib/2/shelve.pyi0000644€tŠÔÚ€2›s®0000000304713215007212024151 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Iterator, List, Optional, Tuple import collections class Shelf(collections.MutableMapping): def __init__(self, dict: Dict[Any, Any], protocol: Optional[int] = ..., writeback: bool = ..., keyencoding: str = ...) -> None: ... def __iter__(self) -> Iterator[str]: ... def keys(self) -> List[Any]: ... def __len__(self) -> int: ... def has_key(self, key: Any) -> bool: ... def __contains__(self, key: Any) -> bool: ... def get(self, key: Any, default: Any = ...) -> Any: ... def __getitem__(self, key: Any) -> Any: ... def __setitem__(self, key: Any, value: Any) -> None: ... def __delitem__(self, key: Any) -> None: ... def __enter__(self) -> Shelf: ... def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ... def close(self) -> None: ... def __del__(self) -> None: ... def sync(self) -> None: ... class BsdDbShelf(Shelf): def __init__(self, dict: Dict[Any, Any], protocol: Optional[int] = ..., writeback: bool = ..., keyencoding: str = ...) -> None: ... def set_location(self, key: Any) -> Tuple[str, Any]: ... def next(self) -> Tuple[str, Any]: ... def previous(self) -> Tuple[str, Any]: ... def first(self) -> Tuple[str, Any]: ... def last(self) -> Tuple[str, Any]: ... class DbfilenameShelf(Shelf): def __init__(self, filename: str, flag: str = ..., protocol: Optional[int] = ..., writeback: bool = ...) -> None: ... 
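# A minimal usage sketch (illustrative only, not part of the shelve stub itself):
# the module-level open() declared just below returns a DbfilenameShelf, which
# supports the mapping-style access typed above. The filename is hypothetical.
import shelve

db = shelve.open('demo_shelf')   # DbfilenameShelf under the stub
db['answer'] = 42
print(db.get('answer'))
db.close()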
def open(filename: str, flag: str = ..., protocol: Optional[int] = ..., writeback: bool = ...) -> DbfilenameShelf: ... mypy-0.560/typeshed/stdlib/2/shlex.pyi0000644€tŠÔÚ€2›s®0000000204713215007212024005 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, List, Any, IO def split(s: Optional[str], comments: bool = ..., posix: bool = ...) -> List[str]: ... class shlex: def __init__(self, instream: IO[Any] = ..., infile: IO[Any] = ..., posix: bool = ...) -> None: ... def get_token(self) -> Optional[str]: ... def push_token(self, _str: str) -> None: ... def read_token(self) -> str: ... def sourcehook(self, filename: str) -> None: ... def push_source(self, stream: IO[Any], filename: str = ...) -> None: ... def pop_source(self) -> IO[Any]: ... def error_leader(self, file: str = ..., line: int = ...) -> str: ... commenters = ... # type: str wordchars = ... # type: str whitespace = ... # type: str escape = ... # type: str quotes = ... # type: str escapedquotes = ... # type: str whitespace_split = ... # type: bool infile = ... # type: IO[Any] source = ... # type: Optional[str] debug = ... # type: int lineno = ... # type: int token = ... # type: Any eof = ... # type: Optional[str] mypy-0.560/typeshed/stdlib/2/shutil.pyi0000644€tŠÔÚ€2›s®0000000302313215007212024165 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Iterable, Callable, IO, AnyStr, Any, Optional, Tuple, Sequence class Error(EnvironmentError): ... class SpecialFileError(EnvironmentError): ... class ExecError(EnvironmentError): ... def copyfileobj(fsrc: IO[AnyStr], fdst: IO[AnyStr], length: int = ...) -> None: ... def copyfile(src: unicode, dst: unicode) -> None: ... def copymode(src: unicode, dst: unicode) -> None: ... def copystat(src: unicode, dst: unicode) -> None: ... def copy(src: unicode, dst: unicode) -> None: ... def copy2(src: unicode, dst: unicode) -> None: ... def ignore_patterns(*patterns: AnyStr) -> Callable[[AnyStr, List[AnyStr]], Iterable[AnyStr]]: ... def copytree(src: AnyStr, dst: AnyStr, symlinks: bool = ..., ignore: Optional[Callable[[AnyStr, List[AnyStr]], Iterable[AnyStr]]] = ...) -> None: ... def rmtree(path: AnyStr, ignore_errors: bool = ..., onerror: Callable[[Any, AnyStr, Any], None] = ...) -> None: ... def move(src: unicode, dst: unicode) -> None: ... def get_archive_formats() -> List[Tuple[str, str]]: ... def register_archive_format(name: str, function: Callable[..., Any], extra_args: Sequence[Tuple[str, Any]] = ..., description: str = ...) -> None: ... def unregister_archive_format(name: str) -> None: ... def make_archive(base_name: AnyStr, format: str, root_dir: unicode = ..., base_dir: unicode = ..., verbose: int = ..., dry_run: int = ..., owner: str = ..., group: str = ..., logger: Any = ...) -> AnyStr: ... mypy-0.560/typeshed/stdlib/2/signal.pyi0000644€tŠÔÚ€2›s®0000000345213215007212024140 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Callable, Any, Tuple, Union from types import FrameType SIG_DFL = ... # type: int SIG_IGN = ... # type: int ITIMER_REAL = ... # type: int ITIMER_VIRTUAL = ... # type: int ITIMER_PROF = ... # type: int SIGABRT = ... # type: int SIGALRM = ... # type: int SIGBUS = ... # type: int SIGCHLD = ... # type: int SIGCLD = ... # type: int SIGCONT = ... # type: int SIGFPE = ... # type: int SIGHUP = ... # type: int SIGILL = ... # type: int SIGINT = ... # type: int SIGIO = ... # type: int SIGIOT = ... # type: int SIGKILL = ... # type: int SIGPIPE = ... # type: int SIGPOLL = ... # type: int SIGPROF = ... # type: int SIGPWR = ... 
# type: int SIGQUIT = ... # type: int SIGRTMAX = ... # type: int SIGRTMIN = ... # type: int SIGSEGV = ... # type: int SIGSTOP = ... # type: int SIGSYS = ... # type: int SIGTERM = ... # type: int SIGTRAP = ... # type: int SIGTSTP = ... # type: int SIGTTIN = ... # type: int SIGTTOU = ... # type: int SIGURG = ... # type: int SIGUSR1 = ... # type: int SIGUSR2 = ... # type: int SIGVTALRM = ... # type: int SIGWINCH = ... # type: int SIGXCPU = ... # type: int SIGXFSZ = ... # type: int NSIG = ... # type: int class ItimerError(IOError): ... _HANDLER = Union[Callable[[int, FrameType], None], int, None] def alarm(time: int) -> int: ... def getsignal(signalnum: int) -> _HANDLER: ... def pause() -> None: ... def setitimer(which: int, seconds: float, interval: float = ...) -> Tuple[float, float]: ... def getitimer(which: int) -> Tuple[float, float]: ... def set_wakeup_fd(fd: int) -> int: ... def siginterrupt(signalnum: int, flag: bool) -> None: raise RuntimeError() def signal(signalnum: int, handler: _HANDLER) -> _HANDLER: raise RuntimeError() def default_int_handler(signum: int, frame: FrameType) -> None: raise KeyboardInterrupt() mypy-0.560/typeshed/stdlib/2/SimpleHTTPServer.pyi0000644€tŠÔÚ€2›s®0000000131013215007212025772 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for SimpleHTTPServer (Python 2) from typing import Any, AnyStr, IO, Mapping, Optional, Union import BaseHTTPServer from StringIO import StringIO class SimpleHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): server_version = ... # type: str def do_GET(self) -> None: ... def do_HEAD(self) -> None: ... def send_head(self) -> Optional[IO[str]]: ... def list_directory(self, path: Union[str, unicode]) -> Optional[StringIO]: ... def translate_path(self, path: AnyStr) -> AnyStr: ... def copyfile(self, source: IO[AnyStr], outputfile: IO[AnyStr]): ... def guess_type(self, path: Union[str, unicode]) -> str: ... extensions_map = ... # type: Mapping[str, str] mypy-0.560/typeshed/stdlib/2/smtplib.pyi0000644€tŠÔÚ€2›s®0000000555013215007212024336 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class SMTPException(Exception): ... class SMTPServerDisconnected(SMTPException): ... class SMTPResponseException(SMTPException): smtp_code = ... # type: Any smtp_error = ... # type: Any args = ... # type: Any def __init__(self, code, msg) -> None: ... class SMTPSenderRefused(SMTPResponseException): smtp_code = ... # type: Any smtp_error = ... # type: Any sender = ... # type: Any args = ... # type: Any def __init__(self, code, msg, sender) -> None: ... class SMTPRecipientsRefused(SMTPException): recipients = ... # type: Any args = ... # type: Any def __init__(self, recipients) -> None: ... class SMTPDataError(SMTPResponseException): ... class SMTPConnectError(SMTPResponseException): ... class SMTPHeloError(SMTPResponseException): ... class SMTPAuthenticationError(SMTPResponseException): ... def quoteaddr(addr): ... def quotedata(data): ... class SSLFakeFile: sslobj = ... # type: Any def __init__(self, sslobj) -> None: ... def readline(self, size=...): ... def close(self): ... class SMTP: debuglevel = ... # type: Any file = ... # type: Any helo_resp = ... # type: Any ehlo_msg = ... # type: Any ehlo_resp = ... # type: Any does_esmtp = ... # type: Any default_port = ... # type: Any timeout = ... # type: Any esmtp_features = ... # type: Any local_hostname = ... # type: Any def __init__(self, host: str = ..., port: int = ..., local_hostname=..., timeout=...) -> None: ... def set_debuglevel(self, debuglevel): ... sock = ... 
# type: Any def connect(self, host=..., port=...): ... def send(self, str): ... def putcmd(self, cmd, args=...): ... def getreply(self): ... def docmd(self, cmd, args=...): ... def helo(self, name=...): ... def ehlo(self, name=...): ... def has_extn(self, opt): ... def help(self, args=...): ... def rset(self): ... def noop(self): ... def mail(self, sender, options=...): ... def rcpt(self, recip, options=...): ... def data(self, msg): ... def verify(self, address): ... vrfy = ... # type: Any def expn(self, address): ... def ehlo_or_helo_if_needed(self): ... def login(self, user, password): ... def starttls(self, keyfile=..., certfile=...): ... def sendmail(self, from_addr, to_addrs, msg, mail_options=..., rcpt_options=...): ... def close(self): ... def quit(self): ... class SMTP_SSL(SMTP): default_port = ... # type: Any keyfile = ... # type: Any certfile = ... # type: Any def __init__(self, host=..., port=..., local_hostname=..., keyfile=..., certfile=..., timeout=...) -> None: ... class LMTP(SMTP): ehlo_msg = ... # type: Any def __init__(self, host=..., port=..., local_hostname=...) -> None: ... sock = ... # type: Any def connect(self, host=..., port=...): ... mypy-0.560/typeshed/stdlib/2/SocketServer.pyi0000644€tŠÔÚ€2›s®0000000744613215007212025311 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for socketserver from typing import Any, BinaryIO, Optional, Tuple from socket import SocketType import sys import types class BaseServer: address_family = ... # type: int RequestHandlerClass = ... # type: type server_address = ... # type: Tuple[str, int] socket = ... # type: SocketType allow_reuse_address = ... # type: bool request_queue_size = ... # type: int socket_type = ... # type: int timeout = ... # type: Optional[float] def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: type) -> None: ... def fileno(self) -> int: ... def handle_request(self) -> None: ... def serve_forever(self, poll_interval: float = ...) -> None: ... def shutdown(self) -> None: ... def server_close(self) -> None: ... def finish_request(self, request: bytes, client_address: Tuple[str, int]) -> None: ... def get_request(self) -> None: ... def handle_error(self, request: bytes, client_address: Tuple[str, int]) -> None: ... def handle_timeout(self) -> None: ... def process_request(self, request: bytes, client_address: Tuple[str, int]) -> None: ... def server_activate(self) -> None: ... def server_bind(self) -> None: ... def verify_request(self, request: bytes, client_address: Tuple[str, int]) -> bool: ... if sys.version_info >= (3, 6): def __enter__(self) -> 'BaseServer': ... def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception], exc_tb: Optional[types.TracebackType]) -> bool: ... if sys.version_info >= (3, 3): def service_actions(self) -> None: ... class TCPServer(BaseServer): def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: type, bind_and_activate: bool = ...) -> None: ... class UDPServer(BaseServer): def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: type, bind_and_activate: bool = ...) -> None: ... if sys.platform != 'win32': class UnixStreamServer(BaseServer): def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: type, bind_and_activate: bool = ...) -> None: ... class UnixDatagramServer(BaseServer): def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: type, bind_and_activate: bool = ...) -> None: ... class ForkingMixIn: ... class ThreadingMixIn: ... 
class ForkingTCPServer(ForkingMixIn, TCPServer): ... class ForkingUDPServer(ForkingMixIn, UDPServer): ... class ThreadingTCPServer(ThreadingMixIn, TCPServer): ... class ThreadingUDPServer(ThreadingMixIn, UDPServer): ... if sys.platform != 'win32': class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): ... class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): ... class BaseRequestHandler: # Those are technically of types, respectively: # * Union[SocketType, Tuple[bytes, SocketType]] # * Union[Tuple[str, int], str] # But there are some concerns that having unions here would cause # too much inconvenience to people using it (see # https://github.com/python/typeshed/pull/384#issuecomment-234649696) request = ... # type: Any client_address = ... # type: Any server = ... # type: BaseServer def setup(self) -> None: ... def handle(self) -> None: ... def finish(self) -> None: ... class StreamRequestHandler(BaseRequestHandler): rfile = ... # type: BinaryIO wfile = ... # type: BinaryIO class DatagramRequestHandler(BaseRequestHandler): rfile = ... # type: BinaryIO wfile = ... # type: BinaryIO mypy-0.560/typeshed/stdlib/2/spwd.pyi0000644€tŠÔÚ€2›s®0000000122013215007212023627 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, NamedTuple struct_spwd = NamedTuple("struct_spwd", [("sp_nam", str), ("sp_pwd", str), ("sp_lstchg", int), ("sp_min", int), ("sp_max", int), ("sp_warn", int), ("sp_inact", int), ("sp_expire", int), ("sp_flag", int)]) def getspall() -> List[struct_spwd]: ... def getspnam(name: str) -> struct_spwd: ... mypy-0.560/typeshed/stdlib/2/sqlite3/0000755€tŠÔÚ€2›s®0000000000013215007244023525 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2/sqlite3/__init__.pyi0000644€tŠÔÚ€2›s®0000000005313215007212026000 0ustar jukkaDROPBOX\Domain Users00000000000000from sqlite3.dbapi2 import * # noqa: F403 mypy-0.560/typeshed/stdlib/2/sqlite3/dbapi2.pyi0000644€tŠÔÚ€2›s®0000002320713215007212025410 0ustar jukkaDROPBOX\Domain Users00000000000000# Filip Hron # based heavily on Andrey Vlasovskikh's python-skeletons https://github.com/JetBrains/python-skeletons/blob/master/sqlite3.py from typing import Any, Union, List, Iterator, Optional from datetime import time, datetime from collections import Iterable paramstyle = ... # type: str threadsafety = ... # type: int apilevel = ... # type: str Date = ... # type: datetime Time = ... # type: time Timestamp = ... # type: datetime def DateFromTicks(ticks): ... def TimeFromTicks(ticks): ... def TimestampFromTicks(ticks): ... version_info = ... # type: Any sqlite_version_info = ... # type: Any Binary = ... # type: Any def register_adapters_and_converters(): ... # The remaining definitions are imported from _sqlite3. PARSE_COLNAMES = ... # type: int PARSE_DECLTYPES = ... # type: int SQLITE_ALTER_TABLE = ... # type: int SQLITE_ANALYZE = ... # type: int SQLITE_ATTACH = ... # type: int SQLITE_CREATE_INDEX = ... # type: int SQLITE_CREATE_TABLE = ... # type: int SQLITE_CREATE_TEMP_INDEX = ... # type: int SQLITE_CREATE_TEMP_TABLE = ... # type: int SQLITE_CREATE_TEMP_TRIGGER = ... # type: int SQLITE_CREATE_TEMP_VIEW = ... # type: int SQLITE_CREATE_TRIGGER = ... # type: int SQLITE_CREATE_VIEW = ... # type: int SQLITE_DELETE = ... # type: int SQLITE_DENY = ... # type: int SQLITE_DETACH = ... # type: int SQLITE_DROP_INDEX = ... # type: int SQLITE_DROP_TABLE = ... # type: int SQLITE_DROP_TEMP_INDEX = ... # type: int SQLITE_DROP_TEMP_TABLE = ... # type: int SQLITE_DROP_TEMP_TRIGGER = ... 
# type: int SQLITE_DROP_TEMP_VIEW = ... # type: int SQLITE_DROP_TRIGGER = ... # type: int SQLITE_DROP_VIEW = ... # type: int SQLITE_IGNORE = ... # type: int SQLITE_INSERT = ... # type: int SQLITE_OK = ... # type: int SQLITE_PRAGMA = ... # type: int SQLITE_READ = ... # type: int SQLITE_REINDEX = ... # type: int SQLITE_SELECT = ... # type: int SQLITE_TRANSACTION = ... # type: int SQLITE_UPDATE = ... # type: int adapters = ... # type: Any converters = ... # type: Any sqlite_version = ... # type: str version = ... # type: str # TODO: adapt needs to get probed def adapt(obj, protocol, alternate): ... def complete_statement(sql: str) -> bool: ... def connect(database: Union[bytes, unicode], timeout: float = ..., detect_types: int = ..., isolation_level: Union[str, None] = ..., check_same_thread: bool = ..., factory: Union[Connection, None] = ..., cached_statements: int = ...) -> Connection: ... def enable_callback_tracebacks(flag: bool) -> None: ... def enable_shared_cache(do_enable: int) -> None: ... def register_adapter(type: type, callable: Any) -> None: ... # TODO: sqlite3.register_converter.__doc__ specifies callable as unknown def register_converter(typename: str, callable: bytes) -> None: ... class Cache(object): def __init__(self, *args, **kwargs) -> None: ... def display(self, *args, **kwargs) -> None: ... def get(self, *args, **kwargs) -> None: ... class Connection(object): DataError = ... # type: Any DatabaseError = ... # type: Any Error = ... # type: Any IntegrityError = ... # type: Any InterfaceError = ... # type: Any InternalError = ... # type: Any NotSupportedError = ... # type: Any OperationalError = ... # type: Any ProgrammingError = ... # type: Any Warning = ... # type: Any in_transaction = ... # type: Any isolation_level = ... # type: Any row_factory = ... # type: Any text_factory = ... # type: Any total_changes = ... # type: Any def __init__(self, *args, **kwargs): ... def close(self) -> None: ... def commit(self) -> None: ... def create_aggregate(self, name: str, num_params: int, aggregate_class: type) -> None: ... def create_collation(self, name: str, callable: Any) -> None: ... def create_function(self, name: str, num_params: int, func: Any) -> None: ... def cursor(self, cursorClass=...) -> Cursor: ... def execute(self, sql: str, parameters: Iterable = ...) -> Cursor: ... # TODO: please check in executemany() if seq_of_parameters type is possible like this def executemany(self, sql: str, seq_of_parameters: Iterable[Iterable]) -> Cursor: ... def executescript(self, sql_script: Union[bytes, unicode]) -> Cursor: ... def interrupt(self, *args, **kwargs) -> None: ... def iterdump(self, *args, **kwargs) -> None: ... def rollback(self, *args, **kwargs) -> None: ... # TODO: set_authorizer(authorzer_callback) # see https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.set_authorizer # returns [SQLITE_OK, SQLITE_DENY, SQLITE_IGNORE] so perhaps int def set_authorizer(self, *args, **kwargs) -> None: ... # set_progress_handler(handler, n) -> see https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.set_progress_handler def set_progress_handler(self, *args, **kwargs) -> None: ... def set_trace_callback(self, *args, **kwargs): ... def __call__(self, *args, **kwargs): ... def __enter__(self, *args, **kwargs): ... def __exit__(self, *args, **kwargs): ... class Cursor(Iterator[Any]): arraysize = ... # type: Any connection = ... # type: Any description = ... # type: Any lastrowid = ... # type: Any row_factory = ... # type: Any rowcount = ... 
# type: Any # TODO: Cursor class accepts exactly 1 argument # required type is sqlite3.Connection (which is imported as _Connection) # however, the name of the __init__ variable is unknown def __init__(self, *args, **kwargs): ... def close(self, *args, **kwargs): ... def execute(self, sql: str, parameters: Iterable = ...) -> Cursor: ... def executemany(self, sql: str, seq_of_parameters: Iterable[Iterable]): ... def executescript(self, sql_script: Union[bytes, unicode]) -> Cursor: ... def fetchall(self) -> List[Any]: ... def fetchmany(self, size: Optional[int] = ...) -> List[Any]: ... def fetchone(self) -> Any: ... def setinputsizes(self, *args, **kwargs): ... def setoutputsize(self, *args, **kwargs): ... def __iter__(self) -> Cursor: ... def __next__(self) -> Any: ... class DataError(DatabaseError): ... class DatabaseError(Error): ... class Error(Exception): ... class IntegrityError(DatabaseError): ... class InterfaceError(Error): ... class InternalError(DatabaseError): ... class NotSupportedError(DatabaseError): ... class OperationalError(DatabaseError): ... class OptimizedUnicode(object): maketrans = ... # type: Any def __init__(self, *args, **kwargs): ... def capitalize(self, *args, **kwargs): ... def casefold(self, *args, **kwargs): ... def center(self, *args, **kwargs): ... def count(self, *args, **kwargs): ... def encode(self, *args, **kwargs): ... def endswith(self, *args, **kwargs): ... def expandtabs(self, *args, **kwargs): ... def find(self, *args, **kwargs): ... def format(self, *args, **kwargs): ... def format_map(self, *args, **kwargs): ... def index(self, *args, **kwargs): ... def isalnum(self, *args, **kwargs): ... def isalpha(self, *args, **kwargs): ... def isdecimal(self, *args, **kwargs): ... def isdigit(self, *args, **kwargs): ... def isidentifier(self, *args, **kwargs): ... def islower(self, *args, **kwargs): ... def isnumeric(self, *args, **kwargs): ... def isprintable(self, *args, **kwargs): ... def isspace(self, *args, **kwargs): ... def istitle(self, *args, **kwargs): ... def isupper(self, *args, **kwargs): ... def join(self, *args, **kwargs): ... def ljust(self, *args, **kwargs): ... def lower(self, *args, **kwargs): ... def lstrip(self, *args, **kwargs): ... def partition(self, *args, **kwargs): ... def replace(self, *args, **kwargs): ... def rfind(self, *args, **kwargs): ... def rindex(self, *args, **kwargs): ... def rjust(self, *args, **kwargs): ... def rpartition(self, *args, **kwargs): ... def rsplit(self, *args, **kwargs): ... def rstrip(self, *args, **kwargs): ... def split(self, *args, **kwargs): ... def splitlines(self, *args, **kwargs): ... def startswith(self, *args, **kwargs): ... def strip(self, *args, **kwargs): ... def swapcase(self, *args, **kwargs): ... def title(self, *args, **kwargs): ... def translate(self, *args, **kwargs): ... def upper(self, *args, **kwargs): ... def zfill(self, *args, **kwargs): ... def __add__(self, other): ... def __contains__(self, *args, **kwargs): ... def __eq__(self, other): ... def __format__(self, *args, **kwargs): ... def __ge__(self, other): ... def __getitem__(self, index): ... def __getnewargs__(self, *args, **kwargs): ... def __gt__(self, other): ... def __hash__(self): ... def __iter__(self): ... def __le__(self, other): ... def __len__(self, *args, **kwargs): ... def __lt__(self, other): ... def __mod__(self, other): ... def __mul__(self, other): ... def __ne__(self, other): ... def __rmod__(self, other): ... def __rmul__(self, other): ... class PrepareProtocol(object): def __init__(self, *args, **kwargs): ... 
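# A minimal usage sketch (illustrative only, not part of the sqlite3 stub
# itself): connect() returns a Connection and cursor() a Cursor as typed above.
# The table name and values are made up.
import sqlite3

conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute('CREATE TABLE t (x INTEGER)')
cur.executemany('INSERT INTO t VALUES (?)', [(1,), (2,), (3,)])
conn.commit()
cur.execute('SELECT x FROM t ORDER BY x')
print(cur.fetchall())   # List[Any] under the stub
conn.close()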
class ProgrammingError(DatabaseError): ... class Row(object): def __init__(self, *args, **kwargs): ... def keys(self, *args, **kwargs): ... def __eq__(self, other): ... def __ge__(self, other): ... def __getitem__(self, index): ... def __gt__(self, other): ... def __hash__(self): ... def __iter__(self): ... def __le__(self, other): ... def __len__(self, *args, **kwargs): ... def __lt__(self, other): ... def __ne__(self, other): ... class Statement(object): def __init__(self, *args, **kwargs): ... class Warning(Exception): ... mypy-0.560/typeshed/stdlib/2/sre_constants.pyi0000644€tŠÔÚ€2›s®0000000564013215007212025551 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/sre_constants.py from typing import Dict, List, TypeVar MAGIC = ... # type: int MAXREPEAT = ... # type: int class error(Exception): ... FAILURE = ... # type: str SUCCESS = ... # type: str ANY = ... # type: str ANY_ALL = ... # type: str ASSERT = ... # type: str ASSERT_NOT = ... # type: str AT = ... # type: str BIGCHARSET = ... # type: str BRANCH = ... # type: str CALL = ... # type: str CATEGORY = ... # type: str CHARSET = ... # type: str GROUPREF = ... # type: str GROUPREF_IGNORE = ... # type: str GROUPREF_EXISTS = ... # type: str IN = ... # type: str IN_IGNORE = ... # type: str INFO = ... # type: str JUMP = ... # type: str LITERAL = ... # type: str LITERAL_IGNORE = ... # type: str MARK = ... # type: str MAX_REPEAT = ... # type: str MAX_UNTIL = ... # type: str MIN_REPEAT = ... # type: str MIN_UNTIL = ... # type: str NEGATE = ... # type: str NOT_LITERAL = ... # type: str NOT_LITERAL_IGNORE = ... # type: str RANGE = ... # type: str REPEAT = ... # type: str REPEAT_ONE = ... # type: str SUBPATTERN = ... # type: str MIN_REPEAT_ONE = ... # type: str AT_BEGINNING = ... # type: str AT_BEGINNING_LINE = ... # type: str AT_BEGINNING_STRING = ... # type: str AT_BOUNDARY = ... # type: str AT_NON_BOUNDARY = ... # type: str AT_END = ... # type: str AT_END_LINE = ... # type: str AT_END_STRING = ... # type: str AT_LOC_BOUNDARY = ... # type: str AT_LOC_NON_BOUNDARY = ... # type: str AT_UNI_BOUNDARY = ... # type: str AT_UNI_NON_BOUNDARY = ... # type: str CATEGORY_DIGIT = ... # type: str CATEGORY_NOT_DIGIT = ... # type: str CATEGORY_SPACE = ... # type: str CATEGORY_NOT_SPACE = ... # type: str CATEGORY_WORD = ... # type: str CATEGORY_NOT_WORD = ... # type: str CATEGORY_LINEBREAK = ... # type: str CATEGORY_NOT_LINEBREAK = ... # type: str CATEGORY_LOC_WORD = ... # type: str CATEGORY_LOC_NOT_WORD = ... # type: str CATEGORY_UNI_DIGIT = ... # type: str CATEGORY_UNI_NOT_DIGIT = ... # type: str CATEGORY_UNI_SPACE = ... # type: str CATEGORY_UNI_NOT_SPACE = ... # type: str CATEGORY_UNI_WORD = ... # type: str CATEGORY_UNI_NOT_WORD = ... # type: str CATEGORY_UNI_LINEBREAK = ... # type: str CATEGORY_UNI_NOT_LINEBREAK = ... # type: str _T = TypeVar('_T') def makedict(list: List[_T]) -> Dict[_T, int]: ... OP_IGNORE = ... # type: Dict[str, str] AT_MULTILINE = ... # type: Dict[str, str] AT_LOCALE = ... # type: Dict[str, str] AT_UNICODE = ... # type: Dict[str, str] CH_LOCALE = ... # type: Dict[str, str] CH_UNICODE = ... # type: Dict[str, str] SRE_FLAG_TEMPLATE = ... # type: int SRE_FLAG_IGNORECASE = ... # type: int SRE_FLAG_LOCALE = ... # type: int SRE_FLAG_MULTILINE = ... # type: int SRE_FLAG_DOTALL = ... # type: int SRE_FLAG_UNICODE = ... # type: int SRE_FLAG_VERBOSE = ... # type: int SRE_FLAG_DEBUG = ... # type: int SRE_INFO_PREFIX = ... # type: int SRE_INFO_LITERAL = ... # type: int SRE_INFO_CHARSET = ... 
# type: int mypy-0.560/typeshed/stdlib/2/sre_parse.pyi0000644€tŠÔÚ€2›s®0000000507113215007212024645 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/sre_parse.py from typing import Any, Dict, Iterable, List, Match, Optional, Pattern as _Pattern, Set, Tuple, Union SPECIAL_CHARS = ... # type: str REPEAT_CHARS = ... # type: str DIGITS = ... # type: Set OCTDIGITS = ... # type: Set HEXDIGITS = ... # type: Set WHITESPACE = ... # type: Set ESCAPES = ... # type: Dict[str, Tuple[str, int]] CATEGORIES = ... # type: Dict[str, Union[Tuple[str, str], Tuple[str, List[Tuple[str, str]]]]] FLAGS = ... # type: Dict[str, int] class Pattern: flags = ... # type: int open = ... # type: List[int] groups = ... # type: int groupdict = ... # type: Dict[str, int] lookbehind = ... # type: int def __init__(self) -> None: ... def opengroup(self, name: str = ...) -> int: ... def closegroup(self, gid: int) -> None: ... def checkgroup(self, gid: int) -> bool: ... _OpSubpatternType = Tuple[Optional[int], int, int, SubPattern] _OpGroupRefExistsType = Tuple[int, SubPattern, SubPattern] _OpInType = List[Tuple[str, int]] _OpBranchType = Tuple[None, List[SubPattern]] _AvType = Union[_OpInType, _OpBranchType, Iterable[SubPattern], _OpGroupRefExistsType, _OpSubpatternType] _CodeType = Union[str, _AvType] class SubPattern: pattern = ... # type: str data = ... # type: List[_CodeType] width = ... # type: Optional[int] def __init__(self, pattern, data: List[_CodeType] = ...) -> None: ... def dump(self, level: int = ...) -> None: ... def __len__(self) -> int: ... def __delitem__(self, index: Union[int, slice]) -> None: ... def __getitem__(self, index: Union[int, slice]) -> Union[SubPattern, _CodeType]: ... def __setitem__(self, index: Union[int, slice], code: _CodeType): ... def insert(self, index, code: _CodeType) -> None: ... def append(self, code: _CodeType) -> None: ... def getwidth(self) -> int: ... class Tokenizer: string = ... # type: str index = ... # type: int def __init__(self, string: str) -> None: ... def match(self, char: str, skip: int = ...) -> int: ... def get(self) -> Optional[str]: ... def tell(self) -> Tuple[int, Optional[str]]: ... def seek(self, index: int) -> None: ... def isident(char: str) -> bool: ... def isdigit(char: str) -> bool: ... def isname(name: str) -> bool: ... def parse(str: str, flags: int = ..., pattern: Pattern = ...) -> SubPattern: ... _Template = Tuple[List[Tuple[int, int]], List[Optional[int]]] def parse_template(source: str, pattern: _Pattern) -> _Template: ... def expand_template(template: _Template, match: Match) -> str: ... mypy-0.560/typeshed/stdlib/2/ssl.pyi0000644€tŠÔÚ€2›s®0000002057213215007212023466 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for ssl from typing import ( Any, Dict, Callable, List, NamedTuple, Optional, Set, Tuple, Union, ) import socket import sys _PCTRTT = Tuple[Tuple[str, str], ...] _PCTRTTT = Tuple[_PCTRTT, ...] _PeerCertRetDictType = Dict[str, Union[str, _PCTRTTT, _PCTRTT]] _PeerCertRetType = Union[_PeerCertRetDictType, bytes, None] _EnumRetType = List[Tuple[bytes, str, Union[Set[str], bool]]] _PasswordType = Union[Callable[[], Union[str, bytes]], str, bytes] _SrvnmeCbType = Callable[['SSLSocket', Optional[str], 'SSLSocket'], Optional[int]] class SSLError(OSError): library = ... # type: str reason = ... # type: str class SSLZeroReturnError(SSLError): ... class SSLWantReadError(SSLError): ... class SSLWantWriteError(SSLError): ... class SSLSyscallError(SSLError): ... class SSLEOFError(SSLError): ... 
class CertificateError(Exception): ... def wrap_socket(sock: socket.socket, keyfile: Optional[str] = ..., certfile: Optional[str] = ..., server_side: bool = ..., cert_reqs: int = ..., ssl_version: int = ..., ca_certs: Optional[str] = ..., do_handshake_on_connect: bool = ..., suppress_ragged_eofs: bool = ..., ciphers: Optional[str] = ...) -> 'SSLSocket': ... def create_default_context(purpose: Any = ..., *, cafile: Optional[str] = ..., capath: Optional[str] = ..., cadata: Optional[str] = ...) -> 'SSLContext': ... def RAND_status() -> bool: ... def RAND_egd(path: str) -> None: ... def RAND_add(bytes: bytes, entropy: float) -> None: ... def match_hostname(cert: _PeerCertRetType, hostname: str) -> None: ... def cert_time_to_seconds(cert_time: str) -> int: ... def get_server_certificate(addr: Tuple[str, int], ssl_version: int = ..., ca_certs: Optional[str] = ...) -> str: ... def DER_cert_to_PEM_cert(der_cert_bytes: bytes) -> str: ... def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: ... DefaultVerifyPaths = NamedTuple('DefaultVerifyPaths', [('cafile', str), ('capath', str), ('openssl_cafile_env', str), ('openssl_cafile', str), ('openssl_capath_env', str), ('openssl_capath', str)]) def get_default_verify_paths() -> DefaultVerifyPaths: ... if sys.platform == 'win32': def enum_certificates(store_name: str) -> _EnumRetType: ... def enum_crls(store_name: str) -> _EnumRetType: ... CERT_NONE = ... # type: int CERT_OPTIONAL = ... # type: int CERT_REQUIRED = ... # type: int VERIFY_DEFAULT = ... # type: int VERIFY_CRL_CHECK_LEAF = ... # type: int VERIFY_CRL_CHECK_CHAIN = ... # type: int VERIFY_X509_STRICT = ... # type: int VERIFY_X509_TRUSTED_FIRST = ... # type: int PROTOCOL_SSLv23 = ... # type: int PROTOCOL_SSLv2 = ... # type: int PROTOCOL_SSLv3 = ... # type: int PROTOCOL_TLSv1 = ... # type: int PROTOCOL_TLSv1_1 = ... # type: int PROTOCOL_TLSv1_2 = ... # type: int OP_ALL = ... # type: int OP_NO_SSLv2 = ... # type: int OP_NO_SSLv3 = ... # type: int OP_NO_TLSv1 = ... # type: int OP_NO_TLSv1_1 = ... # type: int OP_NO_TLSv1_2 = ... # type: int OP_CIPHER_SERVER_PREFERENCE = ... # type: int OP_SINGLE_DH_USE = ... # type: int OP_SINGLE_ECDH_USE = ... # type: int OP_NO_COMPRESSION = ... # type: int HAS_ALPN = ... # type: int HAS_ECDH = ... # type: bool HAS_SNI = ... # type: bool HAS_NPN = ... # type: bool CHANNEL_BINDING_TYPES = ... # type: List[str] OPENSSL_VERSION = ... # type: str OPENSSL_VERSION_INFO = ... # type: Tuple[int, int, int, int, int] OPENSSL_VERSION_NUMBER = ... # type: int ALERT_DESCRIPTION_HANDSHAKE_FAILURE = ... # type: int ALERT_DESCRIPTION_INTERNAL_ERROR = ... # type: int ALERT_DESCRIPTION_ACCESS_DENIED = ... # type: int ALERT_DESCRIPTION_BAD_CERTIFICATE = ... # type: int ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE = ... # type: int ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE = ... # type: int ALERT_DESCRIPTION_BAD_RECORD_MAC = ... # type: int ALERT_DESCRIPTION_CERTIFICATE_EXPIRED = ... # type: int ALERT_DESCRIPTION_CERTIFICATE_REVOKED = ... # type: int ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN = ... # type: int ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE = ... # type: int ALERT_DESCRIPTION_CLOSE_NOTIFY = ... # type: int ALERT_DESCRIPTION_DECODE_ERROR = ... # type: int ALERT_DESCRIPTION_DECOMPRESSION_FAILURE = ... # type: int ALERT_DESCRIPTION_DECRYPT_ERROR = ... # type: int ALERT_DESCRIPTION_ILLEGAL_PARAMETER = ... # type: int ALERT_DESCRIPTION_INSUFFICIENT_SECURITY = ... # type: int ALERT_DESCRIPTION_NO_RENEGOTIATION = ... # type: int ALERT_DESCRIPTION_PROTOCOL_VERSION = ... 
# type: int ALERT_DESCRIPTION_RECORD_OVERFLOW = ... # type: int ALERT_DESCRIPTION_UNEXPECTED_MESSAGE = ... # type: int ALERT_DESCRIPTION_UNKNOWN_CA = ... # type: int ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY = ... # type: int ALERT_DESCRIPTION_UNRECOGNIZED_NAME = ... # type: int ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE = ... # type: int ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION = ... # type: int ALERT_DESCRIPTION_USER_CANCELLED = ... # type: int _PurposeType = NamedTuple('_PurposeType', [('nid', int), ('shortname', str), ('longname', str), ('oid', str)]) class Purpose: SERVER_AUTH = ... # type: _PurposeType CLIENT_AUTH = ... # type: _PurposeType class SSLSocket(socket.socket): def do_handshake(self) -> None: ... def getpeercert(self, binary_form: bool = ...) -> _PeerCertRetType: ... def cipher(self) -> Tuple[str, int, int]: ... def compression(self) -> Optional[str]: ... def get_channel_binding(self, cb_type: str = ...) -> Optional[bytes]: ... def selected_alpn_protocol(self) -> Optional[str]: ... def selected_npn_protocol(self) -> Optional[str]: ... def unwrap(self) -> socket.socket: ... def version(self) -> Optional[str]: ... def read(self, len: int = ..., buffer: Optional[bytearray] = ...) -> str: ... def write(self, buf: str) -> int: ... def pending(self) -> int: ... class SSLContext: check_hostname = ... # type: bool options = ... # type: int @property def protocol(self) -> int: ... verify_flags = ... # type: int verify_mode = ... # type: int def __init__(self, protocol: int) -> None: ... def cert_store_stats(self) -> Dict[str, int]: ... def load_cert_chain(self, certfile: str, keyfile: Optional[str] = ..., password: _PasswordType = ...) -> None: ... def load_default_certs(self, purpose: _PurposeType = ...) -> None: ... def load_verify_locations(self, cafile: Optional[str] = ..., capath: Optional[str] = ..., cadata: Union[str, bytes, None] = ...) -> None: ... def get_ca_certs(self, binary_form: bool = ...) -> Union[List[_PeerCertRetDictType], List[bytes]]: ... def set_default_verify_paths(self) -> None: ... def set_ciphers(self, ciphers: str) -> None: ... def set_alpn_protocols(self, protocols: List[str]) -> None: ... def set_npn_protocols(self, protocols: List[str]) -> None: ... def set_servername_callback(self, server_name_callback: Optional[_SrvnmeCbType]) -> None: ... def load_dh_params(self, dhfile: str) -> None: ... def set_ecdh_curve(self, curve_name: str) -> None: ... def wrap_socket(self, sock: socket.socket, server_side: bool = ..., do_handshake_on_connect: bool = ..., suppress_ragged_eofs: bool = ..., server_hostname: Optional[str] = ...) -> SSLSocket: ... def session_stats(self) -> Dict[str, int]: ... # TODO below documented in cpython but not in docs.python.org # taken from python 3.4 SSL_ERROR_EOF = ... # type: int SSL_ERROR_INVALID_ERROR_CODE = ... # type: int SSL_ERROR_SSL = ... # type: int SSL_ERROR_SYSCALL = ... # type: int SSL_ERROR_WANT_CONNECT = ... # type: int SSL_ERROR_WANT_READ = ... # type: int SSL_ERROR_WANT_WRITE = ... # type: int SSL_ERROR_WANT_X509_LOOKUP = ... # type: int SSL_ERROR_ZERO_RETURN = ... # type: int def get_protocol_name(protocol_code: int) -> str: ... AF_INET = ... # type: int PEM_FOOTER = ... # type: str PEM_HEADER = ... # type: str SOCK_STREAM = ... # type: int SOL_SOCKET = ... # type: int SO_TYPE = ... # type: int mypy-0.560/typeshed/stdlib/2/stat.pyi0000644€tŠÔÚ€2›s®0000000166113215007212023636 0ustar jukkaDROPBOX\Domain Users00000000000000def S_ISDIR(mode: int) -> bool: ... def S_ISCHR(mode: int) -> bool: ... 
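# A minimal usage sketch (illustrative only, not part of the stat stub itself):
# combining os.stat() with the S_IS* predicates typed here. The path '.' is
# hypothetical.
import os
import stat

st = os.stat('.')
print(stat.S_ISDIR(st.st_mode))   # True when '.' is a directory
print(stat.S_ISCHR(st.st_mode))   # False for a regular directory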
def S_ISBLK(mode: int) -> bool: ... def S_ISREG(mode: int) -> bool: ... def S_ISFIFO(mode: int) -> bool: ... def S_ISLNK(mode: int) -> bool: ... def S_ISSOCK(mode: int) -> bool: ... def S_IMODE(mode: int) -> int: ... def S_IFMT(mode: int) -> int: ... ST_MODE = 0 ST_INO = 0 ST_DEV = 0 ST_NLINK = 0 ST_UID = 0 ST_GID = 0 ST_SIZE = 0 ST_ATIME = 0 ST_MTIME = 0 ST_CTIME = 0 S_IFSOCK = 0 S_IFLNK = 0 S_IFREG = 0 S_IFBLK = 0 S_IFDIR = 0 S_IFCHR = 0 S_IFIFO = 0 S_ISUID = 0 S_ISGID = 0 S_ISVTX = 0 S_IRWXU = 0 S_IRUSR = 0 S_IWUSR = 0 S_IXUSR = 0 S_IRWXG = 0 S_IRGRP = 0 S_IWGRP = 0 S_IXGRP = 0 S_IRWXO = 0 S_IROTH = 0 S_IWOTH = 0 S_IXOTH = 0 S_ENFMT = 0 S_IREAD = 0 S_IWRITE = 0 S_IEXEC = 0 UF_NODUMP = 0 UF_IMMUTABLE = 0 UF_APPEND = 0 UF_OPAQUE = 0 UF_NOUNLINK = 0 UF_COMPRESSED = 0 UF_HIDDEN = 0 SF_ARCHIVED = 0 SF_IMMUTABLE = 0 SF_APPEND = 0 SF_NOUNLINK = 0 SF_SNAPSHOT = 0 mypy-0.560/typeshed/stdlib/2/string.pyi0000644€tŠÔÚ€2›s®0000000707613215007212024177 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for string # Based on http://docs.python.org/3.2/library/string.html from typing import Mapping, Sequence, Any, Optional, Union, List, Tuple, Iterable, AnyStr ascii_letters = ... # type: str ascii_lowercase = ... # type: str ascii_uppercase = ... # type: str digits = ... # type: str hexdigits = ... # type: str letters = ... # type: str lowercase = ... # type: str octdigits = ... # type: str punctuation = ... # type: str printable = ... # type: str uppercase = ... # type: str whitespace = ... # type: str def capwords(s: AnyStr, sep: AnyStr = ...) -> AnyStr: ... # TODO: originally named 'from' def maketrans(_from: str, to: str) -> str: ... def atof(s: unicode) -> float: ... def atoi(s: unicode, base: int = ...) -> int: ... def atol(s: unicode, base: int = ...) -> int: ... def capitalize(word: AnyStr) -> AnyStr: ... def find(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rfind(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def index(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rindex(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def count(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def lower(s: AnyStr) -> AnyStr: ... def split(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ... def rsplit(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ... def splitfields(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ... def join(words: Iterable[AnyStr], sep: AnyStr = ...) -> AnyStr: ... def joinfields(word: Iterable[AnyStr], sep: AnyStr = ...) -> AnyStr: ... def lstrip(s: AnyStr, chars: AnyStr = ...) -> AnyStr: ... def rstrip(s: AnyStr, chars: AnyStr = ...) -> AnyStr: ... def strip(s: AnyStr, chars: AnyStr = ...) -> AnyStr: ... def swapcase(s: AnyStr) -> AnyStr: ... def translate(s: str, table: str, deletechars: str = ...) -> str: ... def upper(s: AnyStr) -> AnyStr: ... def ljust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ... def rjust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ... def center(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ... def zfill(s: AnyStr, width: int) -> AnyStr: ... def replace(s: AnyStr, old: AnyStr, new: AnyStr, maxreplace: int = ...) -> AnyStr: ... class Template(object): # TODO: Unicode support? template = ... # type: str def __init__(self, template: str) -> None: ... def substitute(self, mapping: Mapping[str, str] = ..., **kwds: str) -> str: ... 
def safe_substitute(self, mapping: Mapping[str, str] = ..., **kwds: str) -> str: ... # TODO(MichalPokorny): This is probably badly and/or loosely typed. class Formatter(object): def format(self, format_string: str, *args, **kwargs) -> str: ... def vformat(self, format_string: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> str: ... def parse(self, format_string: str) -> Iterable[Tuple[str, str, str, str]]: ... def get_field(self, field_name: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... def get_value(self, key: Union[int, str], args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: raise IndexError() raise KeyError() def check_unused_args(self, used_args: Sequence[Union[int, str]], args: Sequence[Any], kwargs: Mapping[str, Any]) -> None: ... def format_field(self, value: Any, format_spec: str) -> Any: ... def convert_field(self, value: Any, conversion: str) -> Any: ... mypy-0.560/typeshed/stdlib/2/StringIO.pyi0000644€tŠÔÚ€2›s®0000000232313215007212024355 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for StringIO (Python 2) from typing import Any, IO, AnyStr, Iterator, Iterable, Generic, List, Optional class StringIO(IO[AnyStr], Generic[AnyStr]): closed = ... # type: bool softspace = ... # type: int len = ... # type: int name = ... # type: str def __init__(self, buf: AnyStr = ...) -> None: ... def __iter__(self) -> Iterator[AnyStr]: ... def next(self) -> AnyStr: ... def close(self) -> None: ... def isatty(self) -> bool: ... def seek(self, pos: int, mode: int = ...) -> int: ... def tell(self) -> int: ... def read(self, n: int = ...) -> AnyStr: ... def readline(self, length: int = ...) -> AnyStr: ... def readlines(self, sizehint: int = ...) -> List[AnyStr]: ... def truncate(self, size: Optional[int] = ...) -> int: ... def write(self, s: AnyStr) -> int: ... def writelines(self, iterable: Iterable[AnyStr]) -> None: ... def flush(self) -> None: ... def getvalue(self) -> AnyStr: ... def __enter__(self) -> Any: ... def __exit__(self, type: Any, value: Any, traceback: Any) -> Any: ... def fileno(self) -> int: ... def readable(self) -> bool: ... def seekable(self) -> bool: ... def writable(self) -> bool: ... mypy-0.560/typeshed/stdlib/2/stringold.pyi0000644€tŠÔÚ€2›s®0000000423313215007212024666 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/stringold.py from typing import AnyStr, Iterable, List, Optional, Type whitespace = ... # type: str lowercase = ... # type: str uppercase = ... # type: str letters = ... # type: str digits = ... # type: str hexdigits = ... # type: str octdigits = ... # type: str _idmap = ... # type: str _idmapL = ... # type: Optional[List[str]] index_error = ValueError atoi_error = ValueError atof_error = ValueError atol_error = ValueError def lower(s: AnyStr) -> AnyStr: ... def upper(s: AnyStr) -> AnyStr: ... def swapcase(s: AnyStr) -> AnyStr: ... def strip(s: AnyStr) -> AnyStr: ... def lstrip(s: AnyStr) -> AnyStr: ... def rstrip(s: AnyStr) -> AnyStr: ... def split(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ... def splitfields(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ... def join(words: Iterable[AnyStr], sep: AnyStr = ...) -> AnyStr: ... def joinfields(words: Iterable[AnyStr], sep: AnyStr = ...) -> AnyStr: ... def index(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rindex(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def count(s: unicode, sub: unicode, start: int = ..., end: int = ...) 
-> int: ... def find(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def rfind(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ... def atof(s: unicode) -> float: ... def atoi(s: unicode, base: int = ...) -> int: ... def atol(s: unicode, base: int = ...) -> long: ... def ljust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ... def rjust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ... def center(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ... def zfill(s: AnyStr, width: int) -> AnyStr: ... def expandtabs(s: AnyStr, tabsize: int = ...) -> AnyStr: ... def translate(s: str, table: str, deletions: str = ...) -> str: ... def capitalize(s: AnyStr) -> AnyStr: ... def capwords(s: AnyStr, sep: AnyStr = ...) -> AnyStr: ... def maketrans(fromstr: str, tostr: str) -> str: ... def replace(s: AnyStr, old: AnyStr, new: AnyStr, maxreplace: int = ...) -> AnyStr: ... mypy-0.560/typeshed/stdlib/2/strop.pyi0000644€tŠÔÚ€2›s®0000000352613215007212024034 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the 'strop' module.""" from typing import List, Sequence lowercase = ... # type: str uppercase = ... # type: str whitespace = ... # type: str def atof(a: str) -> float: raise DeprecationWarning() def atoi(a: str, base: int = ...) -> int: raise DeprecationWarning() def atol(a: str, base: int = ...) -> long: raise DeprecationWarning() def capitalize(s: str) -> str: raise DeprecationWarning() def count(s: str, sub: str, start: int = ..., end: int = ...) -> int: raise DeprecationWarning() def expandtabs(string: str, tabsize: int = ...) -> str: raise DeprecationWarning() raise OverflowError() def find(s: str, sub: str, start: int = ..., end: int = ...) -> int: raise DeprecationWarning() def join(list: Sequence[str], sep: str = ...) -> str: raise DeprecationWarning() raise OverflowError() def joinfields(list: Sequence[str], sep: str = ...) -> str: raise DeprecationWarning() raise OverflowError() def lower(s: str) -> str: raise DeprecationWarning() def lstrip(s: str) -> str: raise DeprecationWarning() def maketrans(frm: str, to: str) -> str: ... def replace(s: str, old: str, new: str, maxsplit: int = ...) -> str: raise DeprecationWarning() def rfind(s: str, sub: str, start: int = ..., end: int = ...) -> int: raise DeprecationWarning() def rstrip(s: str) -> str: raise DeprecationWarning() def split(s: str, sep: str, maxsplit: int = ...) -> List[str]: raise DeprecationWarning() def splitfields(s: str, sep: str, maxsplit: int = ...) -> List[str]: raise DeprecationWarning() def strip(s: str) -> str: raise DeprecationWarning() def swapcase(s: str) -> str: raise DeprecationWarning() def translate(s: str, table: str, deletechars: str = ...) 
-> str: raise DeprecationWarning() def upper(s: str) -> str: raise DeprecationWarning() mypy-0.560/typeshed/stdlib/2/subprocess.pyi0000644€tŠÔÚ€2›s®0000000747613215007212025065 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for subprocess # Based on http://docs.python.org/2/library/subprocess.html and Python 3 stub from typing import Sequence, Any, Mapping, Callable, Tuple, IO, Union, Optional, List, Text _FILE = Union[None, int, IO[Any]] _TXT = Union[bytes, Text] _CMD = Union[_TXT, Sequence[_TXT]] _ENV = Union[Mapping[bytes, _TXT], Mapping[Text, _TXT]] # Same args as Popen.__init__ def call(args: _CMD, bufsize: int = ..., executable: _TXT = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: _TXT = ..., env: _ENV = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ...) -> int: ... def check_call(args: _CMD, bufsize: int = ..., executable: _TXT = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: _TXT = ..., env: _ENV = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ...) -> int: ... # Same args as Popen.__init__ except for stdout def check_output(args: _CMD, bufsize: int = ..., executable: _TXT = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: _TXT = ..., env: _ENV = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ...) -> bytes: ... PIPE = ... # type: int STDOUT = ... # type: int class CalledProcessError(Exception): returncode = 0 # morally: _CMD cmd = ... # type: Any # morally: Optional[bytes] output = ... # type: Any def __init__(self, returncode: int, cmd: _CMD, output: Optional[bytes] = ...) -> None: ... class Popen: stdin = ... # type: Optional[IO[Any]] stdout = ... # type: Optional[IO[Any]] stderr = ... # type: Optional[IO[Any]] pid = 0 returncode = 0 def __init__(self, args: _CMD, bufsize: int = ..., executable: Optional[_TXT] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_TXT] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Optional[Any] = ..., creationflags: int = ...) -> None: ... def poll(self) -> int: ... def wait(self) -> int: ... # morally: -> Tuple[Optional[bytes], Optional[bytes]] def communicate(self, input: Optional[_TXT] = ...) -> Tuple[Any, Any]: ... def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... def __enter__(self) -> 'Popen': ... def __exit__(self, type, value, traceback) -> bool: ... # Windows-only: STARTUPINFO etc. STD_INPUT_HANDLE = ... # type: Any STD_OUTPUT_HANDLE = ... # type: Any STD_ERROR_HANDLE = ... # type: Any SW_HIDE = ... # type: Any STARTF_USESTDHANDLES = ... # type: Any STARTF_USESHOWWINDOW = ... # type: Any CREATE_NEW_CONSOLE = ... # type: Any CREATE_NEW_PROCESS_GROUP = ... # type: Any mypy-0.560/typeshed/stdlib/2/symbol.pyi0000644€tŠÔÚ€2›s®0000000502013215007212024161 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for symbol (Python 2) from typing import Dict single_input = ... # type: int file_input = ... # type: int eval_input = ... # type: int decorator = ... # type: int decorators = ... # type: int decorated = ... 
# type: int funcdef = ... # type: int parameters = ... # type: int varargslist = ... # type: int fpdef = ... # type: int fplist = ... # type: int stmt = ... # type: int simple_stmt = ... # type: int small_stmt = ... # type: int expr_stmt = ... # type: int augassign = ... # type: int print_stmt = ... # type: int del_stmt = ... # type: int pass_stmt = ... # type: int flow_stmt = ... # type: int break_stmt = ... # type: int continue_stmt = ... # type: int return_stmt = ... # type: int yield_stmt = ... # type: int raise_stmt = ... # type: int import_stmt = ... # type: int import_name = ... # type: int import_from = ... # type: int import_as_name = ... # type: int dotted_as_name = ... # type: int import_as_names = ... # type: int dotted_as_names = ... # type: int dotted_name = ... # type: int global_stmt = ... # type: int exec_stmt = ... # type: int assert_stmt = ... # type: int compound_stmt = ... # type: int if_stmt = ... # type: int while_stmt = ... # type: int for_stmt = ... # type: int try_stmt = ... # type: int with_stmt = ... # type: int with_item = ... # type: int except_clause = ... # type: int suite = ... # type: int testlist_safe = ... # type: int old_test = ... # type: int old_lambdef = ... # type: int test = ... # type: int or_test = ... # type: int and_test = ... # type: int not_test = ... # type: int comparison = ... # type: int comp_op = ... # type: int expr = ... # type: int xor_expr = ... # type: int and_expr = ... # type: int shift_expr = ... # type: int arith_expr = ... # type: int term = ... # type: int factor = ... # type: int power = ... # type: int atom = ... # type: int listmaker = ... # type: int testlist_comp = ... # type: int lambdef = ... # type: int trailer = ... # type: int subscriptlist = ... # type: int subscript = ... # type: int sliceop = ... # type: int exprlist = ... # type: int testlist = ... # type: int dictorsetmaker = ... # type: int classdef = ... # type: int arglist = ... # type: int argument = ... # type: int list_iter = ... # type: int list_for = ... # type: int list_if = ... # type: int comp_iter = ... # type: int comp_for = ... # type: int comp_if = ... # type: int testlist1 = ... # type: int encoding_decl = ... # type: int yield_expr = ... # type: int sym_name = ... # type: Dict[int, str] mypy-0.560/typeshed/stdlib/2/sys.pyi0000644€tŠÔÚ€2›s®0000001121513215007212023475 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stubs for the 'sys' module.""" from typing import ( IO, Union, List, Sequence, Any, Dict, Tuple, BinaryIO, Optional, Callable, overload, Type, ) from types import FrameType, ModuleType, TracebackType, ClassType from mypy_extensions import NoReturn class _flags: bytes_warning = ... # type: int debug = ... # type: int division_new = ... # type: int division_warning = ... # type: int dont_write_bytecode = ... # type: int hash_randomization = ... # type: int ignore_environment = ... # type: int inspect = ... # type: int interactive = ... # type: int no_site = ... # type: int no_user_site = ... # type: int optimize = ... # type: int py3k_warning = ... # type: int tabcheck = ... # type: int unicode = ... # type: int verbose = ... # type: int class _float_info: max = ... # type: float max_exp = ... # type: int max_10_exp = ... # type: int min = ... # type: float min_exp = ... # type: int min_10_exp = ... # type: int dig = ... # type: int mant_dig = ... # type: int epsilon = ... # type: float radix = ... # type: int rounds = ... # type: int class _version_info(Tuple[int, int, int, str, int]): major = 0 minor = 0 micro = 0 releaselevel = ... 
# type: str serial = 0 _mercurial = ... # type: Tuple[str, str, str] api_version = ... # type: int argv = ... # type: List[str] builtin_module_names = ... # type: Tuple[str, ...] byteorder = ... # type: str copyright = ... # type: str dont_write_bytecode = ... # type: bool exec_prefix = ... # type: str executable = ... # type: str flags = ... # type: _flags float_repr_style = ... # type: str hexversion = ... # type: int long_info = ... # type: object maxint = ... # type: int maxsize = ... # type: int maxunicode = ... # type: int modules = ... # type: Dict[str, Any] path = ... # type: List[str] platform = ... # type: str prefix = ... # type: str py3kwarning = ... # type: bool __stderr__ = ... # type: IO[str] __stdin__ = ... # type: IO[str] __stdout__ = ... # type: IO[str] stderr = ... # type: IO[str] stdin = ... # type: IO[str] stdout = ... # type: IO[str] subversion = ... # type: Tuple[str, str, str] version = ... # type: str warnoptions = ... # type: object float_info = ... # type: _float_info version_info = ... # type: _version_info ps1 = ... # type: str ps2 = ... # type: str last_type = ... # type: type last_value = ... # type: BaseException last_traceback = ... # type: TracebackType # TODO precise types meta_path = ... # type: List[Any] path_hooks = ... # type: List[Any] path_importer_cache = ... # type: Dict[str, Any] displayhook = ... # type: Optional[Callable[[int], None]] excepthook = ... # type: Optional[Callable[[type, BaseException, TracebackType], None]] exc_type = ... # type: Optional[type] exc_value = ... # type: Union[BaseException, ClassType] exc_traceback = ... # type: TracebackType class _WindowsVersionType: major = ... # type: Any minor = ... # type: Any build = ... # type: Any platform = ... # type: Any service_pack = ... # type: Any service_pack_major = ... # type: Any service_pack_minor = ... # type: Any suite_mask = ... # type: Any product_type = ... # type: Any def getwindowsversion() -> _WindowsVersionType: ... def _clear_type_cache() -> None: ... def _current_frames() -> Dict[int, FrameType]: ... def _getframe(depth: int = ...) -> FrameType: ... def call_tracing(fn: Any, args: Any) -> Any: ... def __displayhook__(value: int) -> None: ... def __excepthook__(type_: type, value: BaseException, traceback: TracebackType) -> None: ... def exc_clear() -> None: raise DeprecationWarning() # TODO should be a union of tuple, see mypy#1178 def exc_info() -> Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]]: ... # sys.exit() accepts an optional argument of anything printable def exit(arg: Any = ...) -> NoReturn: raise SystemExit() def getcheckinterval() -> int: ... # deprecated def getdefaultencoding() -> str: ... def getdlopenflags() -> int: ... def getfilesystemencoding() -> str: ... # In practice, never returns None def getrefcount(arg: Any) -> int: ... def getrecursionlimit() -> int: ... def getsizeof(obj: object, default: int = ...) -> int: ... def getprofile() -> None: ... def gettrace() -> None: ... def setcheckinterval(interval: int) -> None: ... # deprecated def setdlopenflags(n: int) -> None: ... def setprofile(profilefunc: Any) -> None: ... # TODO type def setrecursionlimit(limit: int) -> None: ... def settrace(tracefunc: Any) -> None: ... 
# TODO type mypy-0.560/typeshed/stdlib/2/tempfile.pyi0000644€tŠÔÚ€2›s®0000000661513215007212024474 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for tempfile # Ron Murawski # based on http: //docs.python.org/3.3/library/tempfile.html # Adapted for Python 2.7 by Michal Pokorny # TODO: Don't use basestring. Use Union[str, bytes] or AnyStr for arguments. # Avoid using Union[str, bytes] for return values, as it implies that # an isinstance() check will often be required, which is inconvenient. from typing import Tuple, IO, Union, AnyStr, Any, overload, Iterator, List, Type, Optional import thread import random TMP_MAX = ... # type: int tempdir = ... # type: str template = ... # type: str _name_sequence = ... # type: Optional[_RandomNameSequence] class _RandomNameSequence: _rng = ... # type: random.Random _rng_pid = ... # type: int characters = ... # type: str mutex = ... # type: thread.LockType rng = ... # type: random.Random def __iter__(self) -> "_RandomNameSequence": ... def next(self) -> str: ... # from os.path: def normcase(self, path: AnyStr) -> AnyStr: ... class _TemporaryFileWrapper(IO[str]): close_called = ... # type: bool delete = ... # type: bool file = ... # type: IO name = ... # type: Any def __init__(self, file: IO, name, delete: bool = ...) -> None: ... def __del__(self) -> None: ... def __enter__(self) -> "_TemporaryFileWrapper": ... def __exit__(self, exc, value, tb) -> bool: ... def __getattr__(self, name: unicode) -> Any: ... def close(self) -> None: ... def unlink(self, path: unicode) -> None: ... # TODO text files def TemporaryFile( mode: Union[bytes, unicode] = ..., bufsize: int = ..., suffix: Union[bytes, unicode] = ..., prefix: Union[bytes, unicode] = ..., dir: Union[bytes, unicode] = ... ) -> _TemporaryFileWrapper: ... def NamedTemporaryFile( mode: Union[bytes, unicode] = ..., bufsize: int = ..., suffix: Union[bytes, unicode] = ..., prefix: Union[bytes, unicode] = ..., dir: Union[bytes, unicode] = ..., delete: bool = ... ) -> _TemporaryFileWrapper: ... def SpooledTemporaryFile( max_size: int = ..., mode: Union[bytes, unicode] = ..., buffering: int = ..., suffix: Union[bytes, unicode] = ..., prefix: Union[bytes, unicode] = ..., dir: Union[bytes, unicode] = ... ) -> _TemporaryFileWrapper: ... class TemporaryDirectory: name = ... # type: Any # Can be str or unicode def __init__(self, suffix: Union[bytes, unicode] = ..., prefix: Union[bytes, unicode] = ..., dir: Union[bytes, unicode] = ...) -> None: ... def cleanup(self) -> None: ... def __enter__(self) -> Any: ... # Can be str or unicode def __exit__(self, type, value, traceback) -> bool: ... @overload def mkstemp() -> Tuple[int, str]: ... @overload def mkstemp(suffix: AnyStr = ..., prefix: AnyStr = ..., dir: Optional[AnyStr] = ..., text: bool = ...) -> Tuple[int, AnyStr]: ... @overload def mkdtemp() -> str: ... @overload def mkdtemp(suffix: AnyStr = ..., prefix: AnyStr = ..., dir: Optional[AnyStr] = ...) -> AnyStr: ... @overload def mktemp() -> str: ... @overload def mktemp(suffix: AnyStr = ..., prefix: AnyStr = ..., dir: Optional[AnyStr] = ...) -> AnyStr: ... def gettempdir() -> str: ... def gettempprefix() -> str: ... def _candidate_tempdir_list() -> List[str]: ... def _get_candidate_names() -> Optional[_RandomNameSequence]: ... def _get_default_tempdir() -> str: ... mypy-0.560/typeshed/stdlib/2/textwrap.pyi0000644€tŠÔÚ€2›s®0000000367113215007212024544 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import AnyStr, List, Dict, Pattern class TextWrapper(object): width: int = ... 
initial_indent: str = ... subsequent_indent: str = ... expand_tabs: bool = ... replace_whitespace: bool = ... fix_sentence_endings: bool = ... drop_whitespace: bool = ... break_long_words: bool = ... break_on_hyphens: bool = ... # Attributes not present in documentation sentence_end_re: Pattern[str] = ... wordsep_re: Pattern[str] = ... wordsep_simple_re: Pattern[str] = ... whitespace_trans: str = ... unicode_whitespace_trans: Dict[int, int] = ... uspace: int = ... x: int = ... def __init__( self, width: int = ..., initial_indent: str = ..., subsequent_indent: str = ..., expand_tabs: bool = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., drop_whitespace: bool = ..., break_on_hyphens: bool = ...) -> None: ... def wrap(self, text: AnyStr) -> List[AnyStr]: ... def fill(self, text: AnyStr) -> AnyStr: ... def wrap( text: AnyStr, width: int = ..., initial_indent: AnyStr = ..., subsequent_indent: AnyStr = ..., expand_tabs: bool = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., drop_whitespace: bool = ..., break_on_hyphens: bool = ...) -> AnyStr: ... def fill( text: AnyStr, width: int =..., initial_indent: AnyStr = ..., subsequent_indent: AnyStr = ..., expand_tabs: bool = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., drop_whitespace: bool = ..., break_on_hyphens: bool = ...) -> AnyStr: ... def dedent(text: AnyStr) -> AnyStr: ... mypy-0.560/typeshed/stdlib/2/thread.pyi0000644€tŠÔÚ€2›s®0000000175613215007212024137 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stubs for the "thread" module.""" from typing import Callable, Any def _count() -> int: ... class error(Exception): ... class LockType: def acquire(self, waitflag: int = ...) -> bool: ... def acquire_lock(self, waitflag: int = ...) -> bool: ... def release(self) -> None: ... def release_lock(self) -> None: ... def locked(self) -> bool: ... def locked_lock(self) -> bool: ... def __enter__(self) -> LockType: ... def __exit__(self, typ: Any, value: Any, traceback: Any) -> None: ... class _local(object): pass class _localdummy(object): pass def start_new(function: Callable[..., Any], args: Any, kwargs: Any = ...) -> int: ... def start_new_thread(function: Callable[..., Any], args: Any, kwargs: Any = ...) -> int: ... def interrupt_main() -> None: ... def exit() -> None: raise SystemExit() def exit_thread() -> Any: raise SystemExit() def allocate_lock() -> LockType: ... def get_ident() -> int: ... def stack_size(size: int = ...) -> int: ... mypy-0.560/typeshed/stdlib/2/time.pyi0000644€tŠÔÚ€2›s®0000000327513215007212023624 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the 'time' module.""" # See https://docs.python.org/2/library/time.html from typing import NamedTuple, Tuple, Union, Any, Optional # ----- variables and constants ----- accept2dyear = False altzone = 0 daylight = 0 timezone = 0 tzname = ... # type: Tuple[str, str] class struct_time(NamedTuple('_struct_time', [('tm_year', int), ('tm_mon', int), ('tm_mday', int), ('tm_hour', int), ('tm_min', int), ('tm_sec', int), ('tm_wday', int), ('tm_yday', int), ('tm_isdst', int)])): def __init__(self, o: Tuple[int, int, int, int, int, int, int, int, int], _arg: Any = ...) -> None: ... def __new__(cls, o: Tuple[int, int, int, int, int, int, int, int, int], _arg: Any = ...) -> struct_time: ... _TIME_TUPLE = Tuple[int, int, int, int, int, int, int, int, int] def asctime(t: Union[struct_time, _TIME_TUPLE] = ...) 
-> str: raise ValueError() def clock() -> float: ... def ctime(secs: Optional[float] = ...) -> str: raise ValueError() def gmtime(secs: Optional[float] = ...) -> struct_time: ... def localtime(secs: Optional[float] = ...) -> struct_time: ... def mktime(t: Union[struct_time, _TIME_TUPLE]) -> float: raise OverflowError() raise ValueError() def sleep(secs: float) -> None: ... def strftime(format: str, t: Union[struct_time, _TIME_TUPLE] = ...) -> str: raise MemoryError() raise ValueError() def strptime(string: str, format: str = ...) -> struct_time: raise ValueError() def time() -> float: raise IOError() def tzset() -> None: ... mypy-0.560/typeshed/stdlib/2/toaiff.pyi0000644€tŠÔÚ€2›s®0000000054513215007212024133 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for toaiff (Python 2) # Source: https://hg.python.org/cpython/file/2.7/Lib/toaiff.py from pipes import Template from typing import Dict, List __all__: List[str] table: Dict[str, Template] t: Template uncompress: Template class error(Exception): ... def toaiff(filename: str) -> str: ... def _toaiff(filename: str, temps: List[str]) -> str: ... mypy-0.560/typeshed/stdlib/2/tokenize.pyi0000644€tŠÔÚ€2›s®0000001025513215007212024512 0ustar jukkaDROPBOX\Domain Users00000000000000# Automatically generated by pytype, manually fixed up. May still contain errors. from typing import Any, Callable, Dict, Generator, Iterator, List, Tuple, Union, Iterable __all__ = ... # type: List[str] __author__ = ... # type: str __credits__ = ... # type: str AMPER = ... # type: int AMPEREQUAL = ... # type: int AT = ... # type: int BACKQUOTE = ... # type: int Binnumber = ... # type: str Bracket = ... # type: str CIRCUMFLEX = ... # type: int CIRCUMFLEXEQUAL = ... # type: int COLON = ... # type: int COMMA = ... # type: int COMMENT = ... # type: int Comment = ... # type: str ContStr = ... # type: str DEDENT = ... # type: int DOT = ... # type: int DOUBLESLASH = ... # type: int DOUBLESLASHEQUAL = ... # type: int DOUBLESTAR = ... # type: int DOUBLESTAREQUAL = ... # type: int Decnumber = ... # type: str Double = ... # type: str Double3 = ... # type: str ENDMARKER = ... # type: int EQEQUAL = ... # type: int EQUAL = ... # type: int ERRORTOKEN = ... # type: int Expfloat = ... # type: str Exponent = ... # type: str Floatnumber = ... # type: str Funny = ... # type: str GREATER = ... # type: int GREATEREQUAL = ... # type: int Hexnumber = ... # type: str INDENT = ... # type: int def ISEOF(x: int) -> bool: ... def ISNONTERMINAL(x: int) -> bool: ... def ISTERMINAL(x: int) -> bool: ... Ignore = ... # type: str Imagnumber = ... # type: str Intnumber = ... # type: str LBRACE = ... # type: int LEFTSHIFT = ... # type: int LEFTSHIFTEQUAL = ... # type: int LESS = ... # type: int LESSEQUAL = ... # type: int LPAR = ... # type: int LSQB = ... # type: int MINEQUAL = ... # type: int MINUS = ... # type: int NAME = ... # type: int NEWLINE = ... # type: int NL = ... # type: int NOTEQUAL = ... # type: int NT_OFFSET = ... # type: int NUMBER = ... # type: int N_TOKENS = ... # type: int Name = ... # type: str Number = ... # type: str OP = ... # type: int Octnumber = ... # type: str Operator = ... # type: str PERCENT = ... # type: int PERCENTEQUAL = ... # type: int PLUS = ... # type: int PLUSEQUAL = ... # type: int PlainToken = ... # type: str Pointfloat = ... # type: str PseudoExtras = ... # type: str PseudoToken = ... # type: str RBRACE = ... # type: int RIGHTSHIFT = ... # type: int RIGHTSHIFTEQUAL = ... # type: int RPAR = ... # type: int RSQB = ... # type: int SEMI = ... # type: int SLASH = ... 
# type: int SLASHEQUAL = ... # type: int STAR = ... # type: int STAREQUAL = ... # type: int STRING = ... # type: int Single = ... # type: str Single3 = ... # type: str Special = ... # type: str String = ... # type: str TILDE = ... # type: int Token = ... # type: str Triple = ... # type: str VBAR = ... # type: int VBAREQUAL = ... # type: int Whitespace = ... # type: str chain = ... # type: type double3prog = ... # type: type endprogs = ... # type: Dict[str, Any] pseudoprog = ... # type: type single3prog = ... # type: type single_quoted = ... # type: Dict[str, str] t = ... # type: str tabsize = ... # type: int tok_name = ... # type: Dict[int, str] tokenprog = ... # type: type triple_quoted = ... # type: Dict[str, str] x = ... # type: str _Pos = Tuple[int, int] _TokenType = Tuple[int, str, _Pos, _Pos, str] def any(*args, **kwargs) -> str: ... def generate_tokens(readline: Callable[[], str]) -> Generator[_TokenType, None, None]: ... def group(*args: str) -> str: ... def maybe(*args: str) -> str: ... def printtoken(type: int, token: str, srow_scol: _Pos, erow_ecol: _Pos, line: str) -> None: ... def tokenize(readline: Callable[[], str], tokeneater: Callable[[Tuple[int, str, _Pos, _Pos, str]], None]) -> None: ... def tokenize_loop(readline: Callable[[], str], tokeneater: Callable[[Tuple[int, str, _Pos, _Pos, str]], None]) -> None: ... def untokenize(iterable: Iterable[_TokenType]) -> str: ... class StopTokenizing(Exception): pass class TokenError(Exception): pass class Untokenizer: prev_col = ... # type: int prev_row = ... # type: int tokens = ... # type: List[str] def __init__(self) -> None: ... def add_whitespace(self, _Pos) -> None: ... def compat(self, token: Tuple[int, Any], iterable: Iterator[_TokenType]) -> None: ... def untokenize(self, iterable: Iterable[_TokenType]) -> str: ... mypy-0.560/typeshed/stdlib/2/types.pyi0000644€tŠÔÚ€2›s®0000001261313215007212024026 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for types # Note, all classes "defined" here require special handling. from typing import ( Any, Callable, Dict, Iterable, Iterator, List, Optional, Tuple, Type, TypeVar, Union, overload, ) _T = TypeVar('_T') class NoneType: ... TypeType = type ObjectType = object IntType = int LongType = int # Really long, but can't reference that due to a mypy import cycle FloatType = float BooleanType = bool ComplexType = complex StringType = str UnicodeType = unicode StringTypes = ... # type: Tuple[Type[StringType], Type[UnicodeType]] BufferType = buffer TupleType = tuple ListType = list DictType = dict DictionaryType = dict class _Cell: cell_contents = ... # type: Any class FunctionType: func_closure = ... # type: Optional[Tuple[_Cell, ...]] func_code = ... # type: CodeType func_defaults = ... # type: Optional[Tuple[Any, ...]] func_dict = ... # type: Dict[str, Any] func_doc = ... # type: Optional[str] func_globals = ... # type: Dict[str, Any] func_name = ... # type: str __closure__ = func_closure __code__ = func_code __defaults__ = func_defaults __dict__ = func_dict __globals__ = func_globals __name__ = func_name def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __get__(self, obj: Optional[object], type: Optional[type]) -> 'UnboundMethodType': ... LambdaType = FunctionType class CodeType: co_argcount = ... # type: int co_cellvars = ... # type: Tuple[str, ...] co_code = ... # type: str co_consts = ... # type: Tuple[Any, ...] co_filename = ... # type: Optional[str] co_firstlineno = ... # type: int co_flags = ... # type: int co_freevars = ... # type: Tuple[str, ...] 
co_lnotab = ... # type: str co_name = ... # type: str co_names = ... # type: Tuple[str, ...] co_nlocals = ... # type: int co_stacksize = ... # type: int co_varnames = ... # type: Tuple[str, ...] class GeneratorType: gi_code = ... # type: CodeType gi_frame = ... # type: FrameType gi_running = ... # type: int def __iter__(self) -> 'GeneratorType': ... def close(self) -> None: ... def next(self) -> Any: ... def send(self, arg: Any) -> Any: ... @overload def throw(self, val: BaseException) -> Any: ... @overload def throw(self, typ: type, val: BaseException = ..., tb: 'TracebackType' = ...) -> Any: ... class ClassType: ... class UnboundMethodType: im_class = ... # type: type im_func = ... # type: FunctionType im_self = ... # type: object __name__ = ... # type: str __func__ = im_func __self__ = im_self def __init__(self, func: Callable, obj: object) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... class InstanceType: __doc__ = ... # type: Optional[str] __class__ = ... # type: type __module__ = ... # type: Any MethodType = UnboundMethodType class BuiltinFunctionType: __self__ = ... # type: Optional[object] def __call__(self, *args: Any, **kwargs: Any) -> Any: ... BuiltinMethodType = BuiltinFunctionType class ModuleType: __doc__ = ... # type: Optional[str] __file__ = ... # type: Optional[str] __name__ = ... # type: str __package__ = ... # type: Optional[str] __path__ = ... # type: Optional[Iterable[str]] __dict__ = ... # type: Dict[str, Any] def __init__(self, name: str, doc: Optional[str] = ...) -> None: ... FileType = file XRangeType = xrange class TracebackType: tb_frame = ... # type: FrameType tb_lasti = ... # type: int tb_lineno = ... # type: int tb_next = ... # type: TracebackType class FrameType: f_back = ... # type: FrameType f_builtins = ... # type: Dict[str, Any] f_code = ... # type: CodeType f_exc_type = ... # type: None f_exc_value = ... # type: None f_exc_traceback = ... # type: None f_globals = ... # type: Dict[str, Any] f_lasti = ... # type: int f_lineno = ... # type: int f_locals = ... # type: Dict[str, Any] f_restricted = ... # type: bool f_trace = ... # type: Callable[[], None] def clear(self) -> None: ... SliceType = slice class EllipsisType: ... class DictProxyType: # TODO is it possible to have non-string keys? # no __init__ def copy(self) -> dict: ... def get(self, key: str, default: _T = ...) -> Union[Any, _T]: ... def has_key(self, key: str) -> bool: ... def items(self) -> List[Tuple[str, Any]]: ... def iteritems(self) -> Iterator[Tuple[str, Any]]: ... def iterkeys(self) -> Iterator[str]: ... def itervalues(self) -> Iterator[Any]: ... def keys(self) -> List[str]: ... def values(self) -> List[Any]: ... def __contains__(self, key: str) -> bool: ... def __getitem__(self, key: str) -> Any: ... def __iter__(self) -> Iterator[str]: ... def __len__(self) -> int: ... class NotImplementedType: ... class GetSetDescriptorType: __name__ = ... # type: str __objclass__ = ... # type: type def __get__(self, obj: Any, type: type = ...) -> Any: ... def __set__(self, obj: Any) -> None: ... def __delete__(self, obj: Any) -> None: ... # Same type on Jython, different on CPython and PyPy, unknown on IronPython. class MemberDescriptorType: __name__ = ... # type: str __objclass__ = ... # type: type def __get__(self, obj: Any, type: type = ...) -> Any: ... def __set__(self, obj: Any) -> None: ... def __delete__(self, obj: Any) -> None: ... 
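As a quick sanity check on the Python 2 types stub above, the aliases line up with what CPython 2 exposes at runtime; a small sketch (the names add and C are illustrative):

import types

def add(a, b):
    return a + b

class C(object):
    def method(self):
        return 42

assert isinstance(add, types.FunctionType)
assert isinstance(add.func_code, types.CodeType)     # Python 2 spelling of __code__
assert isinstance(C.__dict__, types.DictProxyType)   # class namespaces are dictproxies
assert isinstance(C().method, types.MethodType)      # bound methods are instancemethod
assert isinstance(types, types.ModuleType)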
mypy-0.560/typeshed/stdlib/2/typing.pyi0000644€tŠÔÚ€2›s®0000003436613215007212024205 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for typing (Python 2.7) from abc import abstractmethod, ABCMeta from types import CodeType, FrameType, TracebackType import collections # Needed by aliases like DefaultDict, see mypy issue 2986 # Definitions of special type checking related constructs. Their definitions # are not used, so their value does not matter. overload = object() Any = object() TypeVar = object() _promote = object() no_type_check = object() class _SpecialForm(object): def __getitem__(self, typeargs: Any) -> object: ... Tuple: _SpecialForm = ... Generic: _SpecialForm = ... Protocol: _SpecialForm = ... Callable: _SpecialForm = ... Type: _SpecialForm = ... ClassVar: _SpecialForm = ... class GenericMeta(type): ... # Return type that indicates a function does not return. # This type is equivalent to the None type, but the no-op Union is necessary to # distinguish the None type from the None value. NoReturn = Union[None] # Type aliases and type constructors class TypeAlias: # Class for defining generic aliases for library types. def __init__(self, target_type: type) -> None: ... def __getitem__(self, typeargs: Any) -> Any: ... Union = TypeAlias(object) Optional = TypeAlias(object) List = TypeAlias(object) Dict = TypeAlias(object) DefaultDict = TypeAlias(object) Set = TypeAlias(object) FrozenSet = TypeAlias(object) Counter = TypeAlias(object) Deque = TypeAlias(object) # Predefined type variables. AnyStr = TypeVar('AnyStr', str, unicode) # Abstract base classes. # These type variables are used by the container types. _T = TypeVar('_T') _S = TypeVar('_S') _KT = TypeVar('_KT') # Key type. _VT = TypeVar('_VT') # Value type. _T_co = TypeVar('_T_co', covariant=True) # Any type covariant containers. _V_co = TypeVar('_V_co', covariant=True) # Any type covariant containers. _KT_co = TypeVar('_KT_co', covariant=True) # Key type covariant containers. _VT_co = TypeVar('_VT_co', covariant=True) # Value type covariant containers. _T_contra = TypeVar('_T_contra', contravariant=True) # Ditto contravariant. _TC = TypeVar('_TC', bound=Type[object]) def runtime(cls: _TC) -> _TC: ... @runtime class SupportsInt(Protocol, metaclass=ABCMeta): @abstractmethod def __int__(self) -> int: ... @runtime class SupportsFloat(Protocol, metaclass=ABCMeta): @abstractmethod def __float__(self) -> float: ... @runtime class SupportsComplex(Protocol, metaclass=ABCMeta): @abstractmethod def __complex__(self) -> complex: ... @runtime class SupportsAbs(Protocol[_T_co]): @abstractmethod def __abs__(self) -> _T_co: ... @runtime class SupportsRound(Protocol[_T_co]): @abstractmethod def __round__(self, ndigits: int = ...) -> _T_co: ... @runtime class Reversible(Protocol[_T_co]): @abstractmethod def __reversed__(self) -> Iterator[_T_co]: ... @runtime class Sized(Protocol, metaclass=ABCMeta): @abstractmethod def __len__(self) -> int: ... @runtime class Hashable(Protocol, metaclass=ABCMeta): # TODO: This is special, in that a subclass of a hashable class may not be hashable # (for example, list vs. object). It's not obvious how to represent this. This class # is currently mostly useless for static checking. @abstractmethod def __hash__(self) -> int: ... @runtime class Iterable(Protocol[_T_co]): @abstractmethod def __iter__(self) -> Iterator[_T_co]: ... @runtime class Iterator(Iterable[_T_co], Protocol[_T_co]): @abstractmethod def next(self) -> _T_co: ... 
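To make the Iterable/Iterator protocols above concrete: under Python 2 the iterator method is next() rather than __next__(), exactly as the stub declares. A hedged sketch of a conforming class (Countdown is illustrative only):

from typing import Iterator

class Countdown(object):
    """Yields n, n-1, ..., 1 and then stops."""
    def __init__(self, n):
        # type: (int) -> None
        self.n = n
    def __iter__(self):
        # type: () -> Iterator[int]
        return self
    def next(self):
        # type: () -> int
        if self.n <= 0:
            raise StopIteration
        self.n -= 1
        return self.n + 1

print(list(Countdown(3)))   # [3, 2, 1]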
class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]): @abstractmethod def next(self) -> _T_co: ... @abstractmethod def send(self, value: _T_contra) -> _T_co: ... @abstractmethod def throw(self, typ: Type[BaseException], val: Optional[BaseException] = ..., # TODO: tb should be TracebackType but that's defined in types tb: Any = ...) -> _T_co: ... @abstractmethod def close(self) -> None: ... gi_code = ... # type: CodeType gi_frame = ... # type: FrameType gi_running = ... # type: bool @runtime class Container(Protocol[_T_co]): @abstractmethod def __contains__(self, x: object) -> bool: ... class Sequence(Iterable[_T_co], Container[_T_co], Sized, Reversible[_T_co], Generic[_T_co]): @overload @abstractmethod def __getitem__(self, i: int) -> _T_co: ... @overload @abstractmethod def __getitem__(self, s: slice) -> Sequence[_T_co]: ... # Mixin methods def index(self, x: Any) -> int: ... def count(self, x: Any) -> int: ... def __contains__(self, x: object) -> bool: ... def __iter__(self) -> Iterator[_T_co]: ... def __reversed__(self) -> Iterator[_T_co]: ... class MutableSequence(Sequence[_T], Generic[_T]): @abstractmethod def insert(self, index: int, object: _T) -> None: ... @overload @abstractmethod def __setitem__(self, i: int, o: _T) -> None: ... @overload @abstractmethod def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... @overload @abstractmethod def __delitem__(self, i: int) -> None: ... @overload @abstractmethod def __delitem__(self, i: slice) -> None: ... # Mixin methods def append(self, object: _T) -> None: ... def extend(self, iterable: Iterable[_T]) -> None: ... def reverse(self) -> None: ... def pop(self, index: int = ...) -> _T: ... def remove(self, object: _T) -> None: ... def __iadd__(self, x: Iterable[_T]) -> MutableSequence[_T]: ... class AbstractSet(Sized, Iterable[_T_co], Container[_T_co], Generic[_T_co]): @abstractmethod def __contains__(self, x: object) -> bool: ... # Mixin methods def __le__(self, s: AbstractSet[Any]) -> bool: ... def __lt__(self, s: AbstractSet[Any]) -> bool: ... def __gt__(self, s: AbstractSet[Any]) -> bool: ... def __ge__(self, s: AbstractSet[Any]) -> bool: ... def __and__(self, s: AbstractSet[Any]) -> AbstractSet[_T_co]: ... def __or__(self, s: AbstractSet[_T]) -> AbstractSet[Union[_T_co, _T]]: ... def __sub__(self, s: AbstractSet[Any]) -> AbstractSet[_T_co]: ... def __xor__(self, s: AbstractSet[_T]) -> AbstractSet[Union[_T_co, _T]]: ... # TODO: argument can be any container? def isdisjoint(self, s: AbstractSet[Any]) -> bool: ... class MutableSet(AbstractSet[_T], Generic[_T]): @abstractmethod def add(self, x: _T) -> None: ... @abstractmethod def discard(self, x: _T) -> None: ... # Mixin methods def clear(self) -> None: ... def pop(self) -> _T: ... def remove(self, element: _T) -> None: ... def __ior__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: ... def __iand__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ... def __ixor__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: ... def __isub__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ... class MappingView(Sized): def __len__(self) -> int: ... class ItemsView(AbstractSet[Tuple[_KT_co, _VT_co]], MappingView, Generic[_KT_co, _VT_co]): def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ... class KeysView(AbstractSet[_KT_co], MappingView, Generic[_KT_co]): def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_KT_co]: ... 
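The abstract container types above (Sequence, MutableSequence, AbstractSet, ...) are mostly consumed as annotation targets; a sketch of typical Python 2 usage with type comments, where the function names are purely illustrative:

from typing import AbstractSet, List, MutableSequence, Sequence

def drop_seen(items, seen):
    # type: (Sequence[str], AbstractSet[str]) -> List[str]
    # __contains__ comes from Container, iteration from Iterable.
    return [item for item in items if item not in seen]

def extend_unique(dest, items):
    # type: (MutableSequence[str], Sequence[str]) -> None
    for item in items:
        if item not in dest:
            dest.append(item)        # append() is a MutableSequence mixin method

print(drop_seen(['a', 'b', 'c'], {'b'}))   # ['a', 'c']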
class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]): def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_VT_co]: ... @runtime class ContextManager(Protocol[_T_co]): def __enter__(self) -> _T_co: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType]) -> Optional[bool]: ... class Mapping(Iterable[_KT], Container[_KT], Sized, Generic[_KT, _VT_co]): # TODO: We wish the key type could also be covariant, but that doesn't work, # see discussion in https: //github.com/python/typing/pull/273. @abstractmethod def __getitem__(self, k: _KT) -> _VT_co: ... # Mixin methods @overload def get(self, k: _KT) -> Optional[_VT_co]: ... @overload def get(self, k: _KT, default: Union[_VT_co, _T]) -> Union[_VT_co, _T]: ... def keys(self) -> list[_KT]: ... def values(self) -> list[_VT_co]: ... def items(self) -> list[Tuple[_KT, _VT_co]]: ... def iterkeys(self) -> Iterator[_KT]: ... def itervalues(self) -> Iterator[_VT_co]: ... def iteritems(self) -> Iterator[Tuple[_KT, _VT_co]]: ... def __contains__(self, o: object) -> bool: ... class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): @abstractmethod def __setitem__(self, k: _KT, v: _VT) -> None: ... @abstractmethod def __delitem__(self, v: _KT) -> None: ... def clear(self) -> None: ... @overload def pop(self, k: _KT) -> _VT: ... @overload def pop(self, k: _KT, default: Union[_VT, _T] = ...) -> Union[_VT, _T]: ... def popitem(self) -> Tuple[_KT, _VT]: ... def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ... @overload def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def update(self, __m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... @overload def update(self, **kwargs: _VT) -> None: ... Text = unicode TYPE_CHECKING = True class IO(Iterator[AnyStr], Generic[AnyStr]): # TODO detach # TODO use abstract properties @property def mode(self) -> str: ... @property def name(self) -> str: ... @abstractmethod def close(self) -> None: ... @property def closed(self) -> bool: ... @abstractmethod def fileno(self) -> int: ... @abstractmethod def flush(self) -> None: ... @abstractmethod def isatty(self) -> bool: ... # TODO what if n is None? @abstractmethod def read(self, n: int = ...) -> AnyStr: ... @abstractmethod def readable(self) -> bool: ... @abstractmethod def readline(self, limit: int = ...) -> AnyStr: ... @abstractmethod def readlines(self, hint: int = ...) -> list[AnyStr]: ... @abstractmethod def seek(self, offset: int, whence: int = ...) -> int: ... @abstractmethod def seekable(self) -> bool: ... @abstractmethod def tell(self) -> int: ... @abstractmethod def truncate(self, size: Optional[int] = ...) -> int: ... @abstractmethod def writable(self) -> bool: ... # TODO buffer objects @abstractmethod def write(self, s: AnyStr) -> int: ... @abstractmethod def writelines(self, lines: Iterable[AnyStr]) -> None: ... @abstractmethod def next(self) -> AnyStr: ... @abstractmethod def __iter__(self) -> Iterator[AnyStr]: ... @abstractmethod def __enter__(self) -> 'IO[AnyStr]': ... @abstractmethod def __exit__(self, t: Optional[Type[BaseException]], value: Optional[BaseException], # TODO: traceback should be TracebackType but that's defined in types traceback: Optional[Any]) -> bool: ... class BinaryIO(IO[str]): # TODO readinto # TODO read1? # TODO peek? @abstractmethod def __enter__(self) -> BinaryIO: ... class TextIO(IO[unicode]): # TODO use abstractproperty @property def buffer(self) -> BinaryIO: ... 
@property def encoding(self) -> str: ... @property def errors(self) -> Optional[str]: ... @property def line_buffering(self) -> bool: ... @property def newlines(self) -> Any: ... # None, str or tuple @abstractmethod def __enter__(self) -> TextIO: ... class ByteString(Sequence[int]): ... class Match(Generic[AnyStr]): pos = 0 endpos = 0 lastindex = 0 lastgroup = ... # type: AnyStr string = ... # type: AnyStr # The regular expression object whose match() or search() method produced # this match instance. re = ... # type: 'Pattern[AnyStr]' def expand(self, template: AnyStr) -> AnyStr: ... @overload def group(self, group1: int = ...) -> AnyStr: ... @overload def group(self, group1: str) -> AnyStr: ... @overload def group(self, group1: int, group2: int, *groups: int) -> Sequence[AnyStr]: ... @overload def group(self, group1: str, group2: str, *groups: str) -> Sequence[AnyStr]: ... def groups(self, default: AnyStr = ...) -> Sequence[AnyStr]: ... def groupdict(self, default: AnyStr = ...) -> dict[str, AnyStr]: ... def start(self, group: Union[int, str] = ...) -> int: ... def end(self, group: Union[int, str] = ...) -> int: ... def span(self, group: Union[int, str] = ...) -> Tuple[int, int]: ... class Pattern(Generic[AnyStr]): flags = 0 groupindex = 0 groups = 0 pattern = ... # type: AnyStr def search(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Match[AnyStr]: ... def match(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Match[AnyStr]: ... def split(self, string: AnyStr, maxsplit: int = ...) -> list[AnyStr]: ... def findall(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> list[Any]: ... def finditer(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Iterator[Match[AnyStr]]: ... @overload def sub(self, repl: AnyStr, string: AnyStr, count: int = ...) -> AnyStr: ... @overload def sub(self, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ...) -> AnyStr: ... @overload def subn(self, repl: AnyStr, string: AnyStr, count: int = ...) -> Tuple[AnyStr, int]: ... @overload def subn(self, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ...) -> Tuple[AnyStr, int]: ... # Functions def get_type_hints(obj: Callable, globalns: Optional[dict[Text, Any]] = ..., localns: Optional[dict[Text, Any]] = ...) -> None: ... def cast(tp: Type[_T], obj: Any) -> _T: ... # Type constructors # NamedTuple is special-cased in the type checker class NamedTuple(tuple): _fields = ... # type: Tuple[str, ...] def __init__(self, typename: str, fields: Iterable[Tuple[str, Any]] = ..., *, verbose: bool = ..., rename: bool = ..., **kwargs: Any) -> None: ... @classmethod def _make(cls, iterable: Iterable[Any]) -> NamedTuple: ... def _asdict(self) -> dict: ... def _replace(self, **kwargs: Any) -> NamedTuple: ... def NewType(name: str, tp: Type[_T]) -> Type[_T]: ... mypy-0.560/typeshed/stdlib/2/unittest.pyi0000644€tŠÔÚ€2›s®0000002247313215007212024546 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for unittest # Based on http://docs.python.org/2.7/library/unittest.html # Only a subset of functionality is included. from typing import (Any, Callable, Dict, FrozenSet, Iterable, List, Optional, overload, Pattern, Sequence, Set, TextIO, Tuple, Type, TypeVar, Union) from abc import abstractmethod, ABCMeta import types _T = TypeVar('_T') _FT = TypeVar('_FT') class Testable(metaclass=ABCMeta): @abstractmethod def run(self, result: 'TestResult') -> None: ... @abstractmethod def debug(self) -> None: ... @abstractmethod def countTestCases(self) -> int: ... 
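NamedTuple and NewType, declared a little earlier in the typing stub, behave as plain runtime factories; a brief sketch in which the field and type names are illustrative:

from typing import NamedTuple, NewType

Point = NamedTuple('Point', [('x', int), ('y', int)])
UserId = NewType('UserId', int)

p = Point(3, 4)
print(p.x + p.y)        # 7
print(p._asdict())      # OrderedDict([('x', 3), ('y', 4)])

uid = UserId(7)         # at runtime UserId is just a pass-through to int
print(uid + 1)          # 8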
# TODO ABC for test runners? class TestResult: errors = ... # type: List[Tuple[Testable, str]] failures = ... # type: List[Tuple[Testable, str]] testsRun = 0 shouldStop = ... # type: bool def wasSuccessful(self) -> bool: ... def stop(self) -> None: ... def startTest(self, test: Testable) -> None: ... def stopTest(self, test: Testable) -> None: ... def addError(self, test: Testable, err: Tuple[type, Any, Any]) -> None: ... # TODO def addFailure(self, test: Testable, err: Tuple[type, Any, Any]) -> None: ... # TODO def addSuccess(self, test: Testable) -> None: ... class _AssertRaisesBaseContext: expected = ... # type: Any failureException = ... # type: Type[BaseException] obj_name = ... # type: str expected_regex = ... # type: Pattern[str] class _AssertRaisesContext(_AssertRaisesBaseContext): exception = ... # type: Any # TODO precise type def __enter__(self) -> _AssertRaisesContext: ... def __exit__(self, exc_type, exc_value, tb) -> bool: ... class TestCase(Testable): failureException = ... # type: Type[BaseException] def __init__(self, methodName: str = ...) -> None: ... def setUp(self) -> None: ... def tearDown(self) -> None: ... @classmethod def setUpClass(cls) -> None: ... @classmethod def tearDownClass(cls) -> None: ... def run(self, result: TestResult = ...) -> None: ... def debug(self) -> None: ... def assert_(self, expr: Any, msg: object = ...) -> None: ... def failUnless(self, expr: Any, msg: object = ...) -> None: ... def assertTrue(self, expr: Any, msg: object = ...) -> None: ... def assertEqual(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertEquals(self, first: Any, second: Any, msg: object = ...) -> None: ... def failUnlessEqual(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertNotEqual(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertNotEquals(self, first: Any, second: Any, msg: object = ...) -> None: ... def failIfEqual(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertAlmostEqual(self, first: float, second: float, places: int = ..., msg: object = ..., delta: float = ...) -> None: ... def assertAlmostEquals(self, first: float, second: float, places: int = ..., msg: object = ..., delta: float = ...) -> None: ... def failUnlessAlmostEqual(self, first: float, second: float, places: int = ..., msg: object = ...) -> None: ... def assertNotAlmostEqual(self, first: float, second: float, places: int = ..., msg: object = ..., delta: float = ...) -> None: ... def assertNotAlmostEquals(self, first: float, second: float, places: int = ..., msg: object = ..., delta: float = ...) -> None: ... def failIfAlmostEqual(self, first: float, second: float, places: int = ..., msg: object = ..., delta: float = ...) -> None: ... def assertGreater(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertGreaterEqual(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertMultiLineEqual(self, first: str, second: str, msg: object = ...) -> None: ... def assertSequenceEqual(self, first: Sequence[Any], second: Sequence[Any], msg: object = ..., seq_type: type = ...) -> None: ... def assertListEqual(self, first: List[Any], second: List[Any], msg: object = ...) -> None: ... def assertTupleEqual(self, first: Tuple[Any, ...], second: Tuple[Any, ...], msg: object = ...) -> None: ... def assertSetEqual(self, first: Union[Set[Any], FrozenSet[Any]], second: Union[Set[Any], FrozenSet[Any]], msg: object = ...) -> None: ... 
def assertDictEqual(self, first: Dict[Any, Any], second: Dict[Any, Any], msg: object = ...) -> None: ... def assertLess(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertLessEqual(self, first: Any, second: Any, msg: object = ...) -> None: ... def assertRaises(self, expected_exception: type, *args: Any, **kwargs: Any) -> Any: ... def failUnlessRaises(self, expected_exception: type, *args: Any, **kwargs: Any) -> Any: ... def failIf(self, expr: Any, msg: object = ...) -> None: ... def assertFalse(self, expr: Any, msg: object = ...) -> None: ... def assertIs(self, first: object, second: object, msg: object = ...) -> None: ... def assertIsNot(self, first: object, second: object, msg: object = ...) -> None: ... def assertIsNone(self, expr: Any, msg: object = ...) -> None: ... def assertIsNotNone(self, expr: Any, msg: object = ...) -> None: ... def assertIn(self, first: _T, second: Iterable[_T], msg: object = ...) -> None: ... def assertNotIn(self, first: _T, second: Iterable[_T], msg: object = ...) -> None: ... def assertIsInstance(self, obj: Any, cls: Union[type, Tuple[type, ...]], msg: object = ...) -> None: ... def assertNotIsInstance(self, obj: Any, cls: Union[type, Tuple[type, ...]], msg: object = ...) -> None: ... def fail(self, msg: object = ...) -> None: ... def countTestCases(self) -> int: ... def defaultTestResult(self) -> TestResult: ... def id(self) -> str: ... def shortDescription(self) -> str: ... # May return None def addCleanup(function: Any, *args: Any, **kwargs: Any) -> None: ... def skipTest(self, reason: Any) -> None: ... class CallableTestCase(Testable): def __init__(self, testFunc: Callable[[], None], setUp: Callable[[], None] = ..., tearDown: Callable[[], None] = ..., description: str = ...) -> None: ... def run(self, result: TestResult) -> None: ... def debug(self) -> None: ... def countTestCases(self) -> int: ... class TestSuite(Testable): def __init__(self, tests: Iterable[Testable] = ...) -> None: ... def addTest(self, test: Testable) -> None: ... def addTests(self, tests: Iterable[Testable]) -> None: ... def run(self, result: TestResult) -> None: ... def debug(self) -> None: ... def countTestCases(self) -> int: ... class TestLoader: testMethodPrefix = ... # type: str sortTestMethodsUsing = ... # type: Optional[Callable[[str, str], int]] suiteClass = ... # type: Callable[[List[TestCase]], TestSuite] def loadTestsFromTestCase(self, testCaseClass: Type[TestCase]) -> TestSuite: ... def loadTestsFromModule(self, module: str = ..., use_load_tests: bool = ...) -> TestSuite: ... def loadTestsFromName(self, name: str = ..., module: Optional[str] = ...) -> TestSuite: ... def loadTestsFromNames(self, names: List[str] = ..., module: Optional[str] = ...) -> TestSuite: ... def discover(self, start_dir: str, pattern: str = ..., top_level_dir: Optional[str] = ...) -> TestSuite: ... def getTestCaseNames(self, testCaseClass: Type[TestCase] = ...) -> List[str]: ... defaultTestLoader = TestLoader class TextTestRunner: def __init__(self, stream: Optional[TextIO] = ..., descriptions: bool = ..., verbosity: int = ..., failfast: bool = ...) -> None: ... class SkipTest(Exception): ... # TODO precise types def skipUnless(condition: Any, reason: Union[str, unicode]) -> Any: ... def skipIf(condition: Any, reason: Union[str, unicode]) -> Any: ... def expectedFailure(func: _FT) -> _FT: ... def skip(reason: Union[str, unicode]) -> Any: ... # not really documented class TestProgram: result = ... 
# type: TestResult def main(module: str = ..., defaultTest: Optional[str] = ..., argv: Optional[Sequence[str]] = ..., testRunner: Union[Type[TextTestRunner], TextTestRunner, None] = ..., testLoader: TestLoader = ..., exit: bool = ..., verbosity: int = ..., failfast: Optional[bool] = ..., catchbreak: Optional[bool] = ..., buffer: Optional[bool] = ...) -> TestProgram: ... # private but occasionally used util = ... # type: types.ModuleType mypy-0.560/typeshed/stdlib/2/urllib.pyi0000644€tŠÔÚ€2›s®0000001144713215007212024157 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Mapping, Union, Tuple, Sequence, IO def url2pathname(pathname: str) -> str: ... def pathname2url(pathname: str) -> str: ... def urlopen(url: str, data=..., proxies: Mapping[str, str] = ..., context=...) -> IO[Any]: ... def urlretrieve(url, filename=..., reporthook=..., data=..., context=...): ... def urlcleanup() -> None: ... class ContentTooShortError(IOError): content = ... # type: Any def __init__(self, message, content) -> None: ... class URLopener: version = ... # type: Any proxies = ... # type: Any key_file = ... # type: Any cert_file = ... # type: Any context = ... # type: Any addheaders = ... # type: Any tempcache = ... # type: Any ftpcache = ... # type: Any def __init__(self, proxies: Mapping[str, str] = ..., context=..., **x509) -> None: ... def __del__(self): ... def close(self): ... def cleanup(self): ... def addheader(self, *args): ... type = ... # type: Any def open(self, fullurl: str, data=...): ... def open_unknown(self, fullurl, data=...): ... def open_unknown_proxy(self, proxy, fullurl, data=...): ... def retrieve(self, url, filename=..., reporthook=..., data=...): ... def open_http(self, url, data=...): ... def http_error(self, url, fp, errcode, errmsg, headers, data=...): ... def http_error_default(self, url, fp, errcode, errmsg, headers): ... def open_https(self, url, data=...): ... def open_file(self, url): ... def open_local_file(self, url): ... def open_ftp(self, url): ... def open_data(self, url, data=...): ... class FancyURLopener(URLopener): auth_cache = ... # type: Any tries = ... # type: Any maxtries = ... # type: Any def __init__(self, *args, **kwargs) -> None: ... def http_error_default(self, url, fp, errcode, errmsg, headers): ... def http_error_302(self, url, fp, errcode, errmsg, headers, data=...): ... def redirect_internal(self, url, fp, errcode, errmsg, headers, data): ... def http_error_301(self, url, fp, errcode, errmsg, headers, data=...): ... def http_error_303(self, url, fp, errcode, errmsg, headers, data=...): ... def http_error_307(self, url, fp, errcode, errmsg, headers, data=...): ... def http_error_401(self, url, fp, errcode, errmsg, headers, data=...): ... def http_error_407(self, url, fp, errcode, errmsg, headers, data=...): ... def retry_proxy_http_basic_auth(self, url, realm, data=...): ... def retry_proxy_https_basic_auth(self, url, realm, data=...): ... def retry_http_basic_auth(self, url, realm, data=...): ... def retry_https_basic_auth(self, url, realm, data=...): ... def get_user_passwd(self, host, realm, clear_cache=...): ... def prompt_user_passwd(self, host, realm): ... class ftpwrapper: user = ... # type: Any passwd = ... # type: Any host = ... # type: Any port = ... # type: Any dirs = ... # type: Any timeout = ... # type: Any refcount = ... # type: Any keepalive = ... # type: Any def __init__(self, user, passwd, host, port, dirs, timeout=..., persistent=...) -> None: ... busy = ... # type: Any ftp = ... # type: Any def init(self): ... 
def retrfile(self, file, type): ... def endtransfer(self): ... def close(self): ... def file_close(self): ... def real_close(self): ... class addbase: fp = ... # type: Any read = ... # type: Any readline = ... # type: Any readlines = ... # type: Any fileno = ... # type: Any __iter__ = ... # type: Any next = ... # type: Any def __init__(self, fp) -> None: ... def close(self): ... class addclosehook(addbase): closehook = ... # type: Any hookargs = ... # type: Any def __init__(self, fp, closehook, *hookargs) -> None: ... def close(self): ... class addinfo(addbase): headers = ... # type: Any def __init__(self, fp, headers) -> None: ... def info(self): ... class addinfourl(addbase): headers = ... # type: Any url = ... # type: Any code = ... # type: Any def __init__(self, fp, headers, url, code=...) -> None: ... def info(self): ... def getcode(self): ... def geturl(self): ... def unwrap(url): ... def splittype(url): ... def splithost(url): ... def splituser(host): ... def splitpasswd(user): ... def splitport(host): ... def splitnport(host, defport=...): ... def splitquery(url): ... def splittag(url): ... def splitattr(url): ... def splitvalue(attr): ... def unquote(s: str) -> str: ... def unquote_plus(s: str) -> str: ... def quote(s: str, safe=...) -> str: ... def quote_plus(s: str, safe=...) -> str: ... def urlencode(query: Union[Sequence[Tuple[Any, Any]], Mapping[Any, Any]], doseq=...) -> str: ... def getproxies() -> Mapping[str, str]: ... def proxy_bypass(host): ... # Names in __all__ with no definition: # basejoin mypy-0.560/typeshed/stdlib/2/urllib2.pyi0000644€tŠÔÚ€2›s®0000001532013215007212024233 0ustar jukkaDROPBOX\Domain Users00000000000000 import ssl from typing import Any, AnyStr, Dict, List, Union, Optional, Mapping, Callable, Sequence, Tuple from urllib import addinfourl from httplib import HTTPResponse _string = Union[str, unicode] class URLError(IOError): reason = ... # type: Union[str, BaseException] class HTTPError(URLError, addinfourl): code = ... # type: int headers = ... # type: Dict[str, str] class Request(object): host = ... # type: str port = ... # type: str data = ... # type: str headers = ... # type: Dict[str, str] unverifiable = ... # type: bool type = ... # type: Optional[str] origin_req_host = ... unredirected_hdrs = ... def __init__(self, url: str, data: Optional[str] = ..., headers: Dict[str, str] = ..., origin_req_host: Optional[str] = ..., unverifiable: bool = ...) -> None: ... def __getattr__(self, attr): ... def get_method(self) -> str: ... def add_data(self, data) -> None: ... def has_data(self) -> bool: ... def get_data(self) -> str: ... def get_full_url(self) -> str: ... def get_type(self): ... def get_host(self) -> str: ... def get_selector(self): ... def set_proxy(self, host, type) -> None: ... def has_proxy(self) -> bool: ... def get_origin_req_host(self) -> str: ... def is_unverifiable(self) -> bool: ... def add_header(self, key: str, val: str) -> None: ... def add_unredirected_header(self, key: str, val: str) -> None: ... def has_header(self, header_name: str) -> bool: ... def get_header(self, header_name: str, default: Optional[str] = ...) -> str: ... def header_items(self): ... class OpenerDirector(object): def add_handler(self, handler: BaseHandler) -> None: ... def open(self, url: Union[Request, _string], data: Optional[_string] = ..., timeout: int = ...): ... def error(self, proto: _string, *args: Any): ... 
def urlopen(url: Union[Request, _string], data: Optional[_string] = ..., timeout: int = ..., cafile: Optional[_string] = ..., capath: Optional[_string] = ..., cadefault: bool = ..., context: Optional[ssl.SSLContext] = ...): ... def install_opener(opener: OpenerDirector) -> None: ... def build_opener(*handlers: BaseHandler) -> OpenerDirector: ... class BaseHandler: handler_order = ... # type: int parent = ... # type: OpenerDirector def add_parent(self, parent: OpenerDirector) -> None: ... def close(self) -> None: ... def __lt__(self, other: Any) -> bool: ... class HTTPErrorProcessor(BaseHandler): def http_response(self, request, response): ... class HTTPDefaultErrorHandler(BaseHandler): def http_error_default(self, req, fp, code, msg, hdrs): ... class HTTPRedirectHandler(BaseHandler): max_repeats = ... # type: int max_redirections = ... # type: int def redirect_request(self, req, fp, code, msg, headers, newurl): ... def http_error_301(self, req, fp, code, msg, headers): ... def http_error_302(self, req, fp, code, msg, headers): ... def http_error_303(self, req, fp, code, msg, headers): ... def http_error_307(self, req, fp, code, msg, headers): ... inf_msg = ... # type: str class ProxyHandler(BaseHandler): def __init__(self, proxies=None): ... def proxy_open(self, req, proxy, type): ... class HTTPPasswordMgr: def __init__(self) -> None: ... def add_password(self, realm: _string, uri: Union[_string, Sequence[_string]], user: _string, passwd: _string) -> None: ... def find_user_password(self, realm: _string, authuri: _string) -> Tuple[Any, Any]: ... def reduce_uri(self, uri: _string, default_port: bool = ...) -> Tuple[Any, Any]: ... def is_suburi(self, base: _string, test: _string) -> bool: ... class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): ... class AbstractBasicAuthHandler: def __init__(self, password_mgr: Optional[HTTPPasswordMgr] = ...) -> None: ... def http_error_auth_reqed(self, authreq, host, req, headers): ... def retry_http_basic_auth(self, host, req, realm): ... class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): auth_header = ... # type: str def http_error_401(self, req, fp, code, msg, headers): ... class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): auth_header = ... # type: str def http_error_407(self, req, fp, code, msg, headers): ... class AbstractDigestAuthHandler: def __init__(self, passwd: Optional[HTTPPasswordMgr] = ...) -> None: ... def reset_retry_count(self) -> None: ... def http_error_auth_reqed(self, auth_header: str, host: str, req: Request, headers: Mapping[str, str]) -> None: ... def retry_http_digest_auth(self, req: Request, auth: str) -> Optional[HTTPResponse]: ... def get_cnonce(self, nonce: str) -> str: ... def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str: ... def get_algorithm_impls(self, algorithm: str) -> Tuple[Callable[[str], str], Callable[[str, str], str]]: ... def get_entity_digest(self, data: Optional[bytes], chal: Mapping[str, str]) -> Optional[str]: ... class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): auth_header = ... # type: str handler_order = ... # type: int def http_error_401(self, req, fp, code, msg, headers): ... class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): auth_header = ... # type: str handler_order = ... # type: int def http_error_407(self, req, fp, code, msg, headers): ... class AbstractHTTPHandler(BaseHandler): def __init__(self, debuglevel: int = ...) -> None: ... def do_request_(self, request): ... 
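# The handler/opener machinery declared in this module composes roughly as
# follows; the URL and credentials are placeholders and this is only a sketch
# of the Python 2 urllib2 API, kept in a comment:
#
#     import urllib2
#
#     password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
#     password_mgr.add_password(None, 'http://example.com/', 'user', 'secret')
#     opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_mgr))
#     urllib2.install_opener(opener)
#
#     req = urllib2.Request('http://example.com/', headers={'Accept': 'text/html'})
#     body = urllib2.urlopen(req, timeout=10).read()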
def do_open(self, http_class, req): ... class HTTPHandler(AbstractHTTPHandler): def http_open(self, req): ... def http_request(self, request): ... class HTTPSHandler(AbstractHTTPHandler): def __init__(self, debuglevel=0, context=None): ... def https_open(self, req): ... def https_request(self, request): ... class HTTPCookieProcessor(BaseHandler): def __init__(self, cookiejar=None): ... def http_request(self, request): ... def http_response(self, request, response): ... class UnknownHandler(BaseHandler): def unknown_open(self, req): ... class FileHandler(BaseHandler): def file_open(self, req): ... def get_names(self): ... def open_local_file(self, req): ... class FTPHandler(BaseHandler): def ftp_open(self, req): ... def connect_ftp(self, user, passwd, host, port, dirs, timeout): ... class CacheFTPHandler(FTPHandler): def __init__(self) -> None: ... def setTimeout(self, t): ... def setMaxConns(self, m): ... def check_cache(self): ... def clear_cache(self): ... def parse_http_list(s: AnyStr) -> List[AnyStr]: ... def parse_keqv_list(l: List[AnyStr]) -> Dict[AnyStr, AnyStr]: ... mypy-0.560/typeshed/stdlib/2/urlparse.pyi0000644€tŠÔÚ€2›s®0000000420113215007212024511 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for urlparse (Python 2) from typing import AnyStr, Dict, List, NamedTuple, Tuple, Sequence, Union, overload _String = Union[str, unicode] uses_relative = ... # type: List[str] uses_netloc = ... # type: List[str] uses_params = ... # type: List[str] non_hierarchical = ... # type: List[str] uses_query = ... # type: List[str] uses_fragment = ... # type: List[str] scheme_chars = ... # type: str MAX_CACHE_SIZE = 0 def clear_cache() -> None: ... class ResultMixin(object): @property def username(self) -> str: ... @property def password(self) -> str: ... @property def hostname(self) -> str: ... @property def port(self) -> int: ... class SplitResult( NamedTuple( 'SplitResult', [ ('scheme', str), ('netloc', str), ('path', str), ('query', str), ('fragment', str) ] ), ResultMixin ): def geturl(self) -> str: ... class ParseResult( NamedTuple( 'ParseResult', [ ('scheme', str), ('netloc', str), ('path', str), ('params', str), ('query', str), ('fragment', str) ] ), ResultMixin ): def geturl(self) -> str: ... def urlparse(url: _String, scheme: _String = ..., allow_fragments: bool = ...) -> ParseResult: ... def urlsplit(url: _String, scheme: _String = ..., allow_fragments: bool = ...) -> SplitResult: ... @overload def urlunparse(data: Tuple[_String, _String, _String, _String, _String, _String]) -> str: ... @overload def urlunparse(data: Sequence[_String]) -> str: ... @overload def urlunsplit(data: Tuple[_String, _String, _String, _String, _String]) -> str: ... @overload def urlunsplit(data: Sequence[_String]) -> str: ... def urljoin(base: _String, url: _String, allow_fragments: bool = ...) -> str: ... def urldefrag(url: AnyStr) -> Tuple[AnyStr, str]: ... def unquote(s: AnyStr) -> AnyStr: ... def parse_qs(qs: AnyStr, keep_blank_values: bool = ..., strict_parsing: bool = ...) -> Dict[AnyStr, List[AnyStr]]: ... def parse_qsl(qs: AnyStr, keep_blank_values: int = ..., strict_parsing: bool = ...) -> List[Tuple[AnyStr, AnyStr]]: ... mypy-0.560/typeshed/stdlib/2/user.pyi0000644€tŠÔÚ€2›s®0000000036013215007212023634 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for user (Python 2) # Docs: https://docs.python.org/2/library/user.html # Source: https://hg.python.org/cpython/file/2.7/Lib/user.py from typing import Any def __getattr__(name) -> Any: ... 
# type: ignore home: str pythonrc: str mypy-0.560/typeshed/stdlib/2/UserDict.pyi0000644€tŠÔÚ€2›s®0000000304213215007212024400 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import (Any, Container, Dict, Generic, Iterable, Iterator, List, Mapping, Optional, Sized, Tuple, TypeVar, Union, overload) _KT = TypeVar('_KT') _VT = TypeVar('_VT') _T = TypeVar('_T') class UserDict(Dict[_KT, _VT], Generic[_KT, _VT]): data = ... # type: Mapping[_KT, _VT] def __init__(self, initialdata: Mapping[_KT, _VT] = ...) -> None: ... # TODO: __iter__ is not available for UserDict class IterableUserDict(UserDict[_KT, _VT], Generic[_KT, _VT]): ... class DictMixin(Iterable[_KT], Container[_KT], Sized, Generic[_KT, _VT]): def has_key(self, key: _KT) -> bool: ... # From typing.Mapping[_KT, _VT] # (can't inherit because of keys()) @overload def get(self, k: _KT) -> Optional[_VT]: ... @overload def get(self, k: _KT, default: Union[_VT, _T]) -> Union[_VT, _T]: ... def values(self) -> List[_VT]: ... def items(self) -> List[Tuple[_KT, _VT]]: ... def iterkeys(self) -> Iterator[_KT]: ... def itervalues(self) -> Iterator[_VT]: ... def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ... def __contains__(self, o: Any) -> bool: ... # From typing.MutableMapping[_KT, _VT] def clear(self) -> None: ... def pop(self, k: _KT, default: _VT = ...) -> _VT: ... def popitem(self) -> Tuple[_KT, _VT]: ... def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ... @overload def update(self, m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def update(self, m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... mypy-0.560/typeshed/stdlib/2/UserList.pyi0000644€tŠÔÚ€2›s®0000000010513215007212024425 0ustar jukkaDROPBOX\Domain Users00000000000000import collections class UserList(collections.MutableSequence): ... mypy-0.560/typeshed/stdlib/2/UserString.pyi0000644€tŠÔÚ€2›s®0000000020213215007212024756 0ustar jukkaDROPBOX\Domain Users00000000000000import collections class UserString(collections.Sequence): ... class MutableString(UserString, collections.MutableSequence): ... mypy-0.560/typeshed/stdlib/2/whichdb.pyi0000644€tŠÔÚ€2›s®0000000022613215007212024267 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/whichdb.py from typing import Optional, Text def whichdb(filename: Text) -> Optional[str]: ... mypy-0.560/typeshed/stdlib/2/wsgiref/0000755€tŠÔÚ€2›s®0000000000013215007244023607 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2/wsgiref/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212026052 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2/wsgiref/types.pyi0000644€tŠÔÚ€2›s®0000000233313215007212025472 0ustar jukkaDROPBOX\Domain Users00000000000000# Type declaration for a WSGI Function in Python 2 # # wsgiref/types.py doesn't exist and neither does WSGIApplication, it's a type # provided for type checking purposes. # # This means you cannot simply import wsgiref.types in your code. Instead, # use the `TYPE_CHECKING` flag from the typing module: # # from typing import TYPE_CHECKING # # if TYPE_CHECKING: # from wsgiref.types import WSGIApplication # # This import is now only taken into account by the type checker. Consequently, # you need to use 'WSGIApplication' and not simply WSGIApplication when type # hinting your code. Otherwise Python will raise NameErrors. 
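# For example, an application callable can be checked against this type like
# so (hello_app and app are illustrative names, not part of wsgiref):
#
#     from typing import TYPE_CHECKING
#
#     if TYPE_CHECKING:
#         from wsgiref.types import WSGIApplication
#
#     def hello_app(environ, start_response):
#         start_response('200 OK', [('Content-Type', 'text/plain')])
#         return ['Hello, world!\n']
#
#     app = hello_app  # type: 'WSGIApplication'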
from typing import Callable, Dict, Iterable, List, Optional, Tuple, Type, Union, Any from types import TracebackType _exc_info = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] _Text = Union[unicode, str] WSGIEnvironment = Dict[_Text, Any] WSGIApplication = Callable[ [ WSGIEnvironment, Union[ Callable[[_Text, List[Tuple[_Text, _Text]]], Callable[[_Text], None]], Callable[[_Text, List[Tuple[_Text, _Text]], _exc_info], Callable[[_Text], None]] ] ], Iterable[_Text] ] mypy-0.560/typeshed/stdlib/2/wsgiref/validate.pyi0000644€tŠÔÚ€2›s®0000000215113215007212026115 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class WSGIWarning(Warning): ... def validator(application): ... class InputWrapper: input = ... # type: Any def __init__(self, wsgi_input): ... def read(self, *args): ... def readline(self): ... def readlines(self, *args): ... def __iter__(self): ... def close(self): ... class ErrorWrapper: errors = ... # type: Any def __init__(self, wsgi_errors): ... def write(self, s): ... def flush(self): ... def writelines(self, seq): ... def close(self): ... class WriteWrapper: writer = ... # type: Any def __init__(self, wsgi_writer): ... def __call__(self, s): ... class PartialIteratorWrapper: iterator = ... # type: Any def __init__(self, wsgi_iterator): ... def __iter__(self): ... class IteratorWrapper: original_iterator = ... # type: Any iterator = ... # type: Any closed = ... # type: Any check_start_response = ... # type: Any def __init__(self, wsgi_iterator, check_start_response): ... def __iter__(self): ... def next(self): ... def close(self): ... def __del__(self): ... mypy-0.560/typeshed/stdlib/2/xmlrpclib.pyi0000644€tŠÔÚ€2›s®0000002274613215007212024666 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for xmlrpclib (Python 2) from typing import Any, AnyStr, Callable, IO, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Type, TypeVar, Union from types import InstanceType from datetime import datetime from time import struct_time from httplib import HTTPConnection, HTTPResponse, HTTPSConnection from ssl import SSLContext from StringIO import StringIO from gzip import GzipFile _Unmarshaller = Any _timeTuple = Tuple[int, int, int, int, int, int, int, int, int] # Represents types that can be compared against a DateTime object _dateTimeComp = Union[AnyStr, DateTime, datetime, _timeTuple] # A "host description" used by Transport factories _hostDesc = Union[str, Tuple[str, Mapping[Any, Any]]] def escape(s: AnyStr, replace: Callable[[AnyStr, AnyStr, AnyStr], AnyStr] = ...) -> AnyStr: ... MAXINT = ... # type: int MININT = ... # type: int PARSE_ERROR = ... # type: int SERVER_ERROR = ... # type: int APPLICATION_ERROR = ... # type: int SYSTEM_ERROR = ... # type: int TRANSPORT_ERROR = ... # type: int NOT_WELLFORMED_ERROR = ... # type: int UNSUPPORTED_ENCODING = ... # type: int INVALID_ENCODING_CHAR = ... # type: int INVALID_XMLRPC = ... # type: int METHOD_NOT_FOUND = ... # type: int INVALID_METHOD_PARAMS = ... # type: int INTERNAL_ERROR = ... # type: int class Error(Exception): ... class ProtocolError(Error): url = ... # type: str errcode = ... # type: int errmsg = ... # type: str headers = ... # type: Any def __init__(self, url: str, errcode: int, errmsg: str, headers: Any) -> None: ... class ResponseError(Error): ... class Fault(Error): faultCode = ... # type: Any faultString = ... # type: str def __init__(self, faultCode: Any, faultString: str, **extra: Any) -> None: ... boolean = ... # type: Type[bool] Boolean = ... 
# type: Type[bool] class DateTime: value = ... # type: str def __init__(self, value: Union[str, unicode, datetime, float, int, _timeTuple, struct_time] = ...) -> None: ... def make_comparable(self, other: _dateTimeComp) -> Tuple[_dateTimeComp, _dateTimeComp]: ... def __lt__(self, other: _dateTimeComp) -> bool: ... def __le__(self, other: _dateTimeComp) -> bool: ... def __gt__(self, other: _dateTimeComp) -> bool: ... def __ge__(self, other: _dateTimeComp) -> bool: ... def __eq__(self, other: _dateTimeComp) -> bool: ... def __ne__(self, other: _dateTimeComp) -> bool: ... def timetuple(self) -> struct_time: ... def __cmp__(self, other: _dateTimeComp) -> int: ... def decode(self, data: Any) -> None: ... def encode(self, out: IO) -> None: ... class Binary: data = ... # type: str def __init__(self, data: Optional[str] = ...) -> None: ... def __cmp__(self, other: Any) -> int: ... def decode(self, data: str) -> None: ... def encode(self, out: IO) -> None: ... WRAPPERS = ... # type: tuple # Still part of the public API, but see http://bugs.python.org/issue1773632 FastParser = ... # type: None FastUnmarshaller = ... # type: None FastMarshaller = ... # type: None # xmlrpclib.py will leave ExpatParser undefined if it can't import expat from # xml.parsers. Because this is Python 2.7, the import will succeed. class ExpatParser: def __init__(self, target: _Unmarshaller) -> None: ... def feed(self, data: str): ... def close(self): ... # TODO: Add xmllib.XMLParser as base class class SlowParser: handle_xml = ... # type: Callable[[str, bool], None] unknown_starttag = ... # type: Callable[[str, Any], None] handle_data = ... # type: Callable[[str], None] handle_cdata = ... # type: Callable[[str], None] unknown_endtag = ... # type: Callable[[str, Callable[[Iterable[str], str], str]], None] def __init__(self, target: _Unmarshaller) -> None: ... class Marshaller: memo = ... # type: MutableMapping[int, Any] data = ... # type: Optional[str] encoding = ... # type: Optional[str] allow_none = ... # type: bool def __init__(self, encoding: Optional[str] = ..., allow_none: bool = ...) -> None: ... dispatch = ... # type: Mapping[type, Callable[[Marshaller, str, Callable[[str], None]], None]] def dumps(self, values: Union[Iterable[Union[None, int, bool, long, float, str, unicode, List, Tuple, Mapping, datetime, InstanceType]], Fault]) -> str: ... def dump_nil(self, value: None, write: Callable[[str], None]) -> None: ... def dump_int(self, value: int, write: Callable[[str], None]) -> None: ... def dump_bool(self, value: bool, write: Callable[[str], None]) -> None: ... def dump_long(self, value: long, write: Callable[[str], None]) -> None: ... def dump_double(self, value: float, write: Callable[[str], None]) -> None: ... def dump_string(self, value: str, write: Callable[[str], None], escape: Callable[[AnyStr, Callable[[AnyStr, AnyStr, AnyStr], AnyStr]], AnyStr] = ...) -> None: ... def dump_unicode(self, value: unicode, write: Callable[[str], None], escape: Callable[[AnyStr, Callable[[AnyStr, AnyStr, AnyStr], AnyStr]], AnyStr] = ...) -> None: ... def dump_array(self, value: Union[List, Tuple], write: Callable[[str], None]) -> None: ... def dump_struct(self, value: Mapping, write: Callable[[str], None], escape: Callable[[AnyStr, Callable[[AnyStr, AnyStr, AnyStr], AnyStr]], AnyStr] = ...) -> None: ... def dump_datetime(self, value: datetime, write: Callable[[str], None]) -> None: ... def dump_instance(self, value: InstanceType, write: Callable[[str], None]) -> None: ... 
class Unmarshaller: def append(self, object: Any) -> None: ... def __init__(self, use_datetime: bool = ...) -> None: ... def close(self) -> tuple: ... def getmethodname(self) -> Optional[str]: ... def xml(self, encoding: str, standalone: bool) -> None: ... def start(self, tag: str, attrs: Any) -> None: ... def data(self, text: str) -> None: ... def end(self, tag: str, join: Callable[[Iterable[str], str], str] = ...) -> None: ... def end_dispatch(self, tag: str, data: str) -> None: ... dispatch = ... # type: Mapping[str, Callable[[Unmarshaller, str], None]] def end_nil(self, data: str): ... def end_boolean(self, data: str) -> None: ... def end_int(self, data: str) -> None: ... def end_double(self, data: str) -> None: ... def end_string(self, data: str) -> None: ... def end_array(self, data: str) -> None: ... def end_struct(self, data: str) -> None: ... def end_base64(self, data: str) -> None: ... def end_dateTime(self, data: str) -> None: ... def end_value(self, data: str) -> None: ... def end_params(self, data: str) -> None: ... def end_fault(self, data: str) -> None: ... def end_methodName(self, data: str) -> None: ... class _MultiCallMethod: def __init__(self, call_list: List[Tuple[str, tuple]], name: str) -> None: ... class MultiCallIterator: def __init__(self, results: List) -> None: ... class MultiCall: def __init__(self, server: ServerProxy) -> None: ... def __getattr__(self, name: str) -> _MultiCallMethod: ... def __call__(self) -> MultiCallIterator: ... def getparser(use_datetime: bool = ...) -> Tuple[Union[ExpatParser, SlowParser], Unmarshaller]: ... def dumps(params: Union[tuple, Fault], methodname: Optional[str] = ..., methodresponse: Optional[bool] = ..., encoding: Optional[str] = ..., allow_none: bool = ...) -> str: ... def loads(data: str, use_datetime: bool = ...) -> Tuple[tuple, Optional[str]]: ... def gzip_encode(data: str) -> str: ... def gzip_decode(data: str, max_decode: int = ...) -> str: ... class GzipDecodedResponse(GzipFile): stringio = ... # type: StringIO def __init__(self, response: HTTPResponse) -> None: ... def close(self): ... class _Method: def __init__(self, send: Callable[[str, tuple], Any], name: str) -> None: ... def __getattr__(self, name: str) -> _Method: ... def __call__(self, *args: Any) -> Any: ... class Transport: user_agent = ... # type: str accept_gzip_encoding = ... # type: bool encode_threshold = ... # type: Optional[int] def __init__(self, use_datetime: bool = ...) -> None: ... def request(self, host: _hostDesc, handler: str, request_body: str, verbose: bool = ...) -> tuple: ... verbose = ... # type: bool def single_request(self, host: _hostDesc, handler: str, request_body: str, verbose: bool = ...) -> tuple: ... def getparser(self) -> Tuple[Union[ExpatParser, SlowParser], Unmarshaller]: ... def get_host_info(self, host: _hostDesc) -> Tuple[str, Optional[List[Tuple[str, str]]], Optional[Mapping[Any, Any]]]: ... def make_connection(self, host: _hostDesc) -> HTTPConnection: ... def close(self) -> None: ... def send_request(self, connection: HTTPConnection, handler: str, request_body: str) -> None: ... def send_host(self, connection: HTTPConnection, host: str) -> None: ... def send_user_agent(self, connection: HTTPConnection) -> None: ... def send_content(self, connection: HTTPConnection, request_body: str) -> None: ... def parse_response(self, response: HTTPResponse) -> tuple: ... class SafeTransport(Transport): def __init__(self, use_datetime: bool = ..., context: Optional[SSLContext] = ...) -> None: ... 
def make_connection(self, host: _hostDesc) -> HTTPSConnection: ... class ServerProxy: def __init__(self, uri: str, transport: Optional[Transport] = ..., encoding: Optional[str] = ..., verbose: bool = ..., allow_none: bool = ..., use_datetime: bool = ..., context: Optional[SSLContext] = ...) -> None: ... def __getattr__(self, name: str) -> _Method: ... def __call__(self, attr: str) -> Optional[Transport]: ... Server = ServerProxy mypy-0.560/typeshed/stdlib/2and3/0000755€tŠÔÚ€2›s®0000000000013215007244022707 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/__future__.pyi0000644€tŠÔÚ€2›s®0000000075213215007212025537 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import List class _Feature: def getOptionalRelease(self) -> sys._version_info: ... def getMandatoryRelease(self) -> sys._version_info: ... absolute_import: _Feature division: _Feature generators: _Feature nested_scopes: _Feature print_function: _Feature unicode_literals: _Feature with_statement: _Feature if sys.version_info >= (3, 0): barry_as_FLUFL: _Feature if sys.version_info >= (3, 5): generator_stop: _Feature all_feature_names: List[str] mypy-0.560/typeshed/stdlib/2and3/_bisect.pyi0000644€tŠÔÚ€2›s®0000000112013215007212025027 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_bisect' module.""" from typing import Any, Sequence, TypeVar _T = TypeVar('_T') def bisect(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... def bisect_left(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... def bisect_right(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... def insort(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> None: ... def insort_left(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> None: ... def insort_right(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/_codecs.pyi0000644€tŠÔÚ€2›s®0000001156413215007212025033 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_codecs' module.""" import sys from typing import Any, Callable, Tuple, Optional, Dict, Text, Union import codecs # For convenience: _Handler = Callable[[Exception], Tuple[Text, int]] _String = Union[bytes, str] _Errors = Union[str, Text, None] if sys.version_info < (3, 0): _Decodable = Union[bytes, Text] _Encodable = Union[bytes, Text] else: _Decodable = bytes _Encodable = str # This type is not exposed; it is defined in unicodeobject.c class _EncodingMap(object): def size(self) -> int: ... _MapT = Union[Dict[int, int], _EncodingMap] def register(search_function: Callable[[str], Any]) -> None: ... def register_error(errors: Union[str, Text], handler: _Handler) -> None: ... def lookup(encoding: Union[str, Text]) -> codecs.CodecInfo: ... def lookup_error(name: Union[str, Text]) -> _Handler: ... def decode(obj: Any, encoding: Union[str, Text] = ..., errors: _Errors = ...) -> Any: ... def encode(obj: Any, encoding: Union[str, Text] = ..., errors: _Errors = ...) -> Any: ... def charmap_build(map: Text) -> _MapT: ... def ascii_decode(data: _Decodable, errors: _Errors = ...) -> Tuple[Text, int]: ... def ascii_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def charbuffer_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def charmap_decode(data: _Decodable, errors: _Errors = ..., mapping: Optional[_MapT] = ...) -> Tuple[Text, int]: ... def charmap_encode(data: _Encodable, errors: _Errors, mapping: Optional[_MapT] = ...) 
-> Tuple[bytes, int]: ... def escape_decode(data: _String, errors: _Errors = ...) -> Tuple[str, int]: ... def escape_encode(data: bytes, errors: _Errors = ...) -> Tuple[bytes, int]: ... def latin_1_decode(data: _Decodable, errors: _Errors = ...) -> Tuple[Text, int]: ... def latin_1_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def raw_unicode_escape_decode(data: _String, errors: _Errors = ...) -> Tuple[Text, int]: ... def raw_unicode_escape_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def readbuffer_encode(data: _String, errors: _Errors = ...) -> Tuple[bytes, int]: ... def unicode_escape_decode(data: _String, errors: _Errors = ...) -> Tuple[Text, int]: ... def unicode_escape_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def unicode_internal_decode(data: _String, errors: _Errors = ...) -> Tuple[Text, int]: ... def unicode_internal_encode(data: _String, errors: _Errors = ...) -> Tuple[bytes, int]: ... def utf_16_be_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_16_be_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def utf_16_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_16_encode(data: _Encodable, errors: _Errors = ..., byteorder: int = ...) -> Tuple[bytes, int]: ... def utf_16_ex_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int, int]: ... def utf_16_le_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_16_le_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def utf_32_be_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_32_be_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def utf_32_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_32_encode(data: _Encodable, errors: _Errors = ..., byteorder: int = ...) -> Tuple[bytes, int]: ... def utf_32_ex_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int, int]: ... def utf_32_le_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_32_le_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def utf_7_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_7_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... def utf_8_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def utf_8_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... if sys.platform == 'win32': def mbcs_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def mbcs_encode(str: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ... if sys.version_info >= (3, 0): def oem_decode(data: bytes, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def code_page_decode(codepage: int, data: bytes, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ... def oem_encode(str: Text, errors: _Errors = ...) -> Tuple[bytes, int]: ... def code_page_encode(code_page: int, str: Text, errors: _Errors = ...) -> Tuple[bytes, int]: ... 
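# A quick illustration of the (output, length_consumed) pairs these low-level
# codec functions return. The snippet is only an explanatory comment, not part
# of the stub; results shown are what CPython 2 produces (Python 3 returns
# bytes for the encoded values):
#
#     import codecs  # the codecs module re-exports the _codecs functions
#
#     codecs.utf_8_encode(u'caf\xe9')      # -> ('caf\xc3\xa9', 4)
#     codecs.utf_8_decode(b'caf\xc3\xa9')  # -> (u'caf\xe9', 5)
#     codecs.lookup('utf-8').name          # -> 'utf-8'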
mypy-0.560/typeshed/stdlib/2and3/_csv.pyi0000644€tŠÔÚ€2›s®0000000274513215007212024367 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Iterable, Iterator, List, Optional, Sequence QUOTE_ALL = ... # type: int QUOTE_MINIMAL = ... # type: int QUOTE_NONE = ... # type: int QUOTE_NONNUMERIC = ... # type: int class Error(Exception): ... class Dialect: delimiter = ... # type: str quotechar = ... # type: Optional[str] escapechar = ... # type: Optional[str] doublequote = ... # type: bool skipinitialspace = ... # type: bool lineterminator = ... # type: str quoting = ... # type: int strict = ... # type: int def __init__(self) -> None: ... class _reader(Iterator[List[str]]): dialect = ... # type: Dialect line_num = ... # type: int class _writer: dialect = ... # type: Dialect if sys.version_info >= (3, 5): def writerow(self, row: Iterable[Any]) -> None: ... def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ... else: def writerow(self, row: Sequence[Any]) -> None: ... def writerows(self, rows: Iterable[Sequence[Any]]) -> None: ... # TODO: precise type def writer(csvfile: Any, dialect: Any = ..., **fmtparams: Any) -> _writer: ... def reader(csvfile: Iterable[str], dialect: Any = ..., **fmtparams: Any) -> _reader: ... def register_dialect(name: str, dialect: Any = ..., **fmtparams: Any) -> None: ... def unregister_dialect(name: str) -> None: ... def get_dialect(name: str) -> Dialect: ... def list_dialects() -> List[str]: ... def field_size_limit(new_limit: int = ...) -> int: ... mypy-0.560/typeshed/stdlib/2and3/_heapq.pyi0000644€tŠÔÚ€2›s®0000000100313215007212024654 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_heapq' module.""" from typing import TypeVar, List _T = TypeVar("_T") def heapify(heap: List[_T]) -> None: ... def heappop(heap: List[_T]) -> _T: raise IndexError() # if list is empty def heappush(heap: List[_T], item: _T) -> None: ... def heappushpop(heap: List[_T], item: _T) -> _T: ... def heapreplace(heap: List[_T], item: _T) -> _T: raise IndexError() # if list is empty def nlargest(a: int, b: List[_T]) -> List[_T]: ... def nsmallest(a: int, b: List[_T]) -> List[_T]: ... mypy-0.560/typeshed/stdlib/2and3/_random.pyi0000644€tŠÔÚ€2›s®0000000075513215007212025053 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for _random import sys from typing import Tuple # Actually Tuple[(int,) * 625] _State = Tuple[int, ...] class Random(object): def __init__(self, seed: object = ...) -> None: ... def seed(self, x: object = ...) -> None: ... def getstate(self) -> _State: ... def setstate(self, state: _State) -> None: ... def random(self) -> float: ... def getrandbits(self, k: int) -> int: ... if sys.version_info < (3,): def jumpahead(self, i: int) -> None: ... mypy-0.560/typeshed/stdlib/2and3/_weakref.pyi0000644€tŠÔÚ€2›s®0000000146513215007212025216 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Callable, Generic, Optional, TypeVar _T = TypeVar('_T') class CallableProxyType(object): # "weakcallableproxy" def __getattr__(self, attr: str) -> Any: ... class ProxyType(object): # "weakproxy" def __getattr__(self, attr: str) -> Any: ... class ReferenceType(Generic[_T]): if sys.version_info >= (3, 4): __callback__: Callable[[ReferenceType[_T]], Any] def __init__(self, o: _T, callback: Callable[[ReferenceType[_T]], Any] = ...) -> None: ... def __call__(self) -> Optional[_T]: ... def __hash__(self) -> int: ... ref = ReferenceType def getweakrefcount(object: Any) -> int: ... def getweakrefs(object: Any) -> int: ... 
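# A short sketch of how ref behaves (kept as a comment; Target is a
# hypothetical class, not part of this module, and immediate collection after
# `del obj` assumes CPython's reference counting):
#
#     import weakref
#
#     class Target(object):
#         pass
#
#     obj = Target()
#     r = weakref.ref(obj)
#     assert r() is obj    # dereference while the object is alive
#     del obj
#     assert r() is None   # the referent has been collected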
def proxy(object: _T, callback: Callable[[_T], Any] = ...) -> ref[_T]: ... mypy-0.560/typeshed/stdlib/2and3/_weakrefset.pyi0000644€tŠÔÚ€2›s®0000000427213215007212025731 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterator, Any, Iterable, MutableSet, Optional, TypeVar, Generic, Union _S = TypeVar('_S') _T = TypeVar('_T') _SelfT = TypeVar('_SelfT', bound=WeakSet) class WeakSet(MutableSet[_T], Generic[_T]): def __init__(self, data: Optional[Iterable[_T]] = ...) -> None: ... def add(self, item: _T) -> None: ... def clear(self) -> None: ... def discard(self, item: _T) -> None: ... def copy(self: _SelfT) -> _SelfT: ... def pop(self) -> _T: ... def remove(self, item: _T) -> None: ... def update(self, other: Iterable[_T]) -> None: ... def __contains__(self, item: object) -> bool: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... def __ior__(self, other: Iterable[_S]) -> WeakSet[Union[_S, _T]]: ... def difference(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... def __sub__(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... def difference_update(self: _SelfT, other: Iterable[_T]) -> None: ... def __isub__(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... def intersection(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... def __and__(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... def intersection_update(self, other: Iterable[_T]) -> None: ... def __iand__(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... def issubset(self, other: Iterable[_T]) -> bool: ... def __le__(self, other: Iterable[_T]) -> bool: ... def __lt__(self, other: Iterable[_T]) -> bool: ... def issuperset(self, other: Iterable[_T]) -> bool: ... def __ge__(self, other: Iterable[_T]) -> bool: ... def __gt__(self, other: Iterable[_T]) -> bool: ... def __eq__(self, other: object) -> bool: ... def symmetric_difference(self, other: Iterable[_S]) -> WeakSet[Union[_S, _T]]: ... def __xor__(self, other: Iterable[_S]) -> WeakSet[Union[_S, _T]]: ... def symmetric_difference_update(self, other: Iterable[_S]) -> None: ... def __ixor__(self, other: Iterable[_S]) -> WeakSet[Union[_S, _T]]: ... def union(self, other: Iterable[_S]) -> WeakSet[Union[_S, _T]]: ... def __or__(self, other: Iterable[_S]) -> WeakSet[Union[_S, _T]]: ... def isdisjoint(self, other: Iterable[_T]) -> bool: ... mypy-0.560/typeshed/stdlib/2and3/argparse.pyi0000644€tŠÔÚ€2›s®0000001576113215007212025243 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for argparse (Python 3.4) from typing import ( Any, Callable, Iterable, List, IO, Optional, Sequence, Tuple, Type, Union, TypeVar, overload ) import sys _T = TypeVar('_T') if sys.version_info >= (3,): _Text = str else: _Text = Union[str, unicode] ONE_OR_MORE = ... # type: str OPTIONAL = ... # type: str PARSER = ... # type: str REMAINDER = ... # type: str SUPPRESS = ... # type: str ZERO_OR_MORE = ... # type: str class ArgumentError(Exception): ... class ArgumentParser: if sys.version_info >= (3, 5): def __init__(self, prog: Optional[str] = ..., usage: Optional[str] = ..., description: Optional[str] = ..., epilog: Optional[str] = ..., parents: Sequence[ArgumentParser] = ..., formatter_class: Type[HelpFormatter] = ..., prefix_chars: _Text = ..., fromfile_prefix_chars: Optional[str] = ..., argument_default: Optional[str] = ..., conflict_handler: _Text = ..., add_help: bool = ..., allow_abbrev: bool = ...) -> None: ... 
else: def __init__(self, prog: Optional[_Text] = ..., usage: Optional[_Text] = ..., description: Optional[_Text] = ..., epilog: Optional[_Text] = ..., parents: Sequence[ArgumentParser] = ..., formatter_class: Type[HelpFormatter] = ..., prefix_chars: _Text = ..., fromfile_prefix_chars: Optional[_Text] = ..., argument_default: Optional[_Text] = ..., conflict_handler: _Text = ..., add_help: bool = ...) -> None: ... def add_argument(self, *name_or_flags: Union[_Text, Sequence[_Text]], action: Union[_Text, Type[Action]] = ..., nargs: Union[int, _Text] = ..., const: Any = ..., default: Any = ..., type: Union[Callable[[str], _T], FileType] = ..., choices: Iterable[_T] = ..., required: bool = ..., help: _Text = ..., metavar: Union[_Text, Tuple[_Text, ...]] = ..., dest: _Text = ..., version: _Text = ...) -> None: ... # weirdly documented def parse_args(self, args: Optional[Sequence[_Text]] = ..., namespace: Optional[Namespace] = ...) -> Namespace: ... def add_subparsers(self, title: _Text = ..., description: Optional[_Text] = ..., prog: _Text = ..., parser_class: Type[ArgumentParser] = ..., action: Type[Action] = ..., option_string: _Text = ..., dest: Optional[_Text] = ..., help: Optional[_Text] = ..., metavar: Optional[_Text] = ...) -> _SubParsersAction: ... def add_argument_group(self, title: Optional[_Text] = ..., description: Optional[_Text] = ...) -> _ArgumentGroup: ... def add_mutually_exclusive_group(self, required: bool = ...) -> _MutuallyExclusiveGroup: ... def set_defaults(self, **kwargs: Any) -> None: ... def get_default(self, dest: _Text) -> Any: ... def print_usage(self, file: Optional[IO[str]] = ...) -> None: ... def print_help(self, file: Optional[IO[str]] = ...) -> None: ... def format_usage(self) -> str: ... def format_help(self) -> str: ... def parse_known_args(self, args: Optional[Sequence[_Text]] = ..., namespace: Optional[Namespace] = ...) -> Tuple[Namespace, List[str]]: ... def convert_arg_line_to_args(self, arg_line: _Text) -> List[str]: ... def exit(self, status: int = ..., message: Optional[_Text] = ...) -> None: ... def error(self, message: _Text) -> None: ... class HelpFormatter: # not documented def __init__(self, prog: _Text, indent_increment: int = ..., max_help_position: int = ..., width: Optional[int] = ...) -> None: ... class RawDescriptionHelpFormatter(HelpFormatter): ... class RawTextHelpFormatter(HelpFormatter): ... class ArgumentDefaultsHelpFormatter(HelpFormatter): ... if sys.version_info >= (3,): class MetavarTypeHelpFormatter(HelpFormatter): ... class Action: def __init__(self, option_strings: Sequence[_Text], dest: _Text = ..., nargs: Optional[Union[int, _Text]] = ..., const: Any = ..., default: Any = ..., type: Union[Callable[[str], _T], FileType, None] = ..., choices: Optional[Iterable[_T]] = ..., required: bool = ..., help: Optional[_Text] = ..., metavar: Union[_Text, Tuple[_Text, ...]] = ...) -> None: ... def __call__(self, parser: ArgumentParser, namespace: Namespace, values: Union[_Text, Sequence[Any], None], option_string: _Text = ...) -> None: ... class Namespace: def __init__(self, **kwargs: Any) -> None: ... def __getattr__(self, name: _Text) -> Any: ... def __setattr__(self, name: _Text, value: Any) -> None: ... def __contains__(self, key: str) -> bool: ... class FileType: if sys.version_info >= (3, 4): def __init__(self, mode: _Text = ..., bufsize: int = ..., encoding: Optional[_Text] = ..., errors: Optional[_Text] = ...) -> None: ... elif sys.version_info >= (3,): def __init__(self, mode: _Text = ..., bufsize: int = ...) -> None: ... 
else: def __init__(self, mode: _Text = ..., bufsize: Optional[int] = ...) -> None: ... def __call__(self, string: _Text) -> IO[Any]: ... class _ArgumentGroup: def add_argument(self, *name_or_flags: Union[_Text, Sequence[_Text]], action: Union[_Text, Type[Action]] = ..., nargs: Union[int, _Text] = ..., const: Any = ..., default: Any = ..., type: Union[Callable[[str], _T], FileType] = ..., choices: Iterable[_T] = ..., required: bool = ..., help: _Text = ..., metavar: Union[_Text, Tuple[_Text, ...]] = ..., dest: _Text = ..., version: _Text = ...) -> None: ... def add_mutually_exclusive_group(self, required: bool = ...) -> _MutuallyExclusiveGroup: ... class _MutuallyExclusiveGroup(_ArgumentGroup): ... class _SubParsersAction: # TODO: Type keyword args properly. def add_parser(self, name: _Text, **kwargs: Any) -> ArgumentParser: ... # not documented class ArgumentTypeError(Exception): ... mypy-0.560/typeshed/stdlib/2and3/array.pyi0000644€tŠÔÚ€2›s®0000000552713215007212024554 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for array # Based on http://docs.python.org/3.6/library/array.html import sys from typing import (Any, BinaryIO, Generic, Iterable, Iterator, List, MutableSequence, overload, Text, Tuple, TypeVar, Union) _T = TypeVar('_T', int, float, Text) if sys.version_info >= (3,): typecodes = ... # type: str class array(MutableSequence[_T], Generic[_T]): typecode = ... # type: str itemsize = ... # type: int def __init__(self, typecode: str, __initializer: Union[bytes, Iterable[_T]] = ...) -> None: ... def append(self, x: _T) -> None: ... def buffer_info(self) -> Tuple[int, int]: ... def byteswap(self) -> None: ... def count(self, x: Any) -> int: ... def extend(self, iterable: Iterable[_T]) -> None: ... if sys.version_info >= (3, 2): def frombytes(self, s: bytes) -> None: ... def fromfile(self, f: BinaryIO, n: int) -> None: ... def fromlist(self, list: List[_T]) -> None: ... def fromstring(self, s: bytes) -> None: ... def fromunicode(self, s: str) -> None: ... def index(self, x: _T) -> int: ... # type: ignore # Overrides Sequence def insert(self, i: int, x: _T) -> None: ... def pop(self, i: int = ...) -> _T: ... if sys.version_info < (3,): def read(self, f: BinaryIO, n: int) -> None: ... def remove(self, x: Any) -> None: ... def reverse(self) -> None: ... if sys.version_info >= (3, 2): def tobytes(self) -> bytes: ... def tofile(self, f: BinaryIO) -> None: ... def tolist(self) -> List[_T]: ... def tostring(self) -> bytes: ... def tounicode(self) -> str: ... if sys.version_info < (3,): def write(self, f: BinaryIO) -> None: ... def __len__(self) -> int: ... @overload def __getitem__(self, i: int) -> _T: ... @overload def __getitem__(self, s: slice) -> array[_T]: ... @overload # type: ignore # Overrides MutableSequence def __setitem__(self, i: int, o: _T) -> None: ... @overload def __setitem__(self, s: slice, o: array[_T]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... def __add__(self, x: array[_T]) -> array[_T]: ... def __ge__(self, other: array[_T]) -> bool: ... def __gt__(self, other: array[_T]) -> bool: ... def __iadd__(self, x: array[_T]) -> array[_T]: ... # type: ignore # Overrides MutableSequence def __imul__(self, n: int) -> array[_T]: ... def __le__(self, other: array[_T]) -> bool: ... def __lt__(self, other: array[_T]) -> bool: ... def __mul__(self, n: int) -> array[_T]: ... def __rmul__(self, n: int) -> array[_T]: ... if sys.version_info < (3,): def __delslice__(self, i: int, j: int) -> None: ... def __getslice__(self, i: int, j: int) -> array[_T]: ... 
def __setslice__(self, i: int, j: int, y: array[_T]) -> None: ... ArrayType = array mypy-0.560/typeshed/stdlib/2and3/asynchat.pyi0000644€tŠÔÚ€2›s®0000000306113215007212025237 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import abstractmethod import asyncore import socket import sys from typing import Optional, Sequence, Tuple, Union class simple_producer: def __init__(self, data: bytes, buffer_size: int = ...) -> None: ... def more(self) -> bytes: ... class async_chat(asyncore.dispatcher): ac_in_buffer_size = ... # type: int ac_out_buffer_size = ... # type: int def __init__(self, sock: Optional[socket.socket] = ..., map: Optional[asyncore._maptype] = ...) -> None: ... @abstractmethod def collect_incoming_data(self, data: bytes) -> None: ... @abstractmethod def found_terminator(self) -> None: ... def set_terminator(self, term: Union[bytes, int, None]) -> None: ... def get_terminator(self) -> Union[bytes, int, None]: ... def handle_read(self) -> None: ... def handle_write(self) -> None: ... def handle_close(self) -> None: ... def push(self, data: bytes) -> None: ... def push_with_producer(self, producer: simple_producer) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... def close_when_done(self) -> None: ... def initiate_send(self) -> None: ... def discard_buffers(self) -> None: ... if sys.version_info < (3, 0): class fifo: def __init__(self, list: Sequence[Union[bytes, simple_producer]] = ...) -> None: ... def __len__(self) -> int: ... def is_empty(self) -> bool: ... def first(self) -> bytes: ... def push(self, data: Union[bytes, simple_producer]) -> None: ... def pop(self) -> Tuple[int, bytes]: ... mypy-0.560/typeshed/stdlib/2and3/asyncore.pyi0000644€tŠÔÚ€2›s®0000001272313215007212025255 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple, Union, Optional, Any, Dict, overload import os import select import socket import sys import time import warnings from errno import (EALREADY, EINPROGRESS, EWOULDBLOCK, ECONNRESET, EINVAL, ENOTCONN, ESHUTDOWN, EINTR, EISCONN, EBADF, ECONNABORTED, EPIPE, EAGAIN, errorcode) # cyclic dependence with asynchat _maptype = Dict[str, Any] class ExitNow(Exception): ... def read(obj: Any) -> None: ... def write(obj: Any) -> None: ... def readwrite(obj: Any, flags: int) -> None: ... def poll(timeout: float = ..., map: _maptype = ...) -> None: ... def poll2(timeout: float = ..., map: _maptype = ...) -> None: ... poll3 = poll2 def loop(timeout: float = ..., use_poll: bool = ..., map: _maptype = ..., count: Optional[int] = ...) -> None: ... # Not really subclass of socket.socket; it's only delegation. # It is not covariant to it. class dispatcher: debug = ... # type: bool connected = ... # type: bool accepting = ... # type: bool connecting = ... # type: bool closing = ... # type: bool ignore_log_types = ... # type: frozenset[str] def __init__(self, sock: Optional[socket.socket] = ..., map: _maptype = ...) -> None: ... def add_channel(self, map: _maptype = ...) -> None: ... def del_channel(self, map: _maptype = ...) -> None: ... def create_socket(self, family: int, type: int) -> None: ... def set_socket(self, sock: socket.socket, map: _maptype = ...) -> None: ... def set_reuse_addr(self) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... def listen(self, backlog: int) -> None: ... def bind(self, address: Union[tuple, str]) -> None: ... def connect(self, address: Union[tuple, str]) -> None: ... def accept(self) -> Optional[Tuple[socket.socket, Any]]: ... 
def send(self, data: bytes) -> int: ... def recv(self, buffer_size: int) -> bytes: ... def close(self) -> None: ... def log(self, message: Any) -> None: ... def log_info(self, message: Any, type: str = ...) -> None: ... def handle_read_event(self) -> None: ... def handle_connect_event(self) -> None: ... def handle_write_event(self) -> None: ... def handle_expt_event(self) -> None: ... def handle_error(self) -> None: ... def handle_expt(self) -> None: ... def handle_read(self) -> None: ... def handle_write(self) -> None: ... def handle_connect(self) -> None: ... def handle_accept(self) -> None: ... def handle_close(self) -> None: ... if sys.version_info < (3, 5): # Historically, some methods were "imported" from `self.socket` by # means of `__getattr__`. This was long deprecated, and as of Python # 3.5 has been removed; simply call the relevant methods directly on # self.socket if necessary. def detach(self) -> int: ... def fileno(self) -> int: ... # return value is an address def getpeername(self) -> Any: ... def getsockname(self) -> Any: ... @overload def getsockopt(self, level: int, optname: int) -> int: ... @overload def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... def gettimeout(self) -> float: ... def ioctl(self, control: object, option: Tuple[int, int, int]) -> None: ... # TODO the return value may be BinaryIO or TextIO, depending on mode def makefile(self, mode: str = ..., buffering: int = ..., encoding: str = ..., errors: str = ..., newline: str = ...) -> Any: ... # return type is an address def recvfrom(self, bufsize: int, flags: int = ...) -> Any: ... def recvfrom_into(self, buffer: bytes, nbytes: int, flags: int = ...) -> Any: ... def recv_into(self, buffer: bytes, nbytes: int, flags: int = ...) -> Any: ... def sendall(self, data: bytes, flags: int = ...) -> None: ... def sendto(self, data: bytes, address: Union[tuple, str], flags: int = ...) -> int: ... def setblocking(self, flag: bool) -> None: ... def settimeout(self, value: Union[float, None]) -> None: ... def setsockopt(self, level: int, optname: int, value: Union[int, bytes]) -> None: ... def shutdown(self, how: int) -> None: ... class dispatcher_with_send(dispatcher): def __init__(self, sock: socket.socket = ..., map: _maptype = ...) -> None: ... def initiate_send(self) -> None: ... def handle_write(self) -> None: ... # incompatible signature: # def send(self, data: bytes) -> Optional[int]: ... def compact_traceback() -> Tuple[Tuple[str, str, str], type, type, str]: ... def close_all(map: _maptype = ..., ignore_all: bool = ...) -> None: ... # if os.name == 'posix': # import fcntl class file_wrapper: fd = ... # type: int def __init__(self, fd: int) -> None: ... def recv(self, bufsize: int, flags: int = ...) -> bytes: ... def send(self, data: bytes, flags: int = ...) -> int: ... @overload def getsockopt(self, level: int, optname: int) -> int: ... @overload def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... def read(self, bufsize: int, flags: int = ...) -> bytes: ... def write(self, data: bytes, flags: int = ...) -> int: ... def close(self) -> None: ... def fileno(self) -> int: ... class file_dispatcher(dispatcher): def __init__(self, fd: int, map: _maptype = ...) -> None: ... def set_file(self, fd: int) -> None: ... 
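# Minimal sketch of subclassing the dispatcher API above (EchoHandler is a
# hypothetical name; kept in a comment, this is not part of the stub):
#
#     import asyncore
#
#     class EchoHandler(asyncore.dispatcher_with_send):
#         def handle_read(self):
#             data = self.recv(8192)
#             if data:
#                 self.send(data)
#
#     # a server would create an EchoHandler from handle_accept() and then
#     # call asyncore.loop() to drive the event loop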
mypy-0.560/typeshed/stdlib/2and3/base64.pyi0000644€tŠÔÚ€2›s®0000000345113215007212024514 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for base64 from typing import IO, Union, Text import sys if sys.version_info < (3,): _encodable = Union[bytes, Text] _decodable = Union[bytes, Text] elif sys.version_info < (3, 3): _encodable = bytes _decodable = bytes elif sys.version_info[:2] == (3, 3): _encodable = bytes _decodable = Union[bytes, str] elif sys.version_info >= (3, 4): _encodable = Union[bytes, bytearray, memoryview] _decodable = Union[bytes, bytearray, memoryview, str] def b64encode(s: _encodable, altchars: bytes = ...) -> bytes: ... def b64decode(s: _decodable, altchars: bytes = ..., validate: bool = ...) -> bytes: ... def standard_b64encode(s: _encodable) -> bytes: ... def standard_b64decode(s: _decodable) -> bytes: ... def urlsafe_b64encode(s: _encodable) -> bytes: ... def urlsafe_b64decode(s: _decodable) -> bytes: ... def b32encode(s: _encodable) -> bytes: ... def b32decode(s: _decodable, casefold: bool = ..., map01: bytes = ...) -> bytes: ... def b16encode(s: _encodable) -> bytes: ... def b16decode(s: _decodable, casefold: bool = ...) -> bytes: ... if sys.version_info >= (3, 4): def a85encode(b: _encodable, *, foldspaces: bool = ..., wrapcol: int = ..., pad: bool = ..., adobe: bool = ...) -> bytes: ... def a85decode(b: _decodable, *, foldspaces: bool = ..., adobe: bool = ..., ignorechars: Union[str, bytes] = ...) -> bytes: ... def b85encode(b: _encodable, pad: bool = ...) -> bytes: ... def b85decode(b: _decodable) -> bytes: ... def decode(input: IO[bytes], output: IO[bytes]) -> None: ... def decodebytes(s: bytes) -> bytes: ... def decodestring(s: bytes) -> bytes: ... def encode(input: IO[bytes], output: IO[bytes]) -> None: ... def encodebytes(s: bytes) -> bytes: ... def encodestring(s: bytes) -> bytes: ... mypy-0.560/typeshed/stdlib/2and3/binascii.pyi0000644€tŠÔÚ€2›s®0000000274513215007212025216 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for binascii # Based on http://docs.python.org/3.2/library/binascii.html import sys from typing import Union, Text if sys.version_info < (3,): # Python 2 accepts unicode ascii pretty much everywhere. _Bytes = Union[bytes, Text] _Ascii = Union[bytes, Text] elif sys.version_info < (3, 3): # Python 3.2 and below only accepts bytes. _Bytes = bytes _Ascii = bytes else: # But since Python 3.3 ASCII-only unicode strings are accepted by the # a2b_* functions. _Bytes = bytes _Ascii = Union[bytes, Text] def a2b_uu(string: _Ascii) -> bytes: ... def b2a_uu(data: _Bytes) -> bytes: ... def a2b_base64(string: _Ascii) -> bytes: ... if sys.version_info >= (3, 6): def b2a_base64(data: _Bytes, *, newline: bool = ...) -> bytes: ... else: def b2a_base64(data: _Bytes) -> bytes: ... def a2b_qp(string: _Ascii, header: bool = ...) -> bytes: ... def b2a_qp(data: _Bytes, quotetabs: bool = ..., istext: bool = ..., header: bool = ...) -> bytes: ... def a2b_hqx(string: _Ascii) -> bytes: ... def rledecode_hqx(data: _Bytes) -> bytes: ... def rlecode_hqx(data: _Bytes) -> bytes: ... def b2a_hqx(data: _Bytes) -> bytes: ... def crc_hqx(data: _Bytes, crc: int) -> int: ... def crc32(data: _Bytes, crc: int = ...) -> int: ... def b2a_hex(data: _Bytes) -> bytes: ... def hexlify(data: _Bytes) -> bytes: ... def a2b_hex(hexstr: _Ascii) -> bytes: ... def unhexlify(hexlify: _Ascii) -> bytes: ... class Error(Exception): ... class Incomplete(Exception): ... 
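# The encode/decode pairs above are inverses of each other; for instance
# (values are the standard CPython results, snippet kept as a comment):
#
#     import base64, binascii
#
#     base64.b64encode(b'mypy')        # -> b'bXlweQ=='
#     base64.b64decode(b'bXlweQ==')    # -> b'mypy'
#     binascii.hexlify(b'mypy')        # -> b'6d797079'
#     binascii.unhexlify(b'6d797079')  # -> b'mypy'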
mypy-0.560/typeshed/stdlib/2and3/binhex.pyi0000644€tŠÔÚ€2›s®0000000234513215007212024706 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( Any, IO, Tuple, Union, ) class Error(Exception): ... REASONABLY_LARGE = ... # type: int LINELEN = ... # type: int RUNCHAR = ... # type: bytes class FInfo: def __init__(self) -> None: ... Type = ... # type: str Creator = ... # type: str Flags = ... # type: int _FileInfoTuple = Tuple[str, FInfo, int, int] _FileHandleUnion = Union[str, IO[bytes]] def getfileinfo(name: str) -> _FileInfoTuple: ... class openrsrc: def __init__(self, *args: Any) -> None: ... def read(self, *args: Any) -> bytes: ... def write(self, *args: Any) -> None: ... def close(self) -> None: ... class BinHex: def __init__(self, name_finfo_dlen_rlen: _FileInfoTuple, ofp: _FileHandleUnion) -> None: ... def write(self, data: bytes) -> None: ... def close_data(self) -> None: ... def write_rsrc(self, data: bytes) -> None: ... def close(self) -> None: ... def binhex(inp: str, out: str) -> None: ... class HexBin: def __init__(self, ifp: _FileHandleUnion) -> None: ... def read(self, *n: int) -> bytes: ... def close_data(self) -> None: ... def read_rsrc(self, *n: int) -> bytes: ... def close(self) -> None: ... def hexbin(inp: str, out: str) -> None: ... mypy-0.560/typeshed/stdlib/2and3/bisect.pyi0000644€tŠÔÚ€2›s®0000000210313215007212024672 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for bisect from typing import Any, Sequence, TypeVar _T = TypeVar('_T') # TODO uncomment when mypy# 2035 is fixed # def bisect_left(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... # def bisect_right(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... # def bisect(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... # # def insort_left(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... # def insort_right(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... # def insort(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ... def bisect_left(a: Sequence, x: Any, lo: int = ..., hi: int = ...) -> int: ... def bisect_right(a: Sequence, x: Any, lo: int = ..., hi: int = ...) -> int: ... def bisect(a: Sequence, x: Any, lo: int = ..., hi: int = ...) -> int: ... def insort_left(a: Sequence, x: Any, lo: int = ..., hi: int = ...) -> int: ... def insort_right(a: Sequence, x: Any, lo: int = ..., hi: int = ...) -> int: ... def insort(a: Sequence, x: Any, lo: int = ..., hi: int = ...) -> int: ... mypy-0.560/typeshed/stdlib/2and3/bz2.pyi0000644€tŠÔÚ€2›s®0000000300013215007212024113 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, BinaryIO, IO, Optional, Union if sys.version_info >= (3, 6): from os import PathLike _PathOrFile = Union[str, bytes, IO[Any], PathLike[Any]] elif sys.version_info >= (3, 3): _PathOrFile = Union[str, bytes, IO[Any]] else: _PathOrFile = str def compress(data: bytes, compresslevel: int = ...) -> bytes: ... def decompress(data: bytes) -> bytes: ... if sys.version_info >= (3, 3): def open(filename: _PathOrFile, mode: str = ..., compresslevel: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ...) -> IO[Any]: ... class BZ2File(BinaryIO): def __init__(self, filename: _PathOrFile, mode: str = ..., buffering: Optional[Any] = ..., compresslevel: int = ...) -> None: ... class BZ2Compressor(object): def __init__(self, compresslevel: int = ...) -> None: ... def compress(self, data: bytes) -> bytes: ... def flush(self) -> bytes: ... 
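# --- Illustrative usage sketch (not part of the stub): incremental compression
# with BZ2Compressor as typed above; the sample data is arbitrary.
import bz2

compressor = bz2.BZ2Compressor(9)                       # compresslevel=9
compressed = compressor.compress(b"x" * 10000) + compressor.flush()
assert bz2.decompress(compressed) == b"x" * 10000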
class BZ2Decompressor(object): if sys.version_info >= (3, 5): def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... else: def decompress(self, data: bytes) -> bytes: ... if sys.version_info >= (3, 3): @property def eof(self) -> bool: ... if sys.version_info >= (3, 5): @property def needs_input(self) -> bool: ... @property def unused_data(self) -> bytes: ... mypy-0.560/typeshed/stdlib/2and3/calendar.pyi0000644€tŠÔÚ€2›s®0000001251613215007212025203 0ustar jukkaDROPBOX\Domain Users00000000000000import datetime import sys from time import struct_time from typing import Any, Iterable, List, Optional, Sequence, Tuple, Union _LocaleType = Tuple[Optional[str], Optional[str]] class IllegalMonthError(ValueError): def __init__(self, month: int) -> None: ... def __str__(self) -> str: ... class IllegalWeekdayError(ValueError): def __init__(self, weekday: int) -> None: ... def __str__(self) -> str: ... def isleap(year: int) -> bool: ... def leapdays(y1: int, y2: int) -> int: ... def weekday(year: int, month: int, day: int) -> int: ... def monthrange(year: int, month: int) -> Tuple[int, int]: ... class Calendar: def __init__(self, firstweekday: int = ...) -> None: ... def getfirstweekday(self) -> int: ... def setfirstweekday(self, firstweekday: int) -> None: ... def iterweekdays(self) -> Iterable[int]: ... def itermonthdates(self, year: int, month: int) -> Iterable[datetime.date]: ... def itermonthdays2(self, year: int, month: int) -> Iterable[Tuple[int, int]]: ... def itermonthdays(self, year: int, month: int) -> Iterable[int]: ... def monthdatescalendar(self, year: int, month: int) -> List[List[datetime.date]]: ... def monthdays2calendar(self, year: int, month: int) -> List[List[Tuple[int, int]]]: ... def monthdayscalendar(self, year: int, month: int) -> List[List[int]]: ... def yeardatescalendar(self, year: int, width: int = ...) -> List[List[int]]: ... def yeardays2calendar(self, year: int, width: int = ...) -> List[List[Tuple[int, int]]]: ... def yeardayscalendar(self, year: int, width: int = ...) -> List[List[int]]: ... class TextCalendar(Calendar): def prweek(self, theweek: int, width: int) -> None: ... def formatday(self, day: int, weekday: int, width: int) -> str: ... def formatweek(self, theweek: int, width: int) -> str: ... def formatweekday(self, day: int, width: int) -> str: ... def formatweekheader(self, width: int) -> str: ... def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = ...) -> str: ... def prmonth(self, theyear: int, themonth: int, w: int = ..., l: int = ...) -> None: ... def formatmonth(self, theyear: int, themonth: int, w: int = ..., l: int = ...) -> str: ... def formatyear(self, theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> str: ... def pryear(self, theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> None: ... def firstweekday() -> int: ... def monthcalendar(year: int, month: int) -> List[List[int]]: ... def prweek(theweek: int, width: int) -> None: ... def week(theweek: int, width: int) -> str: ... def weekheader(width: int) -> str: ... def prmonth(theyear: int, themonth: int, w: int = ..., l: int = ...) -> None: ... def month(theyear: int, themonth: int, w: int = ..., l: int = ...) -> str: ... def calendar(theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> str: ... def prcal(theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> None: ... class HTMLCalendar(Calendar): def formatday(self, day: int, weekday: int) -> str: ... 
def formatweek(self, theweek: int) -> str: ... def formatweekday(self, day: int) -> str: ... def formatweekheader(self) -> str: ... def formatmonthname(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ... def formatmonth(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ... def formatyear(self, theyear: int, width: int = ...) -> str: ... def formatyearpage(self, theyear: int, width: int = ..., css: Optional[str] = ..., encoding: Optional[str] = ...) -> str: ... if sys.version_info < (3, 0): class TimeEncoding: def __init__(self, locale: _LocaleType) -> None: ... def __enter__(self) -> _LocaleType: ... def __exit__(self, *args: Any) -> None: ... else: class different_locale: def __init__(self, locale: _LocaleType) -> None: ... def __enter__(self) -> _LocaleType: ... def __exit__(self, *args: Any) -> None: ... class LocaleTextCalendar(TextCalendar): def __init__(self, firstweekday: int = ..., locale: Optional[_LocaleType] = ...) -> None: ... def formatweekday(self, day: int, width: int) -> str: ... def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = ...) -> str: ... class LocaleHTMLCalendar(HTMLCalendar): def __init__(self, firstweekday: int = ..., locale: Optional[_LocaleType] = ...) -> None: ... def formatweekday(self, day: int) -> str: ... def formatmonthname(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ... c = ... # type: TextCalendar def setfirstweekday(firstweekday: int) -> None: ... def format(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ... def formatstring(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ... def timegm(tuple: Union[Tuple[int, ...], struct_time]) -> int: ... # Data attributes day_name = ... # type: Sequence[str] day_abbr = ... # type: Sequence[str] month_name = ... # type: Sequence[str] month_abbr = ... # type: Sequence[str] # Below constants are not in docs or __all__, but enough people have used them # they are now effectively public. MONDAY = ... # type: int TUESDAY = ... # type: int WEDNESDAY = ... # type: int THURSDAY = ... # type: int FRIDAY = ... # type: int SATURDAY = ... # type: int SUNDAY = ... # type: int mypy-0.560/typeshed/stdlib/2and3/cgi.pyi0000644€tŠÔÚ€2›s®0000001215013215007212024166 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, AnyStr, Dict, IO, Iterable, List, Mapping, Optional, Tuple, TypeVar, Union _T = TypeVar('_T', bound=FieldStorage) def parse(fp: IO[Any] = ..., environ: Mapping[str, str] = ..., keep_blank_values: bool = ..., strict_parsing: bool = ...) -> Dict[str, List[str]]: ... def parse_qs(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ...) -> Dict[str, List[str]]: ... def parse_qsl(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ...) -> Dict[str, List[str]]: ... def parse_multipart(fp: IO[Any], pdict: Mapping[str, bytes]) -> Dict[str, List[bytes]]: ... def parse_header(s: str) -> Tuple[str, Dict[str, str]]: ... def test(environ: Mapping[str, str] = ...) -> None: ... def print_environ(environ: Mapping[str, str] = ...) -> None: ... def print_form(form: Dict[str, Any]) -> None: ... def print_directory() -> None: ... def print_environ_usage() -> None: ... if sys.version_info >= (3, 0): def escape(s: str, quote: bool = ...) -> str: ... else: def escape(s: AnyStr, quote: bool = ...) -> AnyStr: ... class MiniFieldStorage: # The first five "Any" attributes here are always None, but mypy doesn't support that filename = ... # type: Any list = ... # type: Any type = ... 
# type: Any file = ... # type: Optional[IO[bytes]] # Always None type_options = ... # type: Dict[Any, Any] disposition = ... # type: Any disposition_options = ... # type: Dict[Any, Any] headers = ... # type: Dict[Any, Any] name = ... # type: Any value = ... # type: Any def __init__(self, name: Any, value: Any) -> None: ... def __repr__(self) -> str: ... class FieldStorage(object): FieldStorageClass = ... # type: Optional[type] keep_blank_values = ... # type: int strict_parsing = ... # type: int qs_on_post = ... # type: Optional[str] headers = ... # type: Mapping[str, str] fp = ... # type: IO[bytes] encoding = ... # type: str errors = ... # type: str outerboundary = ... # type: bytes bytes_read = ... # type: int limit = ... # type: Optional[int] disposition = ... # type: str disposition_options = ... # type: Dict[str, str] filename = ... # type: Optional[str] file = ... # type: Optional[IO[bytes]] type = ... # type: str type_options = ... # type: Dict[str, str] innerboundary = ... # type: bytes length = ... # type: int done = ... # type: int list = ... # type: Optional[List[Any]] value = ... # type: Union[None, bytes, List[Any]] if sys.version_info >= (3, 0): def __init__(self, fp: IO[Any] = ..., headers: Mapping[str, str] = ..., outerboundary: bytes = ..., environ: Mapping[str, str] = ..., keep_blank_values: int = ..., strict_parsing: int = ..., limit: int = ..., encoding: str = ..., errors: str = ...) -> None: ... else: def __init__(self, fp: IO[Any] = ..., headers: Mapping[str, str] = ..., outerboundary: bytes = ..., environ: Mapping[str, str] = ..., keep_blank_values: int = ..., strict_parsing: int = ...) -> None: ... if sys.version_info >= (3, 0): def __enter__(self: _T) -> _T: ... def __exit__(self, *args: Any) -> None: ... def __repr__(self) -> str: ... def __iter__(self) -> Iterable[str]: ... def __getitem__(self, key: str) -> Any: ... def getvalue(self, key: str, default: Any = ...) -> Any: ... def getfirst(self, key: str, default: Any = ...) -> Any: ... def getlist(self, key: str) -> List[Any]: ... def keys(self) -> List[str]: ... if sys.version_info < (3, 0): def has_key(self, key: str) -> bool: ... def __contains__(self, key: str) -> bool: ... def __len__(self) -> int: ... if sys.version_info >= (3, 0): def __bool__(self) -> bool: ... else: def __nonzero__(self) -> bool: ... if sys.version_info >= (3, 0): # In Python 3 it returns bytes or str IO depending on an internal flag def make_file(self) -> IO[Any]: ... else: # In Python 2 it always returns bytes and ignores the "binary" flag def make_file(self, binary: Any = ...) -> IO[bytes]: ... if sys.version_info < (3, 0): from UserDict import UserDict class FormContentDict(UserDict): query_string = ... # type: str def __init__(self, environ: Mapping[str, str] = ..., keep_blank_values: int = ..., strict_parsing: int = ...) -> None: ... class SvFormContentDict(FormContentDict): def getlist(self, key: Any) -> Any: ... class InterpFormContentDict(SvFormContentDict): ... class FormContent(FormContentDict): # TODO this should have # def values(self, key: Any) -> Any: ... # but this is incompatible with the supertype, and adding '# type: ignore' triggers # a parse error in pytype (https://github.com/google/pytype/issues/53) def indexed_value(self, key: Any, location: int) -> Any: ... def value(self, key: Any) -> Any: ... def length(self, key: Any) -> int: ... def stripped(self, key: Any) -> Any: ... def pars(self) -> Dict[Any, Any]: ... 
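# --- Illustrative usage sketch (not part of the stub above): typical
# FieldStorage access in a CGI script; the field names are placeholders.
import cgi
import sys

form = cgi.FieldStorage()                       # parses QUERY_STRING / stdin
name = form.getfirst("name", "anonymous")       # first value or the default
tags = form.getlist("tag")                      # every value of a repeated field
sys.stdout.write("Content-Type: text/plain\r\n\r\n")
sys.stdout.write("hello %s, tags=%r\n" % (name, tags))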
mypy-0.560/typeshed/stdlib/2and3/chunk.pyi0000644€tŠÔÚ€2›s®0000000154313215007212024540 0ustar jukkaDROPBOX\Domain Users00000000000000# Source(py2): https://hg.python.org/cpython/file/2.7/Lib/chunk.py # Source(py3): https://github.com/python/cpython/blob/master/Lib/chunk.py from typing import IO class Chunk: closed = ... # type: bool align = ... # type: bool file = ... # type: IO[bytes] chunkname = ... # type: bytes chunksize = ... # type: int size_read = ... # type: int offset = ... # type: int seekable = ... # type: bool def __init__(self, file: IO[bytes], align: bool = ..., bigendian: bool = ..., inclheader: bool = ...) -> None: ... def getname(self) -> bytes: ... def getsize(self) -> int: ... def close(self) -> None: ... def isatty(self) -> bool: ... def seek(self, pos: int, whence: int = ...) -> None: ... def tell(self) -> int: ... def read(self, size: int = ...) -> bytes: ... def skip(self) -> None: ... mypy-0.560/typeshed/stdlib/2and3/cmath.pyi0000644€tŠÔÚ€2›s®0000000165213215007212024525 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the 'cmath' module.""" import sys from typing import Union, Tuple e = ... # type: float pi = ... # type: float _C = Union[float, complex] def acos(x: _C) -> complex: ... def acosh(x: _C) -> complex: ... def asin(x: _C) -> complex: ... def asinh(x: _C) -> complex: ... def atan(x: _C) -> complex: ... def atanh(x: _C) -> complex: ... def cos(x: _C) -> complex: ... def cosh(x: _C) -> complex: ... def exp(x: _C) -> complex: ... def isinf(z: _C) -> bool: ... def isnan(z: _C) -> bool: ... def log(x: _C, base: _C = ...) -> complex: ... def log10(x: _C) -> complex: ... def phase(z: _C) -> float: ... def polar(z: _C) -> Tuple[float, float]: ... def rect(r: float, phi: float) -> complex: ... def sin(x: _C) -> complex: ... def sinh(x: _C) -> complex: ... def sqrt(x: _C) -> complex: ... def tan(x: _C) -> complex: ... def tanh(x: _C) -> complex: ... if sys.version_info >= (3,): def isfinite(z: _C) -> bool: ... mypy-0.560/typeshed/stdlib/2and3/cmd.pyi0000644€tŠÔÚ€2›s®0000000361413215007212024174 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for cmd (Python 2/3) from typing import Any, Optional, Text, IO, List, Callable, Tuple class Cmd: prompt = ... # type: str identchars = ... # type: str ruler = ... # type: str lastcmd = ... # type: str intro = ... # type: Optional[Any] doc_leader = ... # type: str doc_header = ... # type: str misc_header = ... # type: str undoc_header = ... # type: str nohelp = ... # type: str use_rawinput = ... # type: bool stdin = ... # type: IO[str] stdout = ... # type: IO[str] cmdqueue = ... # type: List[str] completekey = ... # type: str def __init__(self, completekey: str = ..., stdin: Optional[IO[str]] = ..., stdout: Optional[IO[str]] = ...) -> None: ... old_completer = ... # type: Optional[Callable[[str, int], Optional[str]]] def cmdloop(self, intro: Optional[Any] = ...) -> None: ... def precmd(self, line: str) -> str: ... def postcmd(self, stop: bool, line: str) -> bool: ... def preloop(self) -> None: ... def postloop(self) -> None: ... def parseline(self, line: str) -> Tuple[Optional[str], Optional[str], str]: ... def onecmd(self, line: str) -> bool: ... def emptyline(self) -> bool: ... def default(self, line: str) -> bool: ... def completedefault(self, *ignored: Any) -> List[str]: ... def completenames(self, text: str, *ignored: Any) -> List[str]: ... completion_matches = ... # type: Optional[List[str]] def complete(self, text: str, state: int) -> Optional[List[str]]: ... def get_names(self) -> List[str]: ... 
# Only the first element of args matters. def complete_help(self, *args: Any) -> List[str]: ... def do_help(self, arg: Optional[str]) -> None: ... def print_topics(self, header: str, cmds: Optional[List[str]], cmdlen: Any, maxcol: int) -> None: ... def columnize(self, list: Optional[List[str]], displaywidth: int = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/code.pyi0000644€tŠÔÚ€2›s®0000000224313215007212024340 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for code from typing import Any, Callable, Mapping, Optional from types import CodeType class InteractiveInterpreter: def __init__(self, locals: Optional[Mapping[str, Any]] = ...) -> None: ... def runsource(self, source: str, filename: str = ..., symbol: str = ...) -> bool: ... def runcode(self, code: CodeType) -> None: ... def showsyntaxerror(self, filename: Optional[str] = ...) -> None: ... def showtraceback(self) -> None: ... def write(self, data: str) -> None: ... class InteractiveConsole(InteractiveInterpreter): def __init__(self, locals: Optional[Mapping[str, Any]] = ..., filename: str = ...) -> None: ... def interact(self, banner: Optional[str] = ...) -> None: ... def push(self, line: str) -> bool: ... def resetbuffer(self) -> None: ... def raw_input(self, prompt: str = ...) -> str: ... def interact(banner: Optional[str] = ..., readfunc: Optional[Callable[[str], str]] = ..., local: Optional[Mapping[str, Any]] = ...) -> None: ... def compile_command(source: str, filename: str = ..., symbol: str = ...) -> Optional[CodeType]: ... mypy-0.560/typeshed/stdlib/2and3/codecs.pyi0000644€tŠÔÚ€2›s®0000002075413215007212024675 0ustar jukkaDROPBOX\Domain Users00000000000000# Better codecs stubs hand-written by o11c. # https://docs.python.org/2/library/codecs.html and https://docs.python.org/3/library/codecs.html import sys from typing import ( BinaryIO, Callable, IO, Iterable, Iterator, List, Optional, Text, TextIO, Tuple, Type, TypeVar, Union, ) from abc import abstractmethod import types # TODO: this only satisfies the most common interface, where # bytes (py2 str) is the raw form and str (py2 unicode) is the cooked form. # In the long run, both should become template parameters maybe? # There *are* bytes->bytes and str->str encodings in the standard library. # They are much more common in Python 2 than in Python 3. # Python 3.5 supposedly might change something there. _decoded = Text _encoded = bytes # TODO: It is not possible to specify these signatures correctly, because # they have an optional positional or keyword argument for errors=. _encode_type = Callable[[_decoded], _encoded] # signature of Codec().encode _decode_type = Callable[[_encoded], _decoded] # signature of Codec().decode _stream_reader_type = Callable[[IO[_encoded]], 'StreamReader'] # signature of StreamReader __init__ _stream_writer_type = Callable[[IO[_encoded]], 'StreamWriter'] # signature of StreamWriter __init__ _incremental_encoder_type = Callable[[], 'IncrementalEncoder'] # signature of IncrementalEncoder __init__ _incremental_decoder_type = Callable[[], 'IncrementalDecoder'] # signature of IncrementalDecoder __init__ def encode(obj: _decoded, encoding: str = ..., errors: str = ...) -> _encoded: ... def decode(obj: _encoded, encoding: str = ..., errors: str = ...) -> _decoded: ... def lookup(encoding: str) -> 'CodecInfo': ... class CodecInfo(Tuple[_encode_type, _decode_type, _stream_reader_type, _stream_writer_type]): encode = ... # type: _encode_type decode = ... # type: _decode_type streamreader = ... # type: _stream_reader_type streamwriter = ... 
# type: _stream_writer_type incrementalencoder = ... # type: _incremental_encoder_type incrementaldecoder = ... # type: _incremental_decoder_type name = ... # type: str def __init__(self, encode: _encode_type, decode: _decode_type, streamreader: _stream_reader_type = ..., streamwriter: _stream_writer_type = ..., incrementalencoder: _incremental_encoder_type = ..., incrementaldecoder: _incremental_decoder_type = ..., name: str = ...) -> None: ... def getencoder(encoding: str) -> _encode_type: ... def getdecoder(encoding: str) -> _decode_type: ... def getincrementalencoder(encoding: str) -> _incremental_encoder_type: ... def getincrementaldecoder(encoding: str) -> _incremental_decoder_type: ... def getreader(encoding: str) -> _stream_reader_type: ... def getwriter(encoding: str) -> _stream_writer_type: ... def register(search_function: Callable[[str], CodecInfo]) -> None: ... def open(filename: str, mode: str = ..., encoding: str = ..., errors: str = ..., buffering: int = ...) -> StreamReaderWriter: ... def EncodedFile(file: IO[_encoded], data_encoding: str, file_encoding: str = ..., errors: str = ...) -> 'StreamRecoder': ... def iterencode(iterator: Iterable[_decoded], encoding: str, errors: str = ...) -> Iterator[_encoded]: ... def iterdecode(iterator: Iterable[_encoded], encoding: str, errors: str = ...) -> Iterator[_decoded]: ... BOM = b'' BOM_BE = b'' BOM_LE = b'' BOM_UTF8 = b'' BOM_UTF16 = b'' BOM_UTF16_BE = b'' BOM_UTF16_LE = b'' BOM_UTF32 = b'' BOM_UTF32_BE = b'' BOM_UTF32_LE = b'' # It is expected that different actions be taken depending on which of the # three subclasses of `UnicodeError` is actually ...ed. However, the Union # is still needed for at least one of the cases. def register_error(name: str, error_handler: Callable[[UnicodeError], Tuple[Union[str, bytes], int]]) -> None: ... def lookup_error(name: str) -> Callable[[UnicodeError], Tuple[Union[str, bytes], int]]: ... def strict_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]: ... def replace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]: ... def ignore_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]: ... def xmlcharrefreplace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]: ... def backslashreplace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]: ... class Codec: # These are sort of @abstractmethod but sort of not. # The StreamReader and StreamWriter subclasses only implement one. def encode(self, input: _decoded, errors: str = ...) -> Tuple[_encoded, int]: ... def decode(self, input: _encoded, errors: str = ...) -> Tuple[_decoded, int]: ... class IncrementalEncoder: errors = ... # type: str def __init__(self, errors: str = ...) -> None: ... @abstractmethod def encode(self, object: _decoded, final: bool = ...) -> _encoded: ... def reset(self) -> None: ... # documentation says int but str is needed for the subclass. def getstate(self) -> Union[int, _decoded]: ... def setstate(self, state: Union[int, _decoded]) -> None: ... class IncrementalDecoder: errors = ... # type: str def __init__(self, errors: str = ...) -> None: ... @abstractmethod def decode(self, object: _encoded, final: bool = ...) -> _decoded: ... def reset(self) -> None: ... def getstate(self) -> Tuple[_encoded, int]: ... def setstate(self, state: Tuple[_encoded, int]) -> None: ... # These are not documented but used in encodings/*.py implementations. class BufferedIncrementalEncoder(IncrementalEncoder): buffer = ... # type: str def __init__(self, errors: str = ...) 
-> None: ... @abstractmethod def _buffer_encode(self, input: _decoded, errors: str, final: bool) -> _encoded: ... def encode(self, input: _decoded, final: bool = ...) -> _encoded: ... class BufferedIncrementalDecoder(IncrementalDecoder): buffer = ... # type: bytes def __init__(self, errors: str = ...) -> None: ... @abstractmethod def _buffer_decode(self, input: _encoded, errors: str, final: bool) -> Tuple[_decoded, int]: ... def decode(self, object: _encoded, final: bool = ...) -> _decoded: ... # TODO: it is not possible to specify the requirement that all other # attributes and methods are passed-through from the stream. class StreamWriter(Codec): errors = ... # type: str def __init__(self, stream: IO[_encoded], errors: str = ...) -> None: ... def write(self, obj: _decoded) -> None: ... def writelines(self, list: Iterable[_decoded]) -> None: ... def reset(self) -> None: ... class StreamReader(Codec): errors = ... # type: str def __init__(self, stream: IO[_encoded], errors: str = ...) -> None: ... def read(self, size: int = ..., chars: int = ..., firstline: bool = ...) -> _decoded: ... def readline(self, size: int = ..., keepends: bool = ...) -> _decoded: ... def readlines(self, sizehint: int = ..., keepends: bool = ...) -> List[_decoded]: ... def reset(self) -> None: ... _T = TypeVar('_T', bound='StreamReaderWriter') # Doesn't actually inherit from TextIO, but wraps a BinaryIO to provide text reading and writing # and delegates attributes to the underlying binary stream with __getattr__. class StreamReaderWriter(TextIO): def __init__(self, stream: IO[_encoded], Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None: ... def read(self, size: int= ...) -> _decoded: ... def readline(self, size: Optional[int] = ...) -> _decoded: ... def readlines(self, sizehint: Optional[int] = ...) -> List[_decoded]: ... def __next__(self) -> _decoded: ... def __iter__(self: _T) -> _T: ... # This actually returns None, but that's incompatible with the supertype def write(self, data: _decoded) -> int: ... def writelines(self, list: Iterable[_decoded]) -> None: ... def reset(self) -> None: ... # Same as write() def seek(self, offset: int, whence: int = ...) -> int: ... def __enter__(self: _T) -> _T: ... def __exit__(self, typ: Optional[Type[BaseException]], exc: Optional[BaseException], tb: Optional[types.TracebackType]) -> bool: ... class StreamRecoder(BinaryIO): def __init__(self, stream: IO[_encoded], encode: _encode_type, decode: _decode_type, Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/codeop.pyi0000644€tŠÔÚ€2›s®0000000122513215007212024676 0ustar jukkaDROPBOX\Domain Users00000000000000# Source(py2): https://hg.python.org/cpython/file/2.7/Lib/codeop.py # Source(py3): https://github.com/python/cpython/blob/master/Lib/codeop.py from types import CodeType from typing import Optional def compile_command(source: str, filename: str = ..., symbol: str = ...) -> Optional[CodeType]: ... class Compile: flags = ... # type: int def __init__(self) -> None: ... def __call__(self, source: str, filename: str, symbol: str) -> CodeType: ... class CommandCompiler: compiler = ... # type: Compile def __init__(self) -> None: ... def __call__(self, source: str, filename: str = ..., symbol: str = ...) -> Optional[CodeType]: ... 
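# --- Illustrative usage sketch (not part of the stubs above): feeding a byte
# stream through an IncrementalDecoder, as typed in the codecs stub; the UTF-8
# sample deliberately splits a multi-byte character across chunks.
import codecs

decoder = codecs.getincrementaldecoder("utf-8")()
chunks = [b"\xe2\x82", b"\xac and more"]          # EURO SIGN split across reads
text = u"".join(decoder.decode(chunk) for chunk in chunks)
text += decoder.decode(b"", final=True)           # flush any buffered bytes
assert text == u"\u20ac and more"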
mypy-0.560/typeshed/stdlib/2and3/colorsys.pyi0000644€tŠÔÚ€2›s®0000000120213215007212025275 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for colorsys from typing import Tuple def rgb_to_yiq(r: float, g: float, b: float) -> Tuple[float, float, float]: ... def yiq_to_rgb(y: float, i: float, q: float) -> Tuple[float, float, float]: ... def rgb_to_hls(r: float, g: float, b: float) -> Tuple[float, float, float]: ... def hls_to_rgb(h: float, l: float, s: float) -> Tuple[float, float, float]: ... def rgb_to_hsv(r: float, g: float, b: float) -> Tuple[float, float, float]: ... def hsv_to_rgb(h: float, s: float, v: float) -> Tuple[float, float, float]: ... # TODO undocumented ONE_SIXTH = ... # type: float ONE_THIRD = ... # type: float TWO_THIRD = ... # type: float mypy-0.560/typeshed/stdlib/2and3/contextlib.pyi0000644€tŠÔÚ€2›s®0000000461313215007212025604 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for contextlib from typing import ( Any, Callable, Generator, IO, Iterable, Iterator, Optional, Type, Generic, TypeVar ) from types import TracebackType import sys # Aliased here for backwards compatibility; TODO eventually remove this from typing import ContextManager as ContextManager if sys.version_info >= (3, 5): from typing import AsyncContextManager, AsyncIterator if sys.version_info >= (3, 6): from typing import ContextManager as AbstractContextManager _T = TypeVar('_T') _ExitFunc = Callable[[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]], bool] _CM_EF = TypeVar('_CM_EF', ContextManager, _ExitFunc) if sys.version_info >= (3, 2): class GeneratorContextManager(ContextManager[_T], Generic[_T]): def __call__(self, func: Callable[..., _T]) -> Callable[..., _T]: ... def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., GeneratorContextManager[_T]]: ... else: def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., ContextManager[_T]]: ... if sys.version_info >= (3, 7): def asynccontextmanager(func: Callable[..., AsyncIterator[_T]]) -> Callable[..., AsyncContextManager[_T]]: ... if sys.version_info < (3,): def nested(*mgr: ContextManager[Any]) -> ContextManager[Iterable[Any]]: ... class closing(ContextManager[_T], Generic[_T]): def __init__(self, thing: _T) -> None: ... if sys.version_info >= (3, 4): class suppress(ContextManager[None]): def __init__(self, *exceptions: Type[BaseException]) -> None: ... class redirect_stdout(ContextManager[None]): def __init__(self, new_target: IO[str]) -> None: ... if sys.version_info >= (3, 5): class redirect_stderr(ContextManager[None]): def __init__(self, new_target: IO[str]) -> None: ... if sys.version_info >= (3,): class ContextDecorator: def __call__(self, func: Callable[..., None]) -> Callable[..., ContextManager[None]]: ... class ExitStack(ContextManager[ExitStack]): def __init__(self) -> None: ... def enter_context(self, cm: ContextManager[_T]) -> _T: ... def push(self, exit: _CM_EF) -> _CM_EF: ... def callback(self, callback: Callable[..., None], *args: Any, **kwds: Any) -> Callable[..., None]: ... def pop_all(self) -> ExitStack: ... def close(self) -> None: ... mypy-0.560/typeshed/stdlib/2and3/copy.pyi0000644€tŠÔÚ€2›s®0000000052713215007212024403 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for copy from typing import TypeVar, Optional, Dict, Any _T = TypeVar('_T') # None in CPython but non-None in Jython PyStringMap: Any # Note: memo and _nil are internal kwargs. def deepcopy(x: _T, memo: Optional[Dict[int, _T]] = ..., _nil: Any = ...) -> _T: ... 
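# --- Illustrative usage sketch (not part of the stub): deepcopy() duplicates
# nested containers, while the shallow copy() declared next shares them.
import copy

nested = {"xs": [1, 2, 3]}
clone = copy.deepcopy(nested)
clone["xs"].append(4)
assert nested["xs"] == [1, 2, 3]    # the original inner list is untouched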
def copy(x: _T) -> _T: ... class Error(Exception): ... error = Error mypy-0.560/typeshed/stdlib/2and3/cProfile.pyi0000644€tŠÔÚ€2›s®0000000167313215007212025177 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, Optional, TypeVar def run(statement: str, filename: Optional[str] = ..., sort: int = ...) -> None: ... def runctx(statement: str, globals: Dict[str, Any], locals: Dict[str, Any], filename: Optional[str] = ..., sort: int = ...) -> None: ... _SelfT = TypeVar('_SelfT', bound='Profile') _T = TypeVar('_T') class Profile: def __init__(self, custom_timer: Callable[[], float] = ..., time_unit: float = ..., subcalls: bool = ..., builtins: bool = ...) -> None: ... def enable(self) -> None: ... def disable(self) -> None: ... def print_stats(self, sort: int = ...) -> None: ... def dump_stats(self, file: str) -> None: ... def create_stats(self) -> None: ... def run(self: _SelfT, cmd: str) -> _SelfT: ... def runctx(self: _SelfT, cmd: str, globals: Dict[str, Any], locals: Dict[str, Any]) -> _SelfT: ... def runcall(self, func: Callable[..., _T], *args: Any, **kw: Any) -> _T: ... mypy-0.560/typeshed/stdlib/2and3/crypt.pyi0000644€tŠÔÚ€2›s®0000000070013215007212024563 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import List, NamedTuple, Optional, Union if sys.version_info >= (3, 3): class _Method: ... METHOD_CRYPT: _Method METHOD_MD5: _Method METHOD_SHA256: _Method METHOD_SHA512: _Method methods: List[_Method] def mksalt(method: Optional[_Method] = ...) -> str: ... def crypt(word: str, salt: Optional[Union[str, _Method]] = ...) -> str: ... else: def crypt(word: str, salt: str) -> str: ... mypy-0.560/typeshed/stdlib/2and3/csv.pyi0000644€tŠÔÚ€2›s®0000000636513215007212024232 0ustar jukkaDROPBOX\Domain Users00000000000000from collections import OrderedDict import sys from typing import Any, Dict, Iterable, Iterator, List, Optional, Sequence, Union from _csv import (_reader, _writer, reader as reader, writer as writer, register_dialect as register_dialect, unregister_dialect as unregister_dialect, get_dialect as get_dialect, list_dialects as list_dialects, field_size_limit as field_size_limit, QUOTE_ALL as QUOTE_ALL, QUOTE_MINIMAL as QUOTE_MINIMAL, QUOTE_NONE as QUOTE_NONE, QUOTE_NONNUMERIC as QUOTE_NONNUMERIC, Error as Error, ) _Dialect = Union[str, Dialect] _DictRow = Dict[str, Any] class Dialect(object): delimiter = ... # type: str quotechar = ... # type: Optional[str] escapechar = ... # type: Optional[str] doublequote = ... # type: bool skipinitialspace = ... # type: bool lineterminator = ... # type: str quoting = ... # type: int def __init__(self) -> None: ... class excel(Dialect): delimiter = ... # type: str quotechar = ... # type: str doublequote = ... # type: bool skipinitialspace = ... # type: bool lineterminator = ... # type: str quoting = ... # type: int class excel_tab(excel): delimiter = ... # type: str if sys.version_info >= (3,): class unix_dialect(Dialect): delimiter = ... # type: str quotechar = ... # type: str doublequote = ... # type: bool skipinitialspace = ... # type: bool lineterminator = ... # type: str quoting = ... # type: int if sys.version_info >= (3, 6): _DRMapping = OrderedDict[str, str] else: _DRMapping = Dict[str, str] class DictReader(Iterator[_DRMapping]): restkey = ... # type: Optional[str] restval = ... # type: Optional[str] reader = ... # type: _reader dialect = ... # type: _Dialect line_num = ... # type: int fieldnames = ... 
# type: Sequence[str] def __init__(self, f: Iterable[str], fieldnames: Sequence[str] = ..., restkey: Optional[str] = ..., restval: Optional[str] = ..., dialect: _Dialect = ..., *args: Any, **kwds: Any) -> None: ... def __iter__(self) -> 'DictReader': ... if sys.version_info >= (3,): def __next__(self) -> _DRMapping: ... else: def next(self) -> _DRMapping: ... class DictWriter(object): fieldnames = ... # type: Sequence[str] restval = ... # type: Optional[Any] extrasaction = ... # type: str writer = ... # type: _writer def __init__(self, f: Any, fieldnames: Sequence[str], restval: Optional[Any] = ..., extrasaction: str = ..., dialect: _Dialect = ..., *args: Any, **kwds: Any) -> None: ... def writeheader(self) -> None: ... def writerow(self, rowdict: _DictRow) -> None: ... def writerows(self, rowdicts: Iterable[_DictRow]) -> None: ... class Sniffer(object): preferred = ... # type: List[str] def __init__(self) -> None: ... def sniff(self, sample: str, delimiters: Optional[str] = ...) -> Dialect: ... def has_header(self, sample: str) -> bool: ... mypy-0.560/typeshed/stdlib/2and3/difflib.pyi0000644€tŠÔÚ€2›s®0000000670413215007212025033 0ustar jukkaDROPBOX\Domain Users00000000000000# Based on https://docs.python.org/2.7/library/difflib.html and https://docs.python.org/3.2/library/difflib.html # TODO: Support unicode in Python 2? import sys from typing import ( TypeVar, Callable, Iterable, Iterator, List, NamedTuple, Sequence, Tuple, Generic, Optional ) _T = TypeVar('_T') Match = NamedTuple('Match', [ ('a', int), ('b', int), ('size', int), ]) class SequenceMatcher(Generic[_T]): def __init__(self, isjunk: Optional[Callable[[_T], bool]] = ..., a: Sequence[_T] = ..., b: Sequence[_T] = ..., autojunk: bool = ...) -> None: ... def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ... def set_seq1(self, a: Sequence[_T]) -> None: ... def set_seq2(self, b: Sequence[_T]) -> None: ... def find_longest_match(self, alo: int, ahi: int, blo: int, bhi: int) -> Tuple[int, int, int]: ... def get_matching_blocks(self) -> List[Match]: ... def get_opcodes(self) -> List[Tuple[str, int, int, int, int]]: ... def get_grouped_opcodes(self, n: int = ... ) -> Iterable[Tuple[str, int, int, int, int]]: ... def ratio(self) -> float: ... def quick_ratio(self) -> float: ... def real_quick_ratio(self) -> float: ... def get_close_matches(word: Sequence[_T], possibilities: List[Sequence[_T]], n: int = ..., cutoff: float = ...) -> List[Sequence[_T]]: ... class Differ: def __init__(self, linejunk: Callable[[str], bool] = ..., charjunk: Callable[[str], bool] = ...) -> None: ... def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterator[str]: ... def IS_LINE_JUNK(str) -> bool: ... def IS_CHARACTER_JUNK(str) -> bool: ... def unified_diff(a: Sequence[str], b: Sequence[str], fromfile: str = ..., tofile: str = ..., fromfiledate: str = ..., tofiledate: str = ..., n: int = ..., lineterm: str = ...) -> Iterator[str]: ... def context_diff(a: Sequence[str], b: Sequence[str], fromfile: str=..., tofile: str = ..., fromfiledate: str = ..., tofiledate: str = ..., n: int = ..., lineterm: str = ...) -> Iterator[str]: ... def ndiff(a: Sequence[str], b: Sequence[str], linejunk: Callable[[str], bool] = ..., charjunk: Callable[[str], bool] = ... ) -> Iterator[str]: ... class HtmlDiff(object): def __init__(self, tabsize: int = ..., wrapcolumn: int = ..., linejunk: Callable[[str], bool] = ..., charjunk: Callable[[str], bool] = ... ) -> None: ... 
def make_file(self, fromlines: Sequence[str], tolines: Sequence[str], fromdesc: str = ..., todesc: str = ..., context: bool = ..., numlines: int = ...) -> str: ... def make_table(self, fromlines: Sequence[str], tolines: Sequence[str], fromdesc: str = ..., todesc: str = ..., context: bool = ..., numlines: int = ...) -> str: ... def restore(delta: Iterable[str], which: int) -> Iterator[int]: ... if sys.version_info >= (3, 5): def diff_bytes( dfunc: Callable[[Sequence[str], Sequence[str], str, str, str, str, int, str], Iterator[str]], a: Sequence[bytes], b: Sequence[bytes], fromfile: bytes = ..., tofile: bytes = ..., fromfiledate: bytes = ..., tofiledate: bytes = ..., n: int = ..., lineterm: bytes = ... ) -> Iterator[bytes]: ... mypy-0.560/typeshed/stdlib/2and3/dis.pyi0000644€tŠÔÚ€2›s®0000000552413215007212024212 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Union, Iterator, Tuple, Optional, Any, IO, NamedTuple, Dict import sys import types from opcode import (hasconst as hasconst, hasname as hasname, hasjrel as hasjrel, hasjabs as hasjabs, haslocal as haslocal, hascompare as hascompare, hasfree as hasfree, cmp_op as cmp_op, opname as opname, opmap as opmap, HAVE_ARGUMENT as HAVE_ARGUMENT, EXTENDED_ARG as EXTENDED_ARG) if sys.version_info >= (3, 4): from opcode import stack_effect as stack_effect if sys.version_info >= (3, 6): from opcode import hasnargs as hasnargs _have_code = Union[types.MethodType, types.FunctionType, types.CodeType, type] _have_code_or_string = Union[_have_code, str, bytes] if sys.version_info >= (3, 4): Instruction = NamedTuple( "Instruction", [ ('opname', str), ('opcode', int), ('arg', Optional[int]), ('argval', Any), ('argrepr', str), ('offset', int), ('starts_line', Optional[int]), ('is_jump_target', bool) ] ) class Bytecode: codeobj = ... # type: types.CodeType first_line = ... # type: int def __init__(self, x: _have_code_or_string, *, first_line: Optional[int] = ..., current_offset: Optional[int] = ...) -> None: ... def __iter__(self) -> Iterator[Instruction]: ... def __repr__(self) -> str: ... def info(self) -> str: ... def dis(self) -> str: ... @classmethod def from_traceback(cls, tb: types.TracebackType) -> Bytecode: ... COMPILER_FLAG_NAMES = ... # type: Dict[int, str] def findlabels(code: _have_code) -> List[int]: ... def findlinestarts(code: _have_code) -> Iterator[Tuple[int, int]]: ... if sys.version_info >= (3, 0): def pretty_flags(flags: int) -> str: ... def code_info(x: _have_code_or_string) -> str: ... if sys.version_info >= (3, 4): def dis(x: _have_code_or_string = ..., *, file: Optional[IO[str]] = ...) -> None: ... def distb(tb: Optional[types.TracebackType] = ..., *, file: Optional[IO[str]] = ...) -> None: ... def disassemble(co: _have_code, lasti: int = ..., *, file: Optional[IO[str]] = ...) -> None: ... def disco(co: _have_code, lasti: int = ..., *, file: Optional[IO[str]] = ...) -> None: ... def show_code(co: _have_code, *, file: Optional[IO[str]] = ...) -> None: ... def get_instructions(x: _have_code, *, first_line: Optional[int] = ...) -> Iterator[Instruction]: ... else: def dis(x: _have_code_or_string = ...) -> None: ... def distb(tb: types.TracebackType = ...) -> None: ... def disassemble(co: _have_code, lasti: int = ...) -> None: ... def disco(co: _have_code, lasti: int = ...) -> None: ... if sys.version_info >= (3, 0): def show_code(co: _have_code) -> None: ... 
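# --- Illustrative usage sketch (not part of the stub above): disassembling a
# small function and walking the typed Instruction tuples (Python 3.4+ API).
import dis

def add(a, b):
    return a + b

dis.dis(add)                                    # print the bytecode listing
for ins in dis.get_instructions(add):           # Iterator[Instruction]
    print(ins.offset, ins.opname, ins.argrepr)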
mypy-0.560/typeshed/stdlib/2and3/distutils/0000755€tŠÔÚ€2›s®0000000000013215007244024733 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212027176 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/archive_util.pyi0000644€tŠÔÚ€2›s®0000000101513215007212030124 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.archive_util from typing import Optional def make_archive(base_name: str, format: str, root_dir: Optional[str] = ..., base_dir: Optional[str] = ..., verbose: int = ..., dry_run: int = ...) -> str: ... def make_tarball(base_name: str, base_dir: str, compress: Optional[str] = ..., verbose: int = ..., dry_run: int = ...) -> str: ... def make_zipfile(base_name: str, base_dir: str, verbose: int = ..., dry_run: int = ...) -> str: ... mypy-0.560/typeshed/stdlib/2and3/distutils/bcppcompiler.pyi0000644€tŠÔÚ€2›s®0000000016313215007212030130 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.bcppcompiler from distutils.ccompiler import CCompiler class BCPPCompiler(CCompiler): ... mypy-0.560/typeshed/stdlib/2and3/distutils/ccompiler.pyi0000644€tŠÔÚ€2›s®0000001546113215007212027435 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.ccompiler from typing import Any, Callable, List, Optional, Tuple, Union _Macro = Union[Tuple[str], Tuple[str, str]] def gen_lib_options(compiler: CCompiler, library_dirs: List[str], runtime_library_dirs: List[str], libraries: List[str]) -> List[str]: ... def gen_preprocess_options(macros: List[_Macro], include_dirs: List[str]) -> List[str]: ... def get_default_compiler(osname: Optional[str] = ..., platform: Optional[str] = ...) -> str: ... def new_compiler(plat: Optional[str] = ..., compiler: Optional[str] = ..., verbose: int = ..., dry_run: int = ..., force: int = ...) -> CCompiler: ... def show_compilers() -> None: ... class CCompiler: def __init__(self, verbose: int = ..., dry_run: int = ..., force: int = ...) -> None: ... def add_include_dir(self, dir: str) -> None: ... def set_include_dirs(self, dirs: List[str]) -> None: ... def add_library(self, libname: str) -> None: ... def set_libraries(self, libnames: List[str]) -> None: ... def add_library_dir(self, dir: str) -> None: ... def set_library_dirs(self, dirs: List[str]) -> None: ... def add_runtime_library_dir(self, dir: str) -> None: ... def set_runtime_library_dirs(self, dirs: List[str]) -> None: ... def define_macro(self, name: str, value: Optional[str] = ...) -> None: ... def undefine_macro(self, name: str) -> None: ... def add_link_object(self, object: str) -> None: ... def set_link_objects(self, objects: List[str]) -> None: ... def detect_language(self, sources: Union[str, List[str]]) -> Optional[str]: ... def find_library_file(self, dirs: List[str], lib: str, debug: bool = ...) -> Optional[str]: ... def has_function(self, funcname: str, includes: Optional[List[str]] = ..., include_dirs: Optional[List[str]] = ..., libraries: Optional[List[str]] = ..., library_dirs: Optional[List[str]] = ...) -> bool: ... def library_dir_option(self, dir: str) -> str: ... def library_option(self, lib: str) -> str: ... def runtime_library_dir_option(self, dir: str) -> str: ... def set_executables(self, **args: str) -> None: ... 
def compile(self, sources: List[str], output_dir: Optional[str] = ..., macros: Optional[_Macro] = ..., include_dirs: Optional[List[str]] = ..., debug: bool = ..., extra_preargs: Optional[List[str]] = ..., extra_postargs: Optional[List[str]] = ..., depends: Optional[List[str]] = ...) -> List[str]: ... def create_static_lib(self, objects: List[str], output_libname: str, output_dir: Optional[str] = ..., debug: bool = ..., target_lang: Optional[str] = ...) -> None: ... def link(self, target_desc: str, objects: List[str], output_filename: str, output_dir: Optional[str] = ..., libraries: Optional[List[str]] = ..., library_dirs: Optional[List[str]] = ..., runtime_library_dirs: Optional[List[str]] = ..., export_symbols: Optional[List[str]] = ..., debug: bool = ..., extra_preargs: Optional[List[str]] = ..., extra_postargs: Optional[List[str]] = ..., build_temp: Optional[str] = ..., target_lang: Optional[str] = ...) -> None: ... def link_executable(self, objects: List[str], output_progname: str, output_dir: Optional[str] = ..., libraries: Optional[List[str]] = ..., library_dirs: Optional[List[str]] = ..., runtime_library_dirs: Optional[List[str]] = ..., debug: bool = ..., extra_preargs: Optional[List[str]] = ..., extra_postargs: Optional[List[str]] = ..., target_lang: Optional[str] = ...) -> None: ... def link_shared_lib(self, objects: List[str], output_libname: str, output_dir: Optional[str] = ..., libraries: Optional[List[str]] = ..., library_dirs: Optional[List[str]] = ..., runtime_library_dirs: Optional[List[str]] = ..., export_symbols: Optional[List[str]] = ..., debug: bool = ..., extra_preargs: Optional[List[str]] = ..., extra_postargs: Optional[List[str]] = ..., build_temp: Optional[str] = ..., target_lang: Optional[str] = ...) -> None: ... def link_shared_object(self, objects: List[str], output_filename: str, output_dir: Optional[str] = ..., libraries: Optional[List[str]] = ..., library_dirs: Optional[List[str]] = ..., runtime_library_dirs: Optional[List[str]] = ..., export_symbols: Optional[List[str]] = ..., debug: bool = ..., extra_preargs: Optional[List[str]] = ..., extra_postargs: Optional[List[str]] = ..., build_temp: Optional[str] = ..., target_lang: Optional[str] = ...) -> None: ... def preprocess(self, source: str, output_file: Optional[str] = ..., macros: Optional[List[_Macro]] = ..., include_dirs: Optional[List[str]] = ..., extra_preargs: Optional[List[str]] = ..., extra_postargs: Optional[List[str]] = ...) -> None: ... def executable_filename(self, basename: str, strip_dir: int = ..., output_dir: str = ...) -> str: ... def library_filename(self, libname: str, lib_type: str = ..., strip_dir: int = ..., output_dir: str = ...) -> str: ... def object_filenames(self, source_filenames: List[str], strip_dir: int = ..., output_dir: str = ...) -> List[str]: ... def shared_object_filename(self, basename: str, strip_dir: int = ..., output_dir: str = ...) -> str: ... def execute(self, func: Callable[..., None], args: Tuple[Any, ...], msg: Optional[str] = ..., level: int = ...) -> None: ... def spawn(self, cmd: List[str]) -> None: ... def mkpath(self, name: str, mode: int = ...) -> None: ... def move_file(self, src: str, dst: str) -> str: ... def announce(self, msg: str, level: int = ...) -> None: ... def warn(self, msg: str) -> None: ... def debug_print(self, msg: str) -> None: ... 
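# --- Illustrative usage sketch (not part of the stub): driving a CCompiler
# obtained from new_compiler(); 'example.c' and the 'build' directory are
# placeholders, and a working C toolchain is assumed.
from distutils.ccompiler import new_compiler

cc = new_compiler(verbose=1)                    # platform-default compiler
cc.define_macro("NDEBUG")
objects = cc.compile(["example.c"], output_dir="build")
cc.link_shared_lib(objects, "example", output_dir="build")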
mypy-0.560/typeshed/stdlib/2and3/distutils/cmd.pyi0000644€tŠÔÚ€2›s®0000000505413215007212026220 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.cmd from typing import Callable, List, Tuple, Union, Optional, Iterable, Any, Text from abc import abstractmethod from distutils.dist import Distribution class Command: sub_commands = ... # type: List[Tuple[str, Union[Callable[[], bool], str, None]]] def __init__(self, dist: Distribution) -> None: ... @abstractmethod def initialize_options(self) -> None: ... @abstractmethod def finalize_options(self) -> None: ... @abstractmethod def run(self) -> None: ... def announce(self, msg: Text, level: int = ...) -> None: ... def debug_print(self, msg: Text) -> None: ... def ensure_string(self, option: str, default: Optional[str] = ...) -> None: ... def ensure_string_list(self, option: Union[str, List[str]]) -> None: ... def ensure_filename(self, option: str) -> None: ... def ensure_dirname(self, option: str) -> None: ... def get_command_name(self) -> str: ... def set_undefined_options(self, src_cmd: Text, *option_pairs: Tuple[str, str]) -> None: ... def get_finalized_command(self, command: Text, create: int = ...) -> Command: ... def reinitialize_command(self, command: Union[Command, Text], reinit_subcommands: int = ...) -> Command: ... def run_command(self, command: Text) -> None: ... def get_sub_commands(self) -> List[str]: ... def warn(self, msg: Text) -> None: ... def execute(self, func: Callable[..., Any], args: Iterable[Any], msg: Optional[Text] = ..., level: int = ...) -> None: ... def mkpath(self, name: str, mode: int = ...) -> None: ... def copy_file(self, infile: str, outfile: str, preserve_mode: int = ..., preserve_times: int = ..., link: Optional[str] = ..., level: Any = ...) -> Tuple[str, bool]: ... # level is not used def copy_tree(self, infile: str, outfile: str, preserve_mode: int = ..., preserve_times: int = ..., preserve_symlinks: int = ..., level: Any = ...) -> List[str]: ... # level is not used def move_file(self, src: str, dest: str, level: Any = ...) -> str: ... # level is not used def spawn(self, cmd: Iterable[str], search_path: int = ..., level: Any = ...) -> None: ... # level is not used def make_archive(self, base_name: str, format: str, root_dir: Optional[str] = ..., base_dir: Optional[str] = ..., owner: Optional[str] = ..., group: Optional[str] = ...) -> str: ... def make_file(self, infiles: Union[str, List[str], Tuple[str]], outfile: str, func: Callable[..., Any], args: List[Any], exec_msg: Optional[str] = ..., skip_msg: Optional[str] = ..., level: Any = ...) -> None: ... # level is not used mypy-0.560/typeshed/stdlib/2and3/distutils/command/0000755€tŠÔÚ€2›s®0000000000013215007244026351 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030614 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/bdist.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030162 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/bdist_dumb.pyi0000644€tŠÔÚ€2›s®0000000000013215007212031171 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/bdist_msi.pyi0000644€tŠÔÚ€2›s®0000000026613215007212031050 0ustar jukkaDROPBOX\Domain Users00000000000000from distutils.cmd import Command class bdist_msi(Command): def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... 
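# --- Illustrative usage sketch (not part of the stubs above): a user-defined
# distutils command implementing the three abstract hooks on Command; the
# command name and behaviour are invented for the sketch.
import glob
import os
from distutils.cmd import Command

class clean_logs(Command):
    description = "remove stale *.log files"
    user_options = []                           # no command-line options

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        for path in glob.glob("*.log"):
            self.announce("removing %s" % path, level=2)
            os.remove(path)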
mypy-0.560/typeshed/stdlib/2and3/distutils/command/bdist_packager.pyi0000644€tŠÔÚ€2›s®0000000000013215007212032017 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/bdist_rpm.pyi0000644€tŠÔÚ€2›s®0000000000013215007212031040 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/bdist_wininst.pyi0000644€tŠÔÚ€2›s®0000000000013215007212031735 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/build.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030154 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/build_clib.pyi0000644€tŠÔÚ€2›s®0000000000013215007212031145 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/build_ext.pyi0000644€tŠÔÚ€2›s®0000000000013215007212031034 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/build_py.pyi0000644€tŠÔÚ€2›s®0000000042513215007212030677 0ustar jukkaDROPBOX\Domain Users00000000000000from distutils.cmd import Command import sys if sys.version_info >= (3,): class build_py(Command): def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... class build_py_2to3(build_py): ... mypy-0.560/typeshed/stdlib/2and3/distutils/command/build_scripts.pyi0000644€tŠÔÚ€2›s®0000000000013215007212031723 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/check.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030132 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/clean.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030137 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/config.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030322 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/install.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030523 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/install_data.pyi0000644€tŠÔÚ€2›s®0000000000013215007212031514 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/install_headers.pyi0000644€tŠÔÚ€2›s®0000000000013215007212032216 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/install_lib.pyi0000644€tŠÔÚ€2›s®0000000000013215007212031351 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/install_scripts.pyi0000644€tŠÔÚ€2›s®0000000000013215007212032272 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/register.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030701 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/command/sdist.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030203 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/distutils/core.pyi0000644€tŠÔÚ€2›s®0000000364713215007212026413 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.core from typing import Any, List, Mapping, Optional, Tuple, Type, Union from distutils.cmd import Command as Command from distutils.dist import Distribution as Distribution from distutils.extension import Extension as Extension def setup(name: str = ..., version: str = ..., description: str = ..., long_description: str = ..., 
author: str = ..., author_email: str = ..., maintainer: str = ..., maintainer_email: str = ..., url: str = ..., download_url: str = ..., packages: List[str] = ..., py_modules: List[str] = ..., scripts: List[str] = ..., ext_modules: List[Extension] = ..., classifiers: List[str] = ..., distclass: Type[Distribution] = ..., script_name: str = ..., script_args: List[str] = ..., options: Mapping[str, Any] = ..., license: str = ..., keywords: Union[List[str], str] = ..., platforms: Union[List[str], str] = ..., cmdclass: Mapping[str, Type[Command]] = ..., data_files: List[Tuple[str, List[str]]] = ..., package_dir: Mapping[str, str] = ..., obsoletes: List[str] = ..., provides: List[str] = ..., requires: List[str] = ..., command_packages: List[str] = ..., command_options: Mapping[str, Mapping[str, Tuple[Any, Any]]] = ..., package_data: Mapping[str, List[str]] = ..., include_package_data: bool = ..., libraries: List[str] = ..., headers: List[str] = ..., ext_package: str = ..., include_dirs: List[str] = ..., password: str = ..., fullname: str = ..., **attrs: Any) -> None: ... def run_setup(script_name: str, script_args: Optional[List[str]] = ..., stop_after: str = ...) -> Distribution: ... mypy-0.560/typeshed/stdlib/2and3/distutils/cygwinccompiler.pyi0000644€tŠÔÚ€2›s®0000000026213215007212030647 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.cygwinccompiler from distutils.unixccompiler import UnixCCompiler class CygwinCCompiler(UnixCCompiler): ... class Mingw32CCompiler(CygwinCCompiler): ... mypy-0.560/typeshed/stdlib/2and3/distutils/debug.pyi0000644€tŠÔÚ€2›s®0000000006713215007212026542 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.debug DEBUG = ... # type: bool mypy-0.560/typeshed/stdlib/2and3/distutils/dep_util.pyi0000644€tŠÔÚ€2›s®0000000045713215007212027264 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.dep_util from typing import List, Tuple def newer(source: str, target: str) -> bool: ... def newer_pairwise(sources: List[str], targets: List[str]) -> List[Tuple[str, str]]: ... def newer_group(sources: List[str], target: str, missing: str = ...) -> bool: ... mypy-0.560/typeshed/stdlib/2and3/distutils/dir_util.pyi0000644€tŠÔÚ€2›s®0000000117613215007212027271 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.dir_util from typing import List def mkpath(name: str, mode: int = ..., verbose: int = ..., dry_run: int = ...) -> List[str]: ... def create_tree(base_dir: str, files: List[str], mode: int = ..., verbose: int = ..., dry_run: int = ...) -> None: ... def copy_tree(src: str, dst: str, preserve_mode: int = ..., preserve_times: int = ..., preserve_symlinks: int = ..., update: int = ..., verbose: int = ..., dry_run: int = ...) -> List[str]: ... def remove_tree(directory: str, verbose: int = ..., dry_run: int = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/distutils/dist.pyi0000644€tŠÔÚ€2›s®0000000025213215007212026413 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.dist from typing import Any, Mapping, Optional class Distribution: def __init__(self, attrs: Optional[Mapping[str, Any]] = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/distutils/errors.pyi0000644€tŠÔÚ€2›s®0000000016013215007212026762 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.errors class DistutilsExecError(Exception): ... class DistutilsFileError(Exception): ... 
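# --- Illustrative usage sketch (not part of the stubs above): a minimal
# setup.py driving distutils.core.setup() with the keyword arguments typed
# above; the project metadata and file names are invented for the sketch.
from distutils.core import setup, Extension

setup(
    name="example",
    version="0.1",
    packages=["example"],
    ext_modules=[Extension("example._speedups", sources=["example/_speedups.c"])],
)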
mypy-0.560/typeshed/stdlib/2and3/distutils/extension.pyi0000644€tŠÔÚ€2›s®0000000326113215007212027467 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.extension from typing import List, Optional, Tuple import sys class Extension: if sys.version_info >= (3,): def __init__(self, name: str, sources: List[str], include_dirs: List[str] = ..., define_macros: List[Tuple[str, Optional[str]]] = ..., undef_macros: List[str] = ..., library_dirs: List[str] = ..., libraries: List[str] = ..., runtime_library_dirs: List[str] = ..., extra_objects: List[str] = ..., extra_compile_args: List[str] = ..., extra_link_args: List[str] = ..., export_symbols: List[str] = ..., depends: List[str] = ..., language: str = ..., optional: bool = ...) -> None: ... else: def __init__(self, name: str, sources: List[str], include_dirs: List[str] = ..., define_macros: List[Tuple[str, Optional[str]]] = ..., undef_macros: List[str] = ..., library_dirs: List[str] = ..., libraries: List[str] = ..., runtime_library_dirs: List[str] = ..., extra_objects: List[str] = ..., extra_compile_args: List[str] = ..., extra_link_args: List[str] = ..., export_symbols: List[str] = ..., depends: List[str] = ..., language: str = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/distutils/fancy_getopt.pyi0000644€tŠÔÚ€2›s®0000000167013215007212030137 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.fancy_getopt from typing import ( Any, List, Mapping, Optional, Tuple, Union, TypeVar, overload, ) _Option = Tuple[str, str, str] _GR = Tuple[List[str], OptionDummy] def fancy_getopt(options: List[_Option], negative_opt: Mapping[_Option, _Option], object: Any, args: Optional[List[str]]) -> Union[List[str], _GR]: ... def wrap_text(text: str, width: int) -> List[str]: ... class FancyGetopt: def __init__(self, option_table: Optional[List[_Option]] = ...) -> None: ... # TODO kinda wrong, `getopt(object=object())` is invalid @overload def getopt(self, args: Optional[List[str]] = ...) -> _GR: ... @overload def getopt(self, args: Optional[List[str]], object: Any) -> List[str]: ... def get_option_order(self) -> List[Tuple[str, str]]: ... def generate_help(self, header: Optional[str] = ...) -> List[str]: ... class OptionDummy: ... mypy-0.560/typeshed/stdlib/2and3/distutils/file_util.pyi0000644€tŠÔÚ€2›s®0000000075613215007212027435 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.file_util from typing import Optional, Sequence, Tuple def copy_file(src: str, dst: str, preserve_mode: bool = ..., preserve_times: bool = ..., update: bool = ..., link: Optional[str] = ..., verbose: bool = ..., dry_run: bool = ...) -> Tuple[str, str]: ... def move_file(src: str, dst: str, verbose: bool = ..., dry_run: bool = ...) -> str: ... def write_file(filename: str, contents: Sequence[str]) -> None: ... mypy-0.560/typeshed/stdlib/2and3/distutils/filelist.pyi0000644€tŠÔÚ€2›s®0000000006413215007212027264 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.filelist class FileList: ... mypy-0.560/typeshed/stdlib/2and3/distutils/log.pyi0000644€tŠÔÚ€2›s®0000000141013215007212026226 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Iterable, Text DEBUG: int INFO: int WARN: int ERROR: int FATAL: int class Log: def __init__(self, threshold: int = ...) -> None: ... def log(self, level: int, msg: Text, *args: Any) -> None: ... def debug(self, msg: Text, *args: Any) -> None: ... def info(self, msg: Text, *args: Any) -> None: ... def warn(self, msg: Text, *args: Any) -> None: ... 
def error(self, msg: Text, *args: Any) -> None: ... def fatal(self, msg: Text, *args: Any) -> None: ... _LogFunc = Callable[[Text, Iterable[Any]], None] log: Callable[[int, Text, Iterable[Any]], None] debug: _LogFunc info: _LogFunc warn: _LogFunc error: _LogFunc fatal: _LogFunc def set_threshold(level: int) -> int: ... def set_verbosity(v: int) -> None: ... mypy-0.560/typeshed/stdlib/2and3/distutils/msvccompiler.pyi0000644€tŠÔÚ€2›s®0000000016313215007212030154 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.msvccompiler from distutils.ccompiler import CCompiler class MSVCCompiler(CCompiler): ... mypy-0.560/typeshed/stdlib/2and3/distutils/spawn.pyi0000644€tŠÔÚ€2›s®0000000043613215007212026604 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.spawn from typing import List, Optional def spawn(cmd: List[str], search_path: bool = ..., verbose: bool = ..., dry_run: bool = ...) -> None: ... def find_executable(executable: str, path: Optional[str] = ...) -> Optional[str]: ... mypy-0.560/typeshed/stdlib/2and3/distutils/sysconfig.pyi0000644€tŠÔÚ€2›s®0000000132013215007212027451 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.sysconfig from typing import Mapping, Optional, Union from distutils.ccompiler import CCompiler PREFIX = ... # type: str EXEC_PREFIX = ... # type: str def get_config_var(name: str) -> Union[int, str, None]: ... def get_config_vars(*args: str) -> Mapping[str, Union[int, str]]: ... def get_config_h_filename() -> str: ... def get_makefile_filename() -> str: ... def get_python_inc(plat_specific: bool = ..., prefix: Optional[str] = ...) -> str: ... def get_python_lib(plat_specific: bool = ..., standard_lib: bool = ..., prefix: Optional[str] = ...) -> str: ... def customize_compiler(compiler: CCompiler) -> None: ... def set_python_build() -> None: ... mypy-0.560/typeshed/stdlib/2and3/distutils/text_file.pyi0000644€tŠÔÚ€2›s®0000000137013215007212027435 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.text_file from typing import IO, List, Optional, Tuple, Union class TextFile: def __init__(self, filename: Optional[str] = ..., file: Optional[IO[str]] = ..., *, strip_comments: bool = ..., lstrip_ws: bool = ..., rstrip_ws: bool = ..., skip_blanks: bool = ..., join_lines: bool = ..., collapse_join: bool = ...) -> None: ... def open(self, filename: str) -> None: ... def close(self) -> None: ... def warn(self, msg: str, line: Union[List[int], Tuple[int, int], int] = ...) -> None: ... def readline(self) -> Optional[str]: ... def readlines(self) -> List[str]: ... def unreadline(self, line: str) -> str: ... mypy-0.560/typeshed/stdlib/2and3/distutils/unixccompiler.pyi0000644€tŠÔÚ€2›s®0000000016513215007212030334 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.unixccompiler from distutils.ccompiler import CCompiler class UnixCCompiler(CCompiler): ... mypy-0.560/typeshed/stdlib/2and3/distutils/util.pyi0000644€tŠÔÚ€2›s®0000000157413215007212026435 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for distutils.util from typing import Any, Callable, List, Mapping, Optional, Tuple def get_platform() -> str: ... def convert_path(pathname: str) -> str: ... def change_root(new_root: str, pathname: str) -> str: ... def check_environ() -> None: ... def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: ... def split_quoted(s: str) -> List[str]: ... def execute(func: Callable[..., None], args: Tuple[Any, ...], msg: Optional[str] = ..., verbose: bool = ..., dry_run: bool = ...) -> None: ... 
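# A small usage sketch for the distutils helper stubs above (spawn, sysconfig, util).
# Output values in the comments are only examples; they depend on the interpreter
# and platform this runs on.
from distutils.spawn import find_executable
from distutils.sysconfig import get_python_lib, get_config_var
from distutils.util import get_platform, split_quoted

print(get_platform())                    # e.g. "linux-x86_64"
print(get_python_lib())                  # site-packages directory for this interpreter
print(get_config_var("VERSION"))         # e.g. "3.6"
print(find_executable("python"))         # absolute path, or None if not on PATH
print(split_quoted('gcc -DFOO="a b"'))   # shell-like splitting, roughly ['gcc', '-DFOO=a b']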
def strtobool(val: str) -> bool: ... def byte_compile(py_files: List[str], optimize: int = ..., force: bool = ..., prefix: Optional[str] = ..., base_dir: Optional[str] = ..., verbose: bool = ..., dry_run: bool = ..., direct: Optional[bool] = ...) -> None: ... def rfc822_escape(header: str) -> str: ... mypy-0.560/typeshed/stdlib/2and3/distutils/version.pyi0000644€tŠÔÚ€2›s®0000000146513215007212027144 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any class Version: def __init__(self, vstring=None): ... if sys.version_info >= (3,): def __eq__(self, other): ... def __lt__(self, other): ... def __le__(self, other): ... def __gt__(self, other): ... def __ge__(self, other): ... class StrictVersion(Version): version_re = ... # type: Any version = ... # type: Any prerelease = ... # type: Any def parse(self, vstring): ... if sys.version_info < (3,): def __cmp__(self, other): ... class LooseVersion(Version): component_re = ... # type: Any def __init__(self, vstring=None): ... vstring = ... # type: Any version = ... # type: Any def parse(self, vstring): ... if sys.version_info < (3,): def __cmp__(self, other): ... mypy-0.560/typeshed/stdlib/2and3/doctest.pyi0000644€tŠÔÚ€2›s®0000001533313215007212025077 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, List, NamedTuple, Optional, Tuple, Type, Union import sys import types import unittest TestResults = NamedTuple('TestResults', [ ('failed', int), ('attempted', int), ]) OPTIONFLAGS_BY_NAME: Dict[str, int] def register_optionflag(name: str) -> int: ... DONT_ACCEPT_TRUE_FOR_1: int DONT_ACCEPT_BLANKLINE: int NORMALIZE_WHITESPACE: int ELLIPSIS: int SKIP: int IGNORE_EXCEPTION_DETAIL: int COMPARISON_FLAGS: int REPORT_UDIFF: int REPORT_CDIFF: int REPORT_NDIFF: int REPORT_ONLY_FIRST_FAILURE: int if sys.version_info >= (3, 4): FAIL_FAST: int REPORTING_FLAGS: int BLANKLINE_MARKER: str ELLIPSIS_MARKER: str class Example: source: str want: str exc_msg: Optional[str] lineno: int indent: int options: Dict[int, bool] def __init__(self, source: str, want: str, exc_msg: Optional[str] = ..., lineno: int = ..., indent: int = ..., options: Optional[Dict[int, bool]] = ...) -> None: ... def __hash__(self) -> int: ... class DocTest: examples: List[Example] globs: Dict[str, Any] name: str filename: Optional[str] lineno: Optional[int] docstring: Optional[str] def __init__(self, examples: List[Example], globs: Dict[str, Any], name: str, filename: Optional[str], lineno: Optional[int], docstring: Optional[str]) -> None: ... def __hash__(self) -> int: ... def __lt__(self, other: DocTest) -> bool: ... class DocTestParser: def parse(self, string: str, name: str = ...) -> List[Union[str, Example]]: ... def get_doctest(self, string: str, globs: Dict[str, Any], name: str, filename: Optional[str], lineno: Optional[str]) -> DocTest: ... def get_examples(self, strin: str, name: str = ...) -> List[Example]: ... class DocTestFinder: def __init__(self, verbose: bool = ..., parser: DocTestParser = ..., recurse: bool = ..., exclude_empty: bool = ...) -> None: ... def find(self, obj: object, name: Optional[str] = ..., module: Union[None, bool, types.ModuleType] = ..., globs: Optional[Dict[str, Any]] = ..., extraglobs: Optional[Dict[str, Any]] = ...) -> List[DocTest]: ... 
_Out = Callable[[str], Any] _ExcInfo = Tuple[Type[BaseException], BaseException, types.TracebackType] class DocTestRunner: DIVIDER: str optionflags: int original_optionflags: int tries: int failures: int test: DocTest def __init__(self, checker: Optional[OutputChecker] = ..., verbose: Optional[bool] = ..., optionflags: int = ...) -> None: ... def report_start(self, out: _Out, test: DocTest, example: Example) -> None: ... def report_success(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... def report_failure(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... def report_unexpected_exception(self, out: _Out, test: DocTest, example: Example, exc_info: _ExcInfo) -> None: ... def run(self, test: DocTest, compileflags: Optional[int] = ..., out: Optional[_Out] = ..., clear_globs: bool = ...) -> TestResults: ... def summarize(self, verbose: Optional[bool] = ...) -> TestResults: ... def merge(self, other: DocTestRunner) -> None: ... class OutputChecker: def check_output(self, want: str, got: str, optionflags: int) -> bool: ... def output_difference(self, example: Example, got: str, optionflags: int) -> str: ... class DocTestFailure(Exception): test: DocTest example: Example got: str def __init__(self, test: DocTest, example: Example, got: str) -> None: ... class UnexpectedException(Exception): test: DocTest example: Example exc_info: _ExcInfo def __init__(self, test: DocTest, example: Example, exc_info: _ExcInfo) -> None: ... class DebugRunner(DocTestRunner): ... master: Optional[DocTestRunner] def testmod(m: Optional[types.ModuleType] = ..., name: Optional[str] = ..., globs: Dict[str, Any] = ..., verbose: Optional[bool] = ..., report: bool = ..., optionflags: int = ..., extraglobs: Dict[str, Any] = ..., raise_on_error: bool = ..., exclude_empty: bool = ...) -> TestResults: ... def testfile(filename: str, module_relative: bool = ..., name: Optional[str] = ..., package: Union[None, str, types.ModuleType] = ..., globs: Optional[Dict[str, Any]] = ..., verbose: Optional[bool] = ..., report: bool = ..., optionflags: int = ..., extraglobs: Optional[Dict[str, Any]] = ..., raise_on_error: bool = ..., parser: DocTestParser = ..., encoding: Optional[str] = ...) -> TestResults: ... def run_docstring_examples(f: object, globs: Dict[str, Any], verbose: bool = ..., name: str = ..., compileflags: Optional[int] = ..., optionflags: int = ...) -> None: ... def set_unittest_reportflags(flags: int) -> int: ... class DocTestCase(unittest.TestCase): def __init__(self, test: DocTest, optionflags: int = ..., setUp: Optional[Callable[[DocTest], Any]] = ..., tearDown: Optional[Callable[[DocTest], Any]] = ..., checker: Optional[OutputChecker] = ...) -> None: ... def setUp(self) -> None: ... def tearDown(self) -> None: ... def runTest(self) -> None: ... def format_failure(self, err: str) -> str: ... def debug(self) -> None: ... def id(self) -> str: ... def __hash__(self) -> int: ... def shortDescription(self) -> str: ... class SkipDocTestCase(DocTestCase): def __init__(self, module: types.ModuleType) -> None: ... def setUp(self) -> None: ... def test_skip(self) -> None: ... def shortDescription(self) -> str: ... if sys.version_info >= (3, 4): class _DocTestSuite(unittest.TestSuite): ... else: _DocTestSuite = unittest.TestSuite def DocTestSuite(module: Union[None, str, types.ModuleType] = ..., globs: Optional[Dict[str, Any]] = ..., extraglobs: Optional[Dict[str, Any]] = ..., test_finder: Optional[DocTestFinder] = ..., **options: Any) -> _DocTestSuite: ... 
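# Minimal sketch of driving the doctest API stubbed above: a docstring example
# checked via doctest.testmod(), which returns the TestResults named tuple
# (failed, attempted) declared at the top of this stub.
import doctest

def add(a, b):
    """Return the sum of a and b.

    >>> add(2, 3)
    5
    """
    return a + b

if __name__ == "__main__":
    results = doctest.testmod(verbose=False)
    print(results.failed, results.attempted)   # 0 1 when the example passes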
class DocFileCase(DocTestCase): def id(self) -> str: ... def format_failure(self, err: str) -> str: ... def DocFileTest(path: str, module_relative: bool = ..., package: Union[None, str, types.ModuleType] = ..., globs: Optional[Dict[str, Any]] = ..., parser: DocTestParser = ..., encoding: Optional[str] = ..., **options: Any) -> DocFileCase: ... def DocFileSuite(*paths: str, **kw: Any) -> _DocTestSuite: ... def script_from_examples(s: str) -> str: ... def testsource(module: Union[None, str, types.ModuleType], name: str) -> str: ... def debug_src(src: str, pm: bool = ..., globs: Optional[Dict[str, Any]] = ...) -> None: ... def debug_script(src: str, pm: bool = ..., globs: Optional[Dict[str, Any]] = ...) -> None: ... def debug(module: Union[None, str, types.ModuleType], name: str, pm: bool = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/errno.pyi0000644€tŠÔÚ€2›s®0000000654513215007212024564 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for errno from typing import Mapping import sys errorcode = ... # type: Mapping[int, str] EPERM = ... # type: int ENOENT = ... # type: int ESRCH = ... # type: int EINTR = ... # type: int EIO = ... # type: int ENXIO = ... # type: int E2BIG = ... # type: int ENOEXEC = ... # type: int EBADF = ... # type: int ECHILD = ... # type: int EAGAIN = ... # type: int ENOMEM = ... # type: int EACCES = ... # type: int EFAULT = ... # type: int ENOTBLK = ... # type: int EBUSY = ... # type: int EEXIST = ... # type: int EXDEV = ... # type: int ENODEV = ... # type: int ENOTDIR = ... # type: int EISDIR = ... # type: int EINVAL = ... # type: int ENFILE = ... # type: int EMFILE = ... # type: int ENOTTY = ... # type: int ETXTBSY = ... # type: int EFBIG = ... # type: int ENOSPC = ... # type: int ESPIPE = ... # type: int EROFS = ... # type: int EMLINK = ... # type: int EPIPE = ... # type: int EDOM = ... # type: int ERANGE = ... # type: int EDEADLCK = ... # type: int ENAMETOOLONG = ... # type: int ENOLCK = ... # type: int ENOSYS = ... # type: int ENOTEMPTY = ... # type: int ELOOP = ... # type: int EWOULDBLOCK = ... # type: int ENOMSG = ... # type: int EIDRM = ... # type: int ECHRNG = ... # type: int EL2NSYNC = ... # type: int EL3HLT = ... # type: int EL3RST = ... # type: int ELNRNG = ... # type: int EUNATCH = ... # type: int ENOCSI = ... # type: int EL2HLT = ... # type: int EBADE = ... # type: int EBADR = ... # type: int EXFULL = ... # type: int ENOANO = ... # type: int EBADRQC = ... # type: int EBADSLT = ... # type: int EDEADLOCK = ... # type: int EBFONT = ... # type: int ENOSTR = ... # type: int ENODATA = ... # type: int ETIME = ... # type: int ENOSR = ... # type: int ENONET = ... # type: int ENOPKG = ... # type: int EREMOTE = ... # type: int ENOLINK = ... # type: int EADV = ... # type: int ESRMNT = ... # type: int ECOMM = ... # type: int EPROTO = ... # type: int EMULTIHOP = ... # type: int EDOTDOT = ... # type: int EBADMSG = ... # type: int EOVERFLOW = ... # type: int ENOTUNIQ = ... # type: int EBADFD = ... # type: int EREMCHG = ... # type: int ELIBACC = ... # type: int ELIBBAD = ... # type: int ELIBSCN = ... # type: int ELIBMAX = ... # type: int ELIBEXEC = ... # type: int EILSEQ = ... # type: int ERESTART = ... # type: int ESTRPIPE = ... # type: int EUSERS = ... # type: int ENOTSOCK = ... # type: int EDESTADDRREQ = ... # type: int EMSGSIZE = ... # type: int EPROTOTYPE = ... # type: int ENOPROTOOPT = ... # type: int EPROTONOSUPPORT = ... # type: int ESOCKTNOSUPPORT = ... # type: int EOPNOTSUPP = ... # type: int EPFNOSUPPORT = ... # type: int EAFNOSUPPORT = ... 
# type: int EADDRINUSE = ... # type: int EADDRNOTAVAIL = ... # type: int ENETDOWN = ... # type: int ENETUNREACH = ... # type: int ENETRESET = ... # type: int ECONNABORTED = ... # type: int ECONNRESET = ... # type: int ENOBUFS = ... # type: int EISCONN = ... # type: int ENOTCONN = ... # type: int ESHUTDOWN = ... # type: int ETOOMANYREFS = ... # type: int ETIMEDOUT = ... # type: int ECONNREFUSED = ... # type: int EHOSTDOWN = ... # type: int EHOSTUNREACH = ... # type: int EALREADY = ... # type: int EINPROGRESS = ... # type: int ESTALE = ... # type: int EUCLEAN = ... # type: int ENOTNAM = ... # type: int ENAVAIL = ... # type: int EISNAM = ... # type: int EREMOTEIO = ... # type: int EDQUOT = ... # type: int mypy-0.560/typeshed/stdlib/2and3/filecmp.pyi0000644€tŠÔÚ€2›s®0000000352113215007212025045 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for filecmp (Python 2/3) import sys from typing import AnyStr, Callable, Dict, Generic, Iterable, List, Optional, Sequence, Tuple, Union, Text DEFAULT_IGNORES = ... # type: List[str] def cmp(f1: Union[bytes, Text], f2: Union[bytes, Text], shallow: Union[int, bool] = ...) -> bool: ... def cmpfiles(a: AnyStr, b: AnyStr, common: Iterable[AnyStr], shallow: Union[int, bool] = ...) -> Tuple[List[AnyStr], List[AnyStr], List[AnyStr]]: ... class dircmp(Generic[AnyStr]): def __init__(self, a: AnyStr, b: AnyStr, ignore: Optional[Sequence[AnyStr]] = ..., hide: Optional[Sequence[AnyStr]] = ...) -> None: ... left = ... # type: AnyStr right = ... # type: AnyStr hide = ... # type: Sequence[AnyStr] ignore = ... # type: Sequence[AnyStr] # These properties are created at runtime by __getattr__ subdirs = ... # type: Dict[AnyStr, dircmp[AnyStr]] same_files = ... # type: List[AnyStr] diff_files = ... # type: List[AnyStr] funny_files = ... # type: List[AnyStr] common_dirs = ... # type: List[AnyStr] common_files = ... # type: List[AnyStr] common_funny = ... # type: List[AnyStr] common = ... # type: List[AnyStr] left_only = ... # type: List[AnyStr] right_only = ... # type: List[AnyStr] left_list = ... # type: List[AnyStr] right_list = ... # type: List[AnyStr] def report(self) -> None: ... def report_partial_closure(self) -> None: ... def report_full_closure(self) -> None: ... methodmap = ... # type: Dict[str, Callable[[], None]] def phase0(self) -> None: ... def phase1(self) -> None: ... def phase2(self) -> None: ... def phase3(self) -> None: ... def phase4(self) -> None: ... def phase4_closure(self) -> None: ... if sys.version_info >= (3,): def clear_cache() -> None: ... mypy-0.560/typeshed/stdlib/2and3/fileinput.pyi0000644€tŠÔÚ€2›s®0000000366013215007212025431 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterable, Callable, IO, AnyStr, Generic, Any, Text, Union, Iterator, Optional import os import sys if sys.version_info >= (3, 6): _Path = Union[Text, bytes, os.PathLike[Any]] else: _Path = Union[Text, bytes] def input( files: Union[_Path, Iterable[_Path], None] = ..., inplace: bool = ..., backup: str = ..., bufsize: int = ..., mode: str = ..., openhook: Callable[[_Path, str], IO[AnyStr]] = ...) -> Iterable[AnyStr]: ... def close() -> None: ... def nextfile() -> None: ... def filename() -> str: ... def lineno() -> int: ... def filelineno() -> int: ... def fileno() -> int: ... def isfirstline() -> bool: ... def isstdin() -> bool: ... 
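# Usage sketch for the fileinput module-level functions stubbed above: iterate over
# lines from the named files (or stdin when none are given). "data.txt" is only an
# illustrative filename.
import fileinput

for line in fileinput.input(files=["data.txt"]):
    # filename() and filelineno() report the file and line currently being read
    print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), line.rstrip()))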
class FileInput(Iterable[AnyStr], Generic[AnyStr]): def __init__( self, files: Union[None, _Path, Iterable[_Path]] = ..., inplace: bool = ..., backup: str = ..., bufsize: int = ..., mode: str = ..., openhook: Callable[[_Path, str], IO[AnyStr]] = ... ) -> None: ... def __del__(self) -> None: ... def close(self) -> None: ... if sys.version_info >= (3, 2): def __enter__(self) -> FileInput[AnyStr]: ... def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ... def __iter__(self) -> Iterator[AnyStr]: ... def __next__(self) -> AnyStr: ... def __getitem__(self, i: int) -> AnyStr: ... def nextfile(self) -> None: ... def readline(self) -> AnyStr: ... def filename(self) -> str: ... def lineno(self) -> int: ... def filelineno(self) -> int: ... def fileno(self) -> int: ... def isfirstline(self) -> bool: ... def isstdin(self) -> bool: ... def hook_compressed(filename: _Path, mode: str) -> IO[Any]: ... if sys.version_info >= (3, 6): def hook_encoded(encoding: str, errors: Optional[str] = ...) -> Callable[[_Path, str], IO[Any]]: ... else: def hook_encoded(encoding: str) -> Callable[[_Path, str], IO[Any]]: ... mypy-0.560/typeshed/stdlib/2and3/formatter.pyi0000644€tŠÔÚ€2›s®0000001140613215007212025432 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://hg.python.org/cpython/file/2.7/Lib/formatter.py # and https://github.com/python/cpython/blob/master/Lib/formatter.py from typing import Any, IO, List, Optional, Tuple AS_IS = None _FontType = Tuple[str, bool, bool, bool] _StylesType = Tuple[Any, ...] class NullFormatter: writer = ... # type: Optional[NullWriter] def __init__(self, writer: Optional[NullWriter] = ...) -> None: ... def end_paragraph(self, blankline: int) -> None: ... def add_line_break(self) -> None: ... def add_hor_rule(self, *args, **kw) -> None: ... def add_label_data(self, format, counter: int, blankline: Optional[int] = ...) -> None: ... def add_flowing_data(self, data: str) -> None: ... def add_literal_data(self, data: str) -> None: ... def flush_softspace(self) -> None: ... def push_alignment(self, align: Optional[str]) -> None: ... def pop_alignment(self) -> None: ... def push_font(self, x: _FontType) -> None: ... def pop_font(self) -> None: ... def push_margin(self, margin: int) -> None: ... def pop_margin(self) -> None: ... def set_spacing(self, spacing: Optional[str]) -> None: ... def push_style(self, *styles: _StylesType) -> None: ... def pop_style(self, n: int = ...) -> None: ... def assert_line_data(self, flag: int = ...) -> None: ... class AbstractFormatter: writer = ... # type: NullWriter align = ... # type: Optional[str] align_stack = ... # type: List[Optional[str]] font_stack = ... # type: List[_FontType] margin_stack = ... # type: List[int] spacing = ... # type: Optional[str] style_stack = ... # type: Any nospace = ... # type: int softspace = ... # type: int para_end = ... # type: int parskip = ... # type: int hard_break = ... # type: int have_label = ... # type: int def __init__(self, writer: NullWriter) -> None: ... def end_paragraph(self, blankline: int) -> None: ... def add_line_break(self) -> None: ... def add_hor_rule(self, *args, **kw) -> None: ... def add_label_data(self, format, counter: int, blankline: Optional[int] = ...) -> None: ... def format_counter(self, format, counter: int) -> str: ... def format_letter(self, case: str, counter: int) -> str: ... def format_roman(self, case: str, counter: int) -> str: ... def add_flowing_data(self, data: str) -> None: ... def add_literal_data(self, data: str) -> None: ... 
def flush_softspace(self) -> None: ... def push_alignment(self, align: Optional[str]) -> None: ... def pop_alignment(self) -> None: ... def push_font(self, font: _FontType) -> None: ... def pop_font(self) -> None: ... def push_margin(self, margin: int) -> None: ... def pop_margin(self) -> None: ... def set_spacing(self, spacing: Optional[str]) -> None: ... def push_style(self, *styles: _StylesType) -> None: ... def pop_style(self, n: int = ...) -> None: ... def assert_line_data(self, flag: int = ...) -> None: ... class NullWriter: def __init__(self) -> None: ... def flush(self) -> None: ... def new_alignment(self, align: Optional[str]) -> None: ... def new_font(self, font: _FontType) -> None: ... def new_margin(self, margin: int, level: int) -> None: ... def new_spacing(self, spacing: Optional[str]) -> None: ... def new_styles(self, styles) -> None: ... def send_paragraph(self, blankline: int) -> None: ... def send_line_break(self) -> None: ... def send_hor_rule(self, *args, **kw) -> None: ... def send_label_data(self, data: str) -> None: ... def send_flowing_data(self, data: str) -> None: ... def send_literal_data(self, data: str) -> None: ... class AbstractWriter(NullWriter): def new_alignment(self, align: Optional[str]) -> None: ... def new_font(self, font: _FontType) -> None: ... def new_margin(self, margin: int, level: int) -> None: ... def new_spacing(self, spacing: Optional[str]) -> None: ... def new_styles(self, styles) -> None: ... def send_paragraph(self, blankline: int) -> None: ... def send_line_break(self) -> None: ... def send_hor_rule(self, *args, **kw) -> None: ... def send_label_data(self, data: str) -> None: ... def send_flowing_data(self, data: str) -> None: ... def send_literal_data(self, data: str) -> None: ... class DumbWriter(NullWriter): file = ... # type: IO maxcol = ... # type: int def __init__(self, file: Optional[IO] = ..., maxcol: int = ...) -> None: ... def reset(self) -> None: ... def send_paragraph(self, blankline: int) -> None: ... def send_line_break(self) -> None: ... def send_hor_rule(self, *args, **kw) -> None: ... def send_literal_data(self, data: str) -> None: ... def send_flowing_data(self, data: str) -> None: ... def test(file: Optional[str] = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/fractions.pyi0000644€tŠÔÚ€2›s®0000000603113215007212025415 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for fractions # See https://docs.python.org/3/library/fractions.html # # Note: these stubs are incomplete. The more complex type # signatures are currently omitted. Also see numbers.pyi. from typing import Optional, TypeVar, Union, overload from numbers import Real, Integral, Rational from decimal import Decimal import sys _ComparableNum = Union[int, float, Decimal, Real] @overload def gcd(a: int, b: int) -> int: ... @overload def gcd(a: Integral, b: int) -> Integral: ... @overload def gcd(a: int, b: Integral) -> Integral: ... @overload def gcd(a: Integral, b: Integral) -> Integral: ... class Fraction(Rational): @overload def __init__(self, numerator: Union[int, Rational] = ..., denominator: Optional[Union[int, Rational]] = ..., *, _normalize: bool = ...) -> None: ... @overload def __init__(self, value: float, *, _normalize: bool = ...) -> None: ... @overload def __init__(self, value: Decimal, *, _normalize: bool = ...) -> None: ... @overload def __init__(self, value: str, *, _normalize: bool = ...) -> None: ... @classmethod def from_float(cls, f: float) -> 'Fraction': ... @classmethod def from_decimal(cls, dec: Decimal) -> 'Fraction': ... 
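# Usage sketch accompanying the fractions stub above (values chosen only for
# illustration): exact rational arithmetic plus conversion from float, Decimal,
# and string, and limit_denominator() for best rational approximations.
from decimal import Decimal
from fractions import Fraction

print(Fraction(3, 4) + Fraction(1, 6))                         # Fraction(11, 12)
print(Fraction.from_float(0.5))                                # Fraction(1, 2)
print(Fraction(Decimal("1.1")))                                # Fraction(11, 10)
print(Fraction("3.141592653589793").limit_denominator(1000))   # Fraction(355, 113)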
def limit_denominator(self, max_denominator: int = ...) -> 'Fraction': ... @property def numerator(self) -> int: ... @property def denominator(self) -> int: ... def __add__(self, other): ... def __radd__(self, other): ... def __sub__(self, other): ... def __rsub__(self, other): ... def __mul__(self, other): ... def __rmul__(self, other): ... def __truediv__(self, other): ... def __rtruediv__(self, other): ... if sys.version_info < (3, 0): def __div__(self, other): ... def __rdiv__(self, other): ... def __floordiv__(self, other) -> int: ... def __rfloordiv__(self, other) -> int: ... def __mod__(self, other): ... def __rmod__(self, other): ... def __pow__(self, other): ... def __rpow__(self, other): ... def __pos__(self) -> 'Fraction': ... def __neg__(self) -> 'Fraction': ... def __abs__(self) -> 'Fraction': ... def __trunc__(self) -> int: ... if sys.version_info >= (3, 0): def __floor__(self) -> int: ... def __ceil__(self) -> int: ... def __round__(self, ndigits=None): ... def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... def __lt__(self, other: _ComparableNum) -> bool: ... def __gt__(self, other: _ComparableNum) -> bool: ... def __le__(self, other: _ComparableNum) -> bool: ... def __ge__(self, other: _ComparableNum) -> bool: ... if sys.version_info >= (3, 0): def __bool__(self) -> bool: ... else: def __nonzero__(self) -> bool: ... # Not actually defined within fractions.py, but provides more useful # overrides @property def real(self) -> 'Fraction': ... @property def imag(self) -> 'Fraction': ... def conjugate(self) -> 'Fraction': ... mypy-0.560/typeshed/stdlib/2and3/ftplib.pyi0000644€tŠÔÚ€2›s®0000001341213215007212024706 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for ftplib (Python 2.7/3) import sys from typing import Optional, BinaryIO, Tuple, TextIO, Iterable, Callable, List, Union, Iterator, Dict, Text, TypeVar, Generic, Any from types import TracebackType from socket import socket from ssl import SSLContext _T = TypeVar('_T') _IntOrStr = Union[int, Text] MSG_OOB = ... # type: int FTP_PORT = ... # type: int MAXLINE = ... # type: int CRLF = ... # type: str if sys.version_info >= (3,): B_CRLF = ... # type: bytes class Error(Exception): ... class error_reply(Error): ... class error_temp(Error): ... class error_perm(Error): ... class error_proto(Error): ... all_errors = Tuple[Exception, ...] class FTP: debugging = ... # type: int # Note: This is technically the type that's passed in as the host argument. But to make it easier in Python 2 we # accept Text but return str. host = ... # type: str port = ... # type: int maxline = ... # type: int sock = ... # type: Optional[socket] welcome = ... # type: Optional[str] passiveserver = ... # type: int timeout = ... # type: int af = ... # type: int lastresp = ... # type: str if sys.version_info >= (3,): file = ... # type: Optional[TextIO] encoding = ... # type: str def __enter__(self: _T) -> _T: ... def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception], exc_tb: Optional[TracebackType]) -> bool: ... else: file = ... # type: Optional[BinaryIO] if sys.version_info >= (3, 3): source_address = ... # type: Optional[Tuple[str, int]] def __init__(self, host: Text = ..., user: Text = ..., passwd: Text = ..., acct: Text = ..., timeout: float = ..., source_address: Optional[Tuple[str, int]] = ...) -> None: ... def connect(self, host: Text = ..., port: int = ..., timeout: float = ..., source_address: Optional[Tuple[str, int]] = ...) -> str: ... 
else: def __init__(self, host: Text = ..., user: Text = ..., passwd: Text = ..., acct: Text = ..., timeout: float = ...) -> None: ... def connect(self, host: Text = ..., port: int = ..., timeout: float = ...) -> str: ... def getwelcome(self) -> str: ... def set_debuglevel(self, level: int) -> None: ... def debug(self, level: int) -> None: ... def set_pasv(self, val: Union[bool, int]) -> None: ... def sanitize(self, s: Text) -> str: ... def putline(self, line: Text) -> None: ... def putcmd(self, line: Text) -> None: ... def getline(self) -> str: ... def getmultiline(self) -> str: ... def getresp(self) -> str: ... def voidresp(self) -> str: ... def abort(self) -> str: ... def sendcmd(self, cmd: Text) -> str: ... def voidcmd(self, cmd: Text) -> str: ... def sendport(self, host: Text, port: int) -> str: ... def sendeprt(self, host: Text, port: int) -> str: ... def makeport(self) -> socket: ... def makepasv(self) -> Tuple[str, int]: ... def login(self, user: Text = ..., passwd: Text = ..., acct: Text = ...) -> str: ... # In practice, `rest` rest can actually be anything whose str() is an integer sequence, so to make it simple we allow integers. def ntransfercmd(self, cmd: Text, rest: Optional[_IntOrStr] = ...) -> Tuple[socket, int]: ... def transfercmd(self, cmd: Text, rest: Optional[_IntOrStr] = ...) -> socket: ... def retrbinary(self, cmd: Text, callback: Callable[[bytes], Any], blocksize: int = ..., rest: Optional[_IntOrStr] = ...) -> str: ... def storbinary(self, cmd: Text, fp: BinaryIO, blocksize: int = ..., callback: Optional[Callable[[bytes], Any]] = ..., rest: Optional[_IntOrStr] = ...) -> str: ... def retrlines(self, cmd: Text, callback: Optional[Callable[[str], Any]] = ...) -> str: ... def storlines(self, cmd: Text, fp: BinaryIO, callback: Optional[Callable[[bytes], Any]] = ...) -> str: ... def acct(self, password: Text) -> str: ... def nlst(self, *args: Text) -> List[str]: ... # Technically only the last arg can be a Callable but ... def dir(self, *args: Union[str, Callable[[str], None]]) -> None: ... if sys.version_info >= (3, 3): def mlsd(self, path: Text = ..., facts: Iterable[str] = ...) -> Iterator[Tuple[str, Dict[str, str]]]: ... def rename(self, fromname: Text, toname: Text) -> str: ... def delete(self, filename: Text) -> str: ... def cwd(self, dirname: Text) -> str: ... def size(self, filename: Text) -> str: ... def mkd(self, dirname: Text) -> str: ... def rmd(self, dirname: Text) -> str: ... def pwd(self) -> str: ... def quit(self) -> str: ... def close(self) -> None: ... class FTP_TLS(FTP): def __init__(self, host: Text = ..., user: Text = ..., passwd: Text = ..., acct: Text = ..., keyfile: Optional[str] = ..., certfile: Optional[str] = ..., context: Optional[SSLContext] = ..., timeout: float = ..., source_address: Optional[Tuple[str, int]] = ...) -> None: ... ssl_version = ... # type: int keyfile = ... # type: Optional[str] certfile = ... # type: Optional[str] context = ... # type: SSLContext def login(self, user: Text = ..., passwd: Text = ..., acct: Text = ..., secure: bool = ...) -> str: ... def auth(self) -> str: ... def prot_p(self) -> str: ... def prot_c(self) -> str: ... if sys.version_info >= (3, 3): def ccc(self) -> str: ... if sys.version_info < (3,): class Netrc: def __init__(self, filename: Optional[Text] = ...) -> None: ... def get_hosts(self) -> List[str]: ... def get_account(self, host: Text) -> Tuple[Optional[str], Optional[str], Optional[str]]: ... def get_macros(self) -> List[str]: ... def get_macro(self, macro: Text) -> Tuple[str, ...]: ... 
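# Sketch of driving the FTP client stubbed above. "ftp.example.com" and the remote
# paths are placeholders, not real endpoints; the context-manager form relies on the
# Python 3 __enter__/__exit__ methods declared in the stub.
from ftplib import FTP

with FTP("ftp.example.com", timeout=30) as ftp:
    ftp.login()                        # anonymous login by default
    ftp.cwd("/pub")
    ftp.retrlines("LIST")              # print a directory listing line by line
    with open("README", "wb") as fh:
        ftp.retrbinary("RETR README", fh.write)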
mypy-0.560/typeshed/stdlib/2and3/grp.pyi0000644€tŠÔÚ€2›s®0000000067013215007212024220 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, NamedTuple, Optional struct_group = NamedTuple("struct_group", [("gr_name", str), ("gr_passwd", Optional[str]), ("gr_gid", int), ("gr_mem", List[str])]) def getgrall() -> List[struct_group]: ... def getgrgid(gid: int) -> struct_group: ... def getgrnam(name: str) -> struct_group: ... mypy-0.560/typeshed/stdlib/2and3/hmac.pyi0000644€tŠÔÚ€2›s®0000000206113215007212024334 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for hmac from typing import Any, Callable, Optional, Union, overload from types import ModuleType import sys _B = Union[bytes, bytearray] # TODO more precise type for object of hashlib _Hash = Any digest_size: None if sys.version_info >= (3, 4): def new(key: _B, msg: Optional[_B] = ..., digestmod: Optional[Union[str, Callable[[], _Hash], ModuleType]] = ...) -> HMAC: ... else: def new(key: _B, msg: Optional[_B] = ..., digestmod: Optional[Union[Callable[[], _Hash], ModuleType]] = ...) -> HMAC: ... class HMAC: if sys.version_info >= (3,): digest_size = ... # type: int if sys.version_info >= (3, 4): block_size = ... # type: int name = ... # type: str def update(self, msg: _B) -> None: ... def digest(self) -> bytes: ... def hexdigest(self) -> str: ... def copy(self) -> HMAC: ... @overload def compare_digest(a: str, b: str) -> bool: ... @overload def compare_digest(a: bytes, b: bytes) -> bool: ... @overload def compare_digest(a: bytearray, b: bytearray) -> bool: ... mypy-0.560/typeshed/stdlib/2and3/imaplib.pyi0000644€tŠÔÚ€2›s®0000001337213215007212025050 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for imaplib (Python 2) import imaplib import subprocess import sys import time from socket import socket as _socket from ssl import SSLSocket from typing import Any, Callable, Dict, IO, List, Optional, Pattern, Text, Tuple, Union CommandResults = Tuple[str, List[Any]] class IMAP4: error: Exception abort: Exception readonly: Exception mustquote: Pattern[Text] = ... debug: int = ... state: str = ... literal: Optional[Text] = ... tagged_commands: Dict[str, str] = ... untagged_responses: Dict[str, str] = ... continuation_response: str = ... is_readonly: bool = ... tagnum: int = ... tagpre: str = ... tagre: Pattern[Text] = ... welcome: bytes = ... capabilities: Tuple[str] = ... PROTOCOL_VERSION: str = ... def __init__(self, host: str, port: int) -> None: ... def __getattr__(self, attr: str) -> Any: ... host: str = ... port: int = ... sock: _socket = ... file: Union[IO[Text], IO[bytes]] = ... def open(self, host: str = ..., port: int = ...) -> None: ... def read(self, size: int) -> bytes: ... def readline(self) -> bytes: ... def send(self, data: bytes) -> None: ... def shutdown(self) -> None: ... def socket(self) -> _socket: ... def recent(self) -> CommandResults: ... def response(self, code: str) -> CommandResults: ... def append(self, mailbox: str, flags: str, date_time: str, message: str) -> str: ... def authenticate(self, mechanism: str, authobject: Callable) -> Tuple[str, str]: ... def capability(self) -> CommandResults: ... def check(self) -> CommandResults: ... def close(self) -> CommandResults: ... def copy(self, message_set: str, new_mailbox: str) -> CommandResults: ... def create(self, mailbox: str) -> CommandResults: ... def delete(self, mailbox: str) -> CommandResults: ... def deleteacl(self, mailbox: str, who: str) -> CommandResults: ... def expunge(self) -> CommandResults: ... 
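# Usage sketch for the hmac stub earlier in this archive: keyed message
# authentication with a timing-safe comparison of the resulting digests.
# The key and message bytes are illustrative only.
import hashlib
import hmac

key = b"secret-key"
msg = b"important message"

expected = hmac.new(key, msg, digestmod=hashlib.sha256).hexdigest()

# compare_digest avoids leaking information through comparison timing
print(hmac.compare_digest(expected,
                          hmac.new(key, msg, hashlib.sha256).hexdigest()))   # True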
def fetch(self, message_set: str, message_parts: str) -> CommandResults: ... def getacl(self, mailbox: str) -> CommandResults: ... def getannotation(self, mailbox: str, entry: str, attribute: str) -> CommandResults: ... def getquota(self, root: str) -> CommandResults: ... def getquotaroot(self, mailbox: str) -> CommandResults: ... def list(self, directory: str = ..., pattern: str = ...) -> CommandResults: ... def login(self, user: str, password: str) -> CommandResults: ... def login_cram_md5(self, user: str, password: str) -> CommandResults: ... def logout(self) -> CommandResults: ... def lsub(self, directory: str = ..., pattern: str = ...) -> CommandResults: ... def myrights(self, mailbox: str) -> CommandResults: ... def namespace(self) -> CommandResults: ... def noop(self) -> CommandResults: ... def partial(self, message_num: str, message_part: str, start: str, length: str) -> CommandResults: ... def proxyauth(self, user: str) -> CommandResults: ... def rename(self, oldmailbox: str, newmailbox: str) -> CommandResults: ... def search(self, charset: str, *criteria: str) -> CommandResults: ... def select(self, mailbox: str = ..., readonly: bool = ...) -> CommandResults: ... def setacl(self, mailbox: str, who: str, what: str) -> CommandResults: ... def setannotation(self, *args: List[str]) -> CommandResults: ... def setquota(self, root: str, limits: str) -> CommandResults: ... def sort(self, sort_criteria: str, charset: str, *search_criteria: List[str]) -> CommandResults: ... if sys.version_info >= (3,): def starttls(self, ssl_context: Optional[Any] = ...) -> CommandResults: ... def status(self, mailbox: str, names: str) -> CommandResults: ... def store(self, message_set: str, command: str, flags: str) -> CommandResults: ... def subscribe(self, mailbox: str) -> CommandResults: ... def thread(self, threading_algorithm: str, charset: str, *search_criteria: List[str]) -> CommandResults: ... def uid(self, command: str, *args: List[str]) -> CommandResults: ... def unsubscribe(self, mailbox: str) -> CommandResults: ... def xatom(self, name: str, *args: List[str]) -> CommandResults: ... def print_log(self) -> None: ... class IMAP4_SSL(IMAP4): keyfile: str = ... certfile: str = ... def __init__(self, host: str = ..., port: int = ..., keyfile: Optional[str] = ..., certfile: Optional[str] = ...) -> None: ... host: str = ... port: int = ... sock: _socket = ... sslobj: SSLSocket = ... file: IO[Any] = ... def open(self, host: str = ..., port: Optional[int] = ...) -> None: ... def read(self, size: int) -> bytes: ... def readline(self) -> bytes: ... def send(self, data: bytes) -> None: ... def shutdown(self) -> None: ... def socket(self) -> _socket: ... def ssl(self) -> SSLSocket: ... class IMAP4_stream(IMAP4): command: str = ... def __init__(self, command: str) -> None: ... host: str = ... port: int = ... sock: _socket = ... file: IO[Any] = ... process: subprocess.Popen = ... writefile: IO[Any] = ... readfile: IO[Any] = ... def open(self, host: str = ..., port: Optional[int] = ...) -> None: ... def read(self, size: int) -> bytes: ... def readline(self) -> bytes: ... def send(self, data: bytes) -> None: ... def shutdown(self) -> None: ... class _Authenticator: mech: Callable = ... def __init__(self, mechinst: Callable) -> None: ... def process(self, data: str) -> str: ... def encode(self, inp: bytes) -> str: ... def decode(self, inp: str) -> bytes: ... def Internaldate2tuple(resp: str) -> time.struct_time: ... def Int2AP(num: int) -> str: ... def ParseFlags(resp: str) -> Tuple[str]: ... 
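# Sketch of the IMAP4_SSL client stubbed above; the host and credentials are
# placeholders. Each command returns a (status, data) pair, matching the
# CommandResults alias declared at the top of the stub.
import imaplib

conn = imaplib.IMAP4_SSL("imap.example.com")
conn.login("user@example.com", "app-password")
conn.select("INBOX", readonly=True)
status, data = conn.search(None, "UNSEEN")      # e.g. ('OK', [b'4 7 12'])
for num in data[0].split():
    status, msg_data = conn.fetch(num, "(RFC822)")
conn.logout()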
def Time2Internaldate(date_time: Union[float, time.struct_time, str]) -> str: ... mypy-0.560/typeshed/stdlib/2and3/keyword.pyi0000644€tŠÔÚ€2›s®0000000022513215007212025110 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for keyword from typing import Sequence, Text, Union def iskeyword(s: Union[Text, bytes]) -> bool: ... kwlist = ... # type: Sequence[str] mypy-0.560/typeshed/stdlib/2and3/lib2to3/0000755€tŠÔÚ€2›s®0000000000013215007244024165 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/lib2to3/__init__.pyi0000644€tŠÔÚ€2›s®0000000004113215007212026435 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3 (Python 3.6) mypy-0.560/typeshed/stdlib/2and3/lib2to3/pgen2/0000755€tŠÔÚ€2›s®0000000000013215007244025200 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/lib2to3/pgen2/__init__.pyi0000644€tŠÔÚ€2›s®0000000027013215007212027454 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2 (Python 3.6) import os import sys from typing import Text, Union if sys.version_info >= (3, 6): _Path = Union[Text, os.PathLike] else: _Path = Text mypy-0.560/typeshed/stdlib/2and3/lib2to3/pgen2/driver.pyi0000644€tŠÔÚ€2›s®0000000203213215007212027206 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.driver (Python 3.6) import os import sys from typing import Any, Callable, IO, Iterable, List, Optional, Text, Tuple, Union from logging import Logger from lib2to3.pytree import _Convert, _NL from lib2to3.pgen2 import _Path from lib2to3.pgen2.grammar import Grammar class Driver: grammar: Grammar logger: Logger convert: _Convert def __init__(self, grammar: Grammar, convert: Optional[_Convert] = ..., logger: Optional[Logger] = ...) -> None: ... def parse_tokens(self, tokens: Iterable[Any], debug: bool = ...) -> _NL: ... def parse_stream_raw(self, stream: IO[Text], debug: bool = ...) -> _NL: ... def parse_stream(self, stream: IO[Text], debug: bool = ...) -> _NL: ... def parse_file(self, filename: _Path, encoding: Optional[Text] = ..., debug: bool = ...) -> _NL: ... def parse_string(self, text: Text, debug: bool = ...) -> _NL: ... def load_grammar(gt: Text = ..., gp: Optional[Text] = ..., save: bool = ..., force: bool = ..., logger: Optional[Logger] = ...) -> Grammar: ... mypy-0.560/typeshed/stdlib/2and3/lib2to3/pgen2/grammar.pyi0000644€tŠÔÚ€2›s®0000000142113215007212027342 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.grammar (Python 3.6) from lib2to3.pgen2 import _Path from typing import Any, Dict, List, Optional, Text, Tuple, TypeVar _P = TypeVar('_P') _Label = Tuple[int, Optional[Text]] _DFA = List[List[Tuple[int, int]]] _DFAS = Tuple[_DFA, Dict[int, int]] class Grammar: symbol2number: Dict[Text, int] number2symbol: Dict[int, Text] states: List[_DFA] dfas: Dict[int, _DFAS] labels: List[_Label] keywords: Dict[Text, int] tokens: Dict[int, int] symbol2label: Dict[Text, int] start: int def __init__(self) -> None: ... def dump(self, filename: _Path) -> None: ... def load(self, filename: _Path) -> None: ... def copy(self: _P) -> _P: ... def report(self) -> None: ... opmap_raw: Text opmap: Dict[Text, Text] mypy-0.560/typeshed/stdlib/2and3/lib2to3/pgen2/literals.pyi0000644€tŠÔÚ€2›s®0000000033013215007212027531 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.literals (Python 3.6) from typing import Dict, Match, Text simple_escapes: Dict[Text, Text] def escape(m: Match) -> Text: ... def evalString(s: Text) -> Text: ... def test() -> None: ... 
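# Sketch of the lib2to3 parsing entry points stubbed in this package: build a
# Driver around the bundled Python grammar, parse a small snippet into a pytree,
# and render it back to source text.
from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver

d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
tree = d.parse_string("x = 1 + 2\n")    # parse_string expects a trailing newline
print(type(tree))                        # a pytree Node
print(str(tree), end="")                 # round-trips to "x = 1 + 2\n"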
mypy-0.560/typeshed/stdlib/2and3/lib2to3/pgen2/parse.pyi0000644€tŠÔÚ€2›s®0000000221013215007212027023 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.parse (Python 3.6) from typing import Any, Dict, List, Optional, Sequence, Set, Text, Tuple from lib2to3.pgen2.grammar import Grammar, _DFAS from lib2to3.pytree import _NL, _Convert, _RawNode _Context = Sequence[Any] class ParseError(Exception): msg: Text type: int value: Optional[Text] context: _Context def __init__(self, msg: Text, type: int, value: Optional[Text], context: _Context) -> None: ... class Parser: grammar: Grammar convert: _Convert stack: List[Tuple[_DFAS, int, _RawNode]] rootnode: Optional[_NL] used_names: Set[Text] def __init__(self, grammar: Grammar, convert: Optional[_Convert] = ...) -> None: ... def setup(self, start: Optional[int] = ...) -> None: ... def addtoken(self, type: int, value: Optional[Text], context: _Context) -> bool: ... def classify(self, type: int, value: Optional[Text], context: _Context) -> int: ... def shift(self, type: int, value: Optional[Text], newstate: int, context: _Context) -> None: ... def push(self, type: int, newdfa: _DFAS, newstate: int, context: _Context) -> None: ... def pop(self) -> None: ... mypy-0.560/typeshed/stdlib/2and3/lib2to3/pgen2/pgen.pyi0000644€tŠÔÚ€2›s®0000000421013215007212026644 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.pgen (Python 3.6) from typing import Any, Dict, IO, Iterable, Iterator, List, Optional, Text, Tuple from mypy_extensions import NoReturn from lib2to3.pgen2 import _Path, grammar from lib2to3.pgen2.tokenize import _TokenInfo class PgenGrammar(grammar.Grammar): ... class ParserGenerator: filename: _Path stream: IO[Text] generator: Iterator[_TokenInfo] first: Dict[Text, Dict[Text, int]] def __init__(self, filename: _Path, stream: Optional[IO[Text]] = ...) -> None: ... def make_grammar(self) -> PgenGrammar: ... def make_first(self, c: PgenGrammar, name: Text) -> Dict[int, int]: ... def make_label(self, c: PgenGrammar, label: Text) -> int: ... def addfirstsets(self) -> None: ... def calcfirst(self, name: Text) -> None: ... def parse(self) -> Tuple[Dict[Text, List[DFAState]], Text]: ... def make_dfa(self, start: NFAState, finish: NFAState) -> List[DFAState]: ... def dump_nfa(self, name: Text, start: NFAState, finish: NFAState) -> List[DFAState]: ... def dump_dfa(self, name: Text, dfa: Iterable[DFAState]) -> None: ... def simplify_dfa(self, dfa: List[DFAState]) -> None: ... def parse_rhs(self) -> Tuple[NFAState, NFAState]: ... def parse_alt(self) -> Tuple[NFAState, NFAState]: ... def parse_item(self) -> Tuple[NFAState, NFAState]: ... def parse_atom(self) -> Tuple[NFAState, NFAState]: ... def expect(self, type: int, value: Optional[Any] = ...) -> Text: ... def gettoken(self) -> None: ... def raise_error(self, msg: str, *args: Any) -> NoReturn: ... class NFAState: arcs: List[Tuple[Optional[Text], NFAState]] def __init__(self) -> None: ... def addarc(self, next: NFAState, label: Optional[Text] = ...) -> None: ... class DFAState: nfaset: Dict[NFAState, Any] isfinal: bool arcs: Dict[Text, DFAState] def __init__(self, nfaset: Dict[NFAState, Any], final: NFAState) -> None: ... def addarc(self, next: DFAState, label: Text) -> None: ... def unifystate(self, old: DFAState, new: DFAState) -> None: ... def __eq__(self, other: Any) -> bool: ... def generate_grammar(filename: _Path = ...) -> PgenGrammar: ... 
mypy-0.560/typeshed/stdlib/2and3/lib2to3/pgen2/token.pyi0000644€tŠÔÚ€2›s®0000000212713215007212027040 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.token (Python 3.6) import sys from typing import Dict, Text ENDMARKER: int NAME: int NUMBER: int STRING: int NEWLINE: int INDENT: int DEDENT: int LPAR: int RPAR: int LSQB: int RSQB: int COLON: int COMMA: int SEMI: int PLUS: int MINUS: int STAR: int SLASH: int VBAR: int AMPER: int LESS: int GREATER: int EQUAL: int DOT: int PERCENT: int BACKQUOTE: int LBRACE: int RBRACE: int EQEQUAL: int NOTEQUAL: int LESSEQUAL: int GREATEREQUAL: int TILDE: int CIRCUMFLEX: int LEFTSHIFT: int RIGHTSHIFT: int DOUBLESTAR: int PLUSEQUAL: int MINEQUAL: int STAREQUAL: int SLASHEQUAL: int PERCENTEQUAL: int AMPEREQUAL: int VBAREQUAL: int CIRCUMFLEXEQUAL: int LEFTSHIFTEQUAL: int RIGHTSHIFTEQUAL: int DOUBLESTAREQUAL: int DOUBLESLASH: int DOUBLESLASHEQUAL: int OP: int COMMENT: int NL: int if sys.version_info >= (3,): RARROW: int if sys.version_info >= (3, 5): AT: int ATEQUAL: int AWAIT: int ASYNC: int ERRORTOKEN: int N_TOKENS: int NT_OFFSET: int tok_name: Dict[int, Text] def ISTERMINAL(x: int) -> bool: ... def ISNONTERMINAL(x: int) -> bool: ... def ISEOF(x: int) -> bool: ... mypy-0.560/typeshed/stdlib/2and3/lib2to3/pgen2/tokenize.pyi0000644€tŠÔÚ€2›s®0000000173413215007212027553 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pgen2.tokenize (Python 3.6) # NOTE: Only elements from __all__ are present. from typing import Callable, Iterable, Iterator, List, Text, Tuple from lib2to3.pgen2.token import * # noqa _Coord = Tuple[int, int] _TokenEater = Callable[[int, Text, _Coord, _Coord, Text], None] _TokenInfo = Tuple[int, Text, _Coord, _Coord, Text] class TokenError(Exception): ... class StopTokenizing(Exception): ... def tokenize(readline: Callable[[], Text], tokeneater: _TokenEater = ...) -> None: ... class Untokenizer: tokens: List[Text] prev_row: int prev_col: int def __init__(self) -> None: ... def add_whitespace(self, start: _Coord) -> None: ... def untokenize(self, iterable: Iterable[_TokenInfo]) -> Text: ... def compat(self, token: Tuple[int, Text], iterable: Iterable[_TokenInfo]) -> None: ... def untokenize(iterable: Iterable[_TokenInfo]) -> Text: ... def generate_tokens( readline: Callable[[], Text] ) -> Iterator[_TokenInfo]: ... mypy-0.560/typeshed/stdlib/2and3/lib2to3/pygram.pyi0000644€tŠÔÚ€2›s®0000000434013215007212026203 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pygram (Python 3.6) from typing import Any from lib2to3.pgen2.grammar import Grammar class Symbols: def __init__(self, grammar: Grammar) -> None: ... 
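# Usage sketch for the pgen2 tokenizer stubbed above: generate_tokens() consumes a
# readline callable and yields the 5-tuples (type, string, start, end, line)
# described by the _TokenInfo alias in the stub.
import io
from lib2to3.pgen2 import token, tokenize

source = io.StringIO("x = 1\n")
for tok_type, tok_str, start, end, line in tokenize.generate_tokens(source.readline):
    print(token.tok_name[tok_type], repr(tok_str), start, end)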
class python_symbols(Symbols): and_expr: int and_test: int annassign: int arglist: int argument: int arith_expr: int assert_stmt: int async_funcdef: int async_stmt: int atom: int augassign: int break_stmt: int classdef: int comp_for: int comp_if: int comp_iter: int comp_op: int comparison: int compound_stmt: int continue_stmt: int decorated: int decorator: int decorators: int del_stmt: int dictsetmaker: int dotted_as_name: int dotted_as_names: int dotted_name: int encoding_decl: int eval_input: int except_clause: int exec_stmt: int expr: int expr_stmt: int exprlist: int factor: int file_input: int flow_stmt: int for_stmt: int funcdef: int global_stmt: int if_stmt: int import_as_name: int import_as_names: int import_from: int import_name: int import_stmt: int lambdef: int listmaker: int not_test: int old_lambdef: int old_test: int or_test: int parameters: int pass_stmt: int power: int print_stmt: int raise_stmt: int return_stmt: int shift_expr: int simple_stmt: int single_input: int sliceop: int small_stmt: int star_expr: int stmt: int subscript: int subscriptlist: int suite: int term: int test: int testlist: int testlist1: int testlist_gexp: int testlist_safe: int testlist_star_expr: int tfpdef: int tfplist: int tname: int trailer: int try_stmt: int typedargslist: int varargslist: int vfpdef: int vfplist: int vname: int while_stmt: int with_item: int with_stmt: int with_var: int xor_expr: int yield_arg: int yield_expr: int yield_stmt: int class pattern_symbols(Symbols): Alternative: int Alternatives: int Details: int Matcher: int NegatedUnit: int Repeater: int Unit: int python_grammar: Grammar python_grammar_no_print_statement: Grammar pattern_grammar: Grammar mypy-0.560/typeshed/stdlib/2and3/lib2to3/pytree.pyi0000644€tŠÔÚ€2›s®0000000626613215007212026225 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for lib2to3.pytree (Python 3.6) import sys from typing import Any, Callable, Dict, Iterator, List, Optional, Text, Tuple, TypeVar, Union from lib2to3.pgen2.grammar import Grammar _P = TypeVar('_P') _NL = Union[Node, Leaf] _Context = Tuple[Text, int, int] _Results = Dict[Text, _NL] _RawNode = Tuple[int, Text, _Context, Optional[List[_NL]]] _Convert = Callable[[Grammar, _RawNode], Any] HUGE: int def type_repr(type_num: int) -> Text: ... class Base: type: int parent: Optional[Node] prefix: Text children: List[_NL] was_changed: bool was_checked: bool def __eq__(self, other: Any) -> bool: ... def _eq(self: _P, other: _P) -> bool: ... def clone(self: _P) -> _P: ... def post_order(self) -> Iterator[_NL]: ... def pre_order(self) -> Iterator[_NL]: ... def replace(self, new: Union[_NL, List[_NL]]) -> None: ... def get_lineno(self) -> int: ... def changed(self) -> None: ... def remove(self) -> Optional[int]: ... @property def next_sibling(self) -> Optional[_NL]: ... @property def prev_sibling(self) -> Optional[_NL]: ... def leaves(self) -> Iterator[Leaf]: ... def depth(self) -> int: ... def get_suffix(self) -> Text: ... if sys.version_info < (3,): def get_prefix(self) -> Text: ... def set_prefix(self, prefix: Text) -> None: ... class Node(Base): fixers_applied: List[Any] def __init__(self, type: int, children: List[_NL], context: Optional[Any] = ..., prefix: Optional[Text] = ..., fixers_applied: Optional[List[Any]] = ...) -> None: ... def set_child(self, i: int, child: _NL) -> None: ... def insert_child(self, i: int, child: _NL) -> None: ... def append_child(self, child: _NL) -> None: ... 
class Leaf(Base): lineno: int column: int value: Text fixers_applied: List[Any] def __init__(self, type: int, value: Text, context: Optional[_Context] = ..., prefix: Optional[Text] = ..., fixers_applied: List[Any] = ...) -> None: ... def convert(gr: Grammar, raw_node: _RawNode) -> _NL: ... class BasePattern: type: int content: Optional[Text] name: Optional[Text] def optimize(self) -> BasePattern: ... # sic, subclasses are free to optimize themselves into different patterns def match(self, node: _NL, results: Optional[_Results] = ...) -> bool: ... def match_seq(self, nodes: List[_NL], results: Optional[_Results] = ...) -> bool: ... def generate_matches(self, nodes: List[_NL]) -> Iterator[Tuple[int, _Results]]: ... class LeafPattern(BasePattern): def __init__(self, type: Optional[int] = ..., content: Optional[Text] = ..., name: Optional[Text] = ...) -> None: ... class NodePattern(BasePattern): wildcards: bool def __init__(self, type: Optional[int] = ..., content: Optional[Text] = ..., name: Optional[Text] = ...) -> None: ... class WildcardPattern(BasePattern): min: int max: int def __init__(self, content: Optional[Text] = ..., min: int = ..., max: int = ..., name: Optional[Text] = ...) -> None: ... class NegatedPattern(BasePattern): def __init__(self, content: Optional[Text] = ...) -> None: ... def generate_matches(patterns: List[BasePattern], nodes: List[_NL]) -> Iterator[Tuple[int, _Results]]: ... mypy-0.560/typeshed/stdlib/2and3/linecache.pyi0000644€tŠÔÚ€2›s®0000000111113215007212025332 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Dict, List, Optional, Text _ModuleGlobals = Dict[str, Any] def getline(filename: Text, lineno: int, module_globals: Optional[_ModuleGlobals] = ...) -> str: ... def clearcache() -> None: ... def getlines(filename: Text, module_globals: Optional[_ModuleGlobals] = ...) -> None: ... def checkcache(filename: Optional[Text] = ...) -> None: ... def updatecache(filename: Text, module_globals: Optional[_ModuleGlobals] = ...) -> List[str]: ... if sys.version_info >= (3, 5): def lazycache(filename: Text, module_globals: _ModuleGlobals) -> bool: ... mypy-0.560/typeshed/stdlib/2and3/locale.pyi0000644€tŠÔÚ€2›s®0000000604513215007212024671 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for locale from decimal import Decimal from typing import Any, Iterable, List, Mapping, Optional, Sequence, Tuple, Union import sys # workaround for mypy#2010 if sys.version_info < (3,): from __builtin__ import str as _str else: from builtins import str as _str CODESET = ... # type: int D_T_FMT = ... # type: int D_FMT = ... # type: int T_FMT = ... # type: int T_FMT_AMPM = ... # type: int DAY_1 = ... # type: int DAY_2 = ... # type: int DAY_3 = ... # type: int DAY_4 = ... # type: int DAY_5 = ... # type: int DAY_6 = ... # type: int DAY_7 = ... # type: int ABDAY_1 = ... # type: int ABDAY_2 = ... # type: int ABDAY_3 = ... # type: int ABDAY_4 = ... # type: int ABDAY_5 = ... # type: int ABDAY_6 = ... # type: int ABDAY_7 = ... # type: int MON_1 = ... # type: int MON_2 = ... # type: int MON_3 = ... # type: int MON_4 = ... # type: int MON_5 = ... # type: int MON_6 = ... # type: int MON_7 = ... # type: int MON_8 = ... # type: int MON_9 = ... # type: int MON_10 = ... # type: int MON_11 = ... # type: int MON_12 = ... # type: int ABMON_1 = ... # type: int ABMON_2 = ... # type: int ABMON_3 = ... # type: int ABMON_4 = ... # type: int ABMON_5 = ... # type: int ABMON_6 = ... # type: int ABMON_7 = ... # type: int ABMON_8 = ... # type: int ABMON_9 = ... 
# type: int ABMON_10 = ... # type: int ABMON_11 = ... # type: int ABMON_12 = ... # type: int RADIXCHAR = ... # type: int THOUSEP = ... # type: int YESEXPR = ... # type: int NOEXPR = ... # type: int CRNCYSTR = ... # type: int ERA = ... # type: int ERA_D_T_FMT = ... # type: int ERA_D_FMT = ... # type: int ERA_T_FMT = ... # type: int ALT_DIGITS = ... # type: int LC_CTYPE = ... # type: int LC_COLLATE = ... # type: int LC_TIME = ... # type: int LC_MONETARY = ... # type: int LC_MESSAGES = ... # type: int LC_NUMERIC = ... # type: int LC_ALL = ... # type: int CHAR_MAX = ... # type: int class Error(Exception): ... def setlocale(category: int, locale: Union[_str, Iterable[_str], None] = ...) -> _str: ... def localeconv() -> Mapping[_str, Union[int, _str, List[int]]]: ... def nl_langinfo(option: int) -> _str: ... def getdefaultlocale(envvars: Tuple[_str, ...] = ...) -> Tuple[Optional[_str], Optional[_str]]: ... def getlocale(category: int = ...) -> Sequence[_str]: ... def getpreferredencoding(do_setlocale: bool = ...) -> _str: ... def normalize(localename: _str) -> _str: ... def resetlocale(category: int = ...) -> None: ... def strcoll(string1: _str, string2: _str) -> int: ... def strxfrm(string: _str) -> _str: ... def format(format: _str, val: Union[float, Decimal], grouping: bool = ..., monetary: bool = ...) -> _str: ... def format_string(format: _str, val: Sequence[Any], grouping: bool = ...) -> _str: ... def currency(val: int, symbol: bool = ..., grouping: bool = ..., international: bool = ...) -> _str: ... if sys.version_info >= (3, 5): def delocalize(string: _str) -> None: ... def atof(string: _str) -> float: ... def atoi(string: _str) -> int: ... def str(float: float) -> _str: ... mypy-0.560/typeshed/stdlib/2and3/logging/0000755€tŠÔÚ€2›s®0000000000013215007244024335 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/logging/__init__.pyi0000644€tŠÔÚ€2›s®0000004405013215007212026615 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for logging (Python 3.4) from typing import ( Any, Callable, Dict, Iterable, List, Mapping, MutableMapping, Optional, IO, Tuple, Text, Union, overload, ) from string import Template from time import struct_time from types import TracebackType import sys import threading _SysExcInfoType = Union[Tuple[type, BaseException, TracebackType], Tuple[None, None, None]] if sys.version_info >= (3, 5): _ExcInfoType = Union[None, bool, _SysExcInfoType, BaseException] else: _ExcInfoType = Union[None, bool, _SysExcInfoType] _ArgsType = Union[Tuple[Any, ...], Dict[str, Any]] _FilterType = Union['Filter', Callable[['LogRecord'], int]] _Level = Union[int, Text] raiseExceptions: bool if sys.version_info >= (3,): _levelToName = ... # type: Dict[int, str] _nameToLevel = ... # type: Dict[str, int] else: _levelNames = ... # type: dict class Filterer(object): filters = ... # type: List[Filter] def __init__(self) -> None: ... def addFilter(self, filter: Filter) -> None: ... def removeFilter(self, filter: Filter) -> None: ... def filter(self, record: 'LogRecord') -> bool: ... class Logger(Filterer): name = ... # type: str level = ... # type: int parent = ... # type: Union[Logger, PlaceHolder] propagate = ... # type: bool handlers = ... # type: List[Handler] disabled = ... # type: int def __init__(self, name: str, level: _Level = ...) -> None: ... def setLevel(self, lvl: Union[int, str]) -> None: ... def isEnabledFor(self, lvl: int) -> bool: ... def getEffectiveLevel(self) -> int: ... def getChild(self, suffix: str) -> 'Logger': ... 
if sys.version_info >= (3,): def debug(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def info(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warning(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warn(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def error(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def critical(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def log(self, lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def exception(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... else: def debug(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def info(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warning(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warn(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def error(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def critical(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def log(self, lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def exception(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def addFilter(self, filt: _FilterType) -> None: ... def removeFilter(self, filt: _FilterType) -> None: ... def filter(self, record: 'LogRecord') -> bool: ... def addHandler(self, hdlr: 'Handler') -> None: ... def removeHandler(self, hdlr: 'Handler') -> None: ... if sys.version_info >= (3,): def findCaller(self, stack_info: bool = ...) -> Tuple[str, int, str, Optional[str]]: ... else: def findCaller(self) -> Tuple[str, int, str]: ... def handle(self, record: 'LogRecord') -> None: ... if sys.version_info >= (3,): def makeRecord(self, name: str, lvl: int, fn: str, lno: int, msg: Text, args: Mapping[str, Any], exc_info: Optional[_SysExcInfoType], func: Optional[str] = ..., extra: Optional[Mapping[str, Any]] = ..., sinfo: Optional[str] = ...) -> LogRecord: ... else: def makeRecord(self, name: str, lvl: int, fn: str, lno: int, msg: Text, args: Mapping[str, Any], exc_info: Optional[_SysExcInfoType], func: Optional[str] = ..., extra: Optional[Mapping[str, Any]] = ...) -> LogRecord: ... if sys.version_info >= (3,): def hasHandlers(self) -> bool: ... CRITICAL = ... # type: int FATAL = ... # type: int ERROR = ... 
# type: int WARNING = ... # type: int WARN = ... # type: int INFO = ... # type: int DEBUG = ... # type: int NOTSET = ... # type: int class Handler(Filterer): level = ... # type: int formatter = ... # type: Optional[Formatter] lock = ... # type: Optional[threading.Lock] def __init__(self, level: _Level = ...) -> None: ... def createLock(self) -> None: ... def acquire(self) -> None: ... def release(self) -> None: ... def setLevel(self, lvl: Union[int, str]) -> None: ... def setFormatter(self, form: 'Formatter') -> None: ... def addFilter(self, filt: _FilterType) -> None: ... def removeFilter(self, filt: _FilterType) -> None: ... def filter(self, record: 'LogRecord') -> bool: ... def flush(self) -> None: ... def close(self) -> None: ... def handle(self, record: 'LogRecord') -> None: ... def handleError(self, record: 'LogRecord') -> None: ... def format(self, record: 'LogRecord') -> str: ... def emit(self, record: 'LogRecord') -> None: ... class Formatter: converter = ... # type: Callable[[Optional[float]], struct_time] _fmt = ... # type: Optional[str] datefmt = ... # type: Optional[str] if sys.version_info >= (3,): _style = ... # type: PercentStyle default_time_format = ... # type: str default_msec_format = ... # type: str if sys.version_info >= (3,): def __init__(self, fmt: Optional[str] = ..., datefmt: Optional[str] =..., style: str = ...) -> None: ... else: def __init__(self, fmt: Optional[str] = ..., datefmt: Optional[str] =...) -> None: ... def format(self, record: 'LogRecord') -> str: ... def formatTime(self, record: 'LogRecord', datefmt: str = ...) -> str: ... def formatException(self, exc_info: _SysExcInfoType) -> str: ... if sys.version_info >= (3,): def formatStack(self, stack_info: str) -> str: ... class Filter: def __init__(self, name: str = ...) -> None: ... def filter(self, record: 'LogRecord') -> int: ... class LogRecord: args = ... # type: _ArgsType asctime = ... # type: str created = ... # type: int exc_info = ... # type: Optional[_SysExcInfoType] filename = ... # type: str funcName = ... # type: str levelname = ... # type: str levelno = ... # type: int lineno = ... # type: int module = ... # type: str msecs = ... # type: int message = ... # type: str msg = ... # type: str name = ... # type: str pathname = ... # type: str process = ... # type: int processName = ... # type: str relativeCreated = ... # type: int if sys.version_info >= (3,): stack_info = ... # type: Optional[str] thread = ... # type: int threadName = ... # type: str if sys.version_info >= (3,): def __init__(self, name: str, level: int, pathname: str, lineno: int, msg: Text, args: _ArgsType, exc_info: Optional[_SysExcInfoType], func: Optional[str] = ..., sinfo: Optional[str] = ...) -> None: ... else: def __init__(self, name: str, level: int, pathname: str, lineno: int, msg: Text, args: _ArgsType, exc_info: Optional[_SysExcInfoType], func: Optional[str] = ...) -> None: ... def getMessage(self) -> str: ... class LoggerAdapter: def __init__(self, logger: Logger, extra: Mapping[str, Any]) -> None: ... def process(self, msg: Text, kwargs: MutableMapping[str, Any]) -> Tuple[str, MutableMapping[str, Any]]: ... if sys.version_info >= (3,): def debug(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def info(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... 
def warning(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def error(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def exception(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def critical(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def log(self, lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... else: def debug(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def info(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warning(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def error(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def exception(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def critical(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def log(self, lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def isEnabledFor(self, lvl: int) -> bool: ... if sys.version_info >= (3,): def getEffectiveLevel(self) -> int: ... def setLevel(self, lvl: Union[int, str]) -> None: ... def hasHandlers(self) -> bool: ... if sys.version_info >= (3,): def getLogger(name: Optional[str] = ...) -> Logger: ... else: @overload def getLogger() -> Logger: ... @overload def getLogger(name: Union[Text, str]) -> Logger: ... def getLoggerClass() -> type: ... if sys.version_info >= (3,): def getLogRecordFactory() -> Callable[..., LogRecord]: ... if sys.version_info >= (3,): def debug(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def info(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warning(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warn(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def error(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def critical(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def exception(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... 
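# --- Illustrative usage sketch (not part of the stub archive) ----------------
# The module-level convenience functions stubbed above mirror the Logger
# methods; a minimal, assumed example of typical calls (the logger name and
# message text are made up).
import logging

logging.basicConfig(level=logging.INFO,
                    format="%(levelname)s %(name)s: %(message)s")
log = logging.getLogger("example")           # hypothetical logger name
log.info("processed %d records", 42)         # args are %-interpolated lazily
try:
    1 / 0
except ZeroDivisionError:
    log.exception("division failed")         # exc_info is attached automatically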
def log(lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... else: def debug(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def info(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warning(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def warn(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def error(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def critical(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def exception(msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... def log(lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ..., extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ... fatal = critical def disable(lvl: int) -> None: ... def addLevelName(lvl: int, levelName: str) -> None: ... def getLevelName(lvl: int) -> str: ... def makeLogRecord(attrdict: Mapping[str, Any]) -> LogRecord: ... if sys.version_info >= (3,): def basicConfig(*, filename: str = ..., filemode: str = ..., format: str = ..., datefmt: str = ..., style: str = ..., level: _Level = ..., stream: IO[str] = ..., handlers: Iterable[Handler] = ...) -> None: ... else: @overload def basicConfig() -> None: ... @overload def basicConfig(*, filename: str = ..., filemode: str = ..., format: str = ..., datefmt: str = ..., level: _Level = ..., stream: IO[str] = ...) -> None: ... def shutdown() -> None: ... def setLoggerClass(klass: type) -> None: ... def captureWarnings(capture: bool) -> None: ... if sys.version_info >= (3,): def setLogRecordFactory(factory: Callable[..., LogRecord]) -> None: ... if sys.version_info >= (3,): lastResort = ... # type: Optional['StreamHandler'] class StreamHandler(Handler): stream = ... # type: IO[str] if sys.version_info >= (3,): terminator = ... # type: str def __init__(self, stream: Optional[IO[str]] = ...) -> None: ... class FileHandler(Handler): def __init__(self, filename: str, mode: str = ..., encoding: Optional[str] = ..., delay: bool = ...) -> None: ... class NullHandler(Handler): ... class PlaceHolder: def __init__(self, alogger: Logger) -> None: ... def append(self, alogger: Logger) -> None: ... # Below aren't in module docs but still visible class RootLogger(Logger): ... root = ... # type: RootLogger if sys.version_info >= (3,): class PercentStyle(object): default_format = ... # type: str asctime_format = ... # type: str asctime_search = ... # type: str _fmt = ... # type: str def __init__(self, fmt: str) -> None: ... def usesTime(self) -> bool: ... def format(self, record: Any) -> str: ... class StrFormatStyle(PercentStyle): ... class StringTemplateStyle(PercentStyle): _tpl = ... # type: Template BASIC_FORMAT = ... # type: str _STYLES = ... 
# type: Dict[str, Tuple[PercentStyle, str]] mypy-0.560/typeshed/stdlib/2and3/logging/config.pyi0000644€tŠÔÚ€2›s®0000000160313215007212026320 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for logging.config (Python 3.4) from typing import Any, Callable, Dict, Optional, IO, Union import sys if sys.version_info >= (3,): from configparser import RawConfigParser else: from ConfigParser import RawConfigParser def dictConfig(config: Dict[str, Any]) -> None: ... if sys.version_info >= (3, 4): def fileConfig(fname: Union[str, IO[str], RawConfigParser], defaults: Optional[Dict[str, str]] = ..., disable_existing_loggers: bool = ...) -> None: ... def listen(port: int = ..., verify: Optional[Callable[[bytes], Optional[bytes]]] = ...) -> None: ... else: def fileConfig(fname: Union[str, IO[str]], defaults: Optional[Dict[str, str]] = ..., disable_existing_loggers: bool = ...) -> None: ... def listen(port: int = ...) -> None: ... def stopListening() -> None: ... mypy-0.560/typeshed/stdlib/2and3/logging/handlers.pyi0000644€tŠÔÚ€2›s®0000001736013215007212026662 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for logging.handlers (Python 2.4) import datetime from logging import Handler, FileHandler, LogRecord from socket import SocketType import ssl import sys from typing import Any, Callable, Dict, List, Optional, Tuple, Union, overload if sys.version_info >= (3,): from queue import Queue else: from Queue import Queue # TODO update socket stubs to add SocketKind _SocketKind = int class WatchedFileHandler(Handler): @overload def __init__(self, filename: str) -> None: ... @overload def __init__(self, filename: str, mode: str) -> None: ... @overload def __init__(self, filename: str, mode: str, encoding: Optional[str]) -> None: ... @overload def __init__(self, filename: str, mode: str, encoding: Optional[str], delay: bool) -> None: ... if sys.version_info >= (3,): class BaseRotatingHandler(FileHandler): namer = ... # type: Optional[Callable[[str], str]] rotator = ... # type: Optional[Callable[[str, str], None]] def __init__(self, filename: str, mode: str, encoding: Optional[str] = ..., delay: bool = ...) -> None: ... def rotation_filename(self, default_name: str) -> None: ... def rotate(self, source: str, dest: str) -> None: ... if sys.version_info >= (3,): class RotatingFileHandler(BaseRotatingHandler): def __init__(self, filename: str, mode: str = ..., maxBytes: int = ..., backupCount: int = ..., encoding: Optional[str] = ..., delay: bool = ...) -> None: ... def doRollover(self) -> None: ... else: class RotatingFileHandler(Handler): def __init__(self, filename: str, mode: str = ..., maxBytes: int = ..., backupCount: int = ..., encoding: Optional[str] = ..., delay: bool = ...) -> None: ... def doRollover(self) -> None: ... if sys.version_info >= (3,): class TimedRotatingFileHandler(BaseRotatingHandler): if sys.version_info >= (3, 4): def __init__(self, filename: str, when: str = ..., interval: int = ..., backupCount: int = ..., encoding: Optional[str] = ..., delay: bool = ..., utc: bool = ..., atTime: Optional[datetime.datetime] = ...) -> None: ... else: def __init__(self, filename: str, when: str = ..., interval: int = ..., backupCount: int = ..., encoding: Optional[str] = ..., delay: bool = ..., utc: bool = ...) -> None: ... def doRollover(self) -> None: ... else: class TimedRotatingFileHandler: def __init__(self, filename: str, when: str = ..., interval: int = ..., backupCount: int = ..., encoding: Optional[str] = ..., delay: bool = ..., utc: bool = ...) -> None: ... 
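# --- Illustrative usage sketch (not part of the stub archive) ----------------
# How the rotating handlers stubbed above are typically wired up.  The log
# file name, size limit, and backup count are made-up example values.
import logging
from logging.handlers import RotatingFileHandler

handler = RotatingFileHandler("app.log", maxBytes=1000000, backupCount=3)
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
logger = logging.getLogger("example.rotating")
logger.addHandler(handler)
logger.warning("this record ends up in app.log, then app.log.1 after rollover")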
def doRollover(self) -> None: ... class SocketHandler(Handler): retryStart = ... # type: float retryFactor = ... # type: float retryMax = ... # type: float if sys.version_info >= (3, 4): def __init__(self, host: str, port: Optional[int]) -> None: ... else: def __init__(self, host: str, port: int) -> None: ... def makeSocket(self) -> SocketType: ... def makePickle(self, record: LogRecord) -> bytes: ... def send(self, packet: bytes) -> None: ... def createSocket(self) -> None: ... class DatagramHandler(SocketHandler): ... class SysLogHandler(Handler): LOG_ALERT = ... # type: int LOG_CRIT = ... # type: int LOG_DEBUG = ... # type: int LOG_EMERG = ... # type: int LOG_ERR = ... # type: int LOG_INFO = ... # type: int LOG_NOTICE = ... # type: int LOG_WARNING = ... # type: int LOG_AUTH = ... # type: int LOG_AUTHPRIV = ... # type: int LOG_CRON = ... # type: int LOG_DAEMON = ... # type: int LOG_FTP = ... # type: int LOG_KERN = ... # type: int LOG_LPR = ... # type: int LOG_MAIL = ... # type: int LOG_NEWS = ... # type: int LOG_SYSLOG = ... # type: int LOG_USER = ... # type: int LOG_UUCP = ... # type: int LOG_LOCAL0 = ... # type: int LOG_LOCAL1 = ... # type: int LOG_LOCAL2 = ... # type: int LOG_LOCAL3 = ... # type: int LOG_LOCAL4 = ... # type: int LOG_LOCAL5 = ... # type: int LOG_LOCAL6 = ... # type: int LOG_LOCAL7 = ... # type: int def __init__(self, address: Union[Tuple[str, int], str] = ..., facility: int = ..., socktype: _SocketKind = ...) -> None: ... def encodePriority(self, facility: Union[int, str], priority: Union[int, str]) -> int: ... def mapPriority(self, levelName: int) -> str: ... class NTEventLogHandler(Handler): def __init__(self, appname: str, dllname: str = ..., logtype: str = ...) -> None: ... def getEventCategory(self, record: LogRecord) -> int: ... # TODO correct return value? def getEventType(self, record: LogRecord) -> int: ... def getMessageID(self, record: LogRecord) -> int: ... class SMTPHandler(Handler): # TODO `secure` can also be an empty tuple if sys.version_info >= (3,): def __init__(self, mailhost: Union[str, Tuple[str, int]], fromaddr: str, toaddrs: List[str], subject: str, credentials: Optional[Tuple[str, str]] = ..., secure: Union[Tuple[str], Tuple[str, str], None] =..., timeout: float = ...) -> None: ... else: def __init__(self, mailhost: Union[str, Tuple[str, int]], fromaddr: str, toaddrs: List[str], subject: str, credentials: Optional[Tuple[str, str]] = ..., secure: Union[Tuple[str], Tuple[str, str], None] =...) -> None: ... def getSubject(self, record: LogRecord) -> str: ... class BufferingHandler(Handler): def __init__(self, capacity: int) -> None: ... def shouldFlush(self, record: LogRecord) -> bool: ... class MemoryHandler(BufferingHandler): def __init__(self, capacity: int, flushLevel: int = ..., target: Optional[Handler] =...) -> None: ... def setTarget(self, target: Handler) -> None: ... class HTTPHandler(Handler): if sys.version_info >= (3, 5): def __init__(self, host: str, url: str, method: str = ..., secure: bool = ..., credentials: Optional[Tuple[str, str]] = ..., context: Optional[ssl.SSLContext] = ...) -> None: ... elif sys.version_info >= (3,): def __init__(self, host: str, url: str, method: str = ..., secure: bool = ..., credentials: Optional[Tuple[str, str]] = ...) -> None: ... else: def __init__(self, host: str, url: str, method: str = ...) -> None: ... def mapLogRecord(self, record: LogRecord) -> Dict[str, Any]: ... if sys.version_info >= (3,): class QueueHandler(Handler): def __init__(self, queue: Queue) -> None: ... 
def prepare(self, record: LogRecord) -> Any: ... def enqueue(self, record: LogRecord) -> None: ... class QueueListener: if sys.version_info >= (3, 5): def __init__(self, queue: Queue, *handlers: Handler, respect_handler_level: bool = ...) -> None: ... else: def __init__(self, queue: Queue, *handlers: Handler) -> None: ... def dequeue(self, block: bool) -> LogRecord: ... def prepare(self, record: LogRecord) -> Any: ... def start(self) -> None: ... def stop(self) -> None: ... def enqueue_sentinel(self) -> None: ... mypy-0.560/typeshed/stdlib/2and3/marshal.pyi0000644€tŠÔÚ€2›s®0000000037213215007212025056 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO version = ... # type: int def dump(value: Any, file: IO[Any], version: int = ...) -> None: ... def load(file: IO[Any]) -> Any: ... def dumps(value: Any, version: int = ...) -> str: ... def loads(string: str) -> Any: ... mypy-0.560/typeshed/stdlib/2and3/math.pyi0000644€tŠÔÚ€2›s®0000000420213215007212024354 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for math # See: http://docs.python.org/2/library/math.html from typing import Tuple, Iterable, Optional import sys e = ... # type: float pi = ... # type: float if sys.version_info >= (3, 5): inf = ... # type: float nan = ... # type: float if sys.version_info >= (3, 6): tau = ... # type: float def acos(x: float) -> float: ... def acosh(x: float) -> float: ... def asin(x: float) -> float: ... def asinh(x: float) -> float: ... def atan(x: float) -> float: ... def atan2(y: float, x: float) -> float: ... def atanh(x: float) -> float: ... def ceil(x: float) -> int: ... def copysign(x: float, y: float) -> float: ... def cos(x: float) -> float: ... def cosh(x: float) -> float: ... def degrees(x: float) -> float: ... def erf(x: float) -> float: ... def erfc(x: float) -> float: ... def exp(x: float) -> float: ... def expm1(x: float) -> float: ... def fabs(x: float) -> float: ... def factorial(x: int) -> int: ... if sys.version_info >= (3,): def floor(x: float) -> int: ... else: def floor(x: float) -> float: ... def fmod(x: float, y: float) -> float: ... def frexp(x: float) -> Tuple[float, int]: ... def fsum(iterable: Iterable) -> float: ... def gamma(x: float) -> float: ... if sys.version_info >= (3, 5): def gcd(a: int, b: int) -> int: ... def hypot(x: float, y: float) -> float: ... if sys.version_info >= (3, 5): def isclose(a: float, b: float, rel_tol: float = ..., abs_tol: float = ...) -> bool: ... def isinf(x: float) -> bool: ... if sys.version_info >= (3,): def isfinite(x: float) -> bool: ... def isnan(x: float) -> bool: ... def ldexp(x: float, i: int) -> float: ... def lgamma(x: float) -> float: ... def log(x: float, base: float = ...) -> float: ... def log10(x: float) -> float: ... def log1p(x: float) -> float: ... if sys.version_info >= (3, 3): def log2(x: float) -> float: ... def modf(x: float) -> Tuple[float, float]: ... def pow(x: float, y: float) -> float: ... def radians(x: float) -> float: ... def sin(x: float) -> float: ... def sinh(x: float) -> float: ... def sqrt(x: float) -> float: ... def tan(x: float) -> float: ... def tanh(x: float) -> float: ... def trunc(x: float) -> int: ... mypy-0.560/typeshed/stdlib/2and3/mimetypes.pyi0000644€tŠÔÚ€2›s®0000000326313215007212025445 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for mimetypes from typing import Dict, IO, List, Optional, Sequence, Text, Tuple import sys def guess_type(url: Text, strict: bool = ...) -> Tuple[Optional[str], Optional[str]]: ... def guess_all_extensions(type: str, strict: bool = ...) 
-> List[str]: ... def guess_extension(type: str, strict: bool = ...) -> Optional[str]: ... def init(files: Optional[Sequence[str]] = ...) -> None: ... def read_mime_types(filename: str) -> Optional[Dict[str, str]]: ... def add_type(type: str, ext: str, strict: bool = ...) -> None: ... inited = ... # type: bool knownfiles = ... # type: List[str] suffix_map = ... # type: Dict[str, str] encodings_map = ... # type: Dict[str, str] types_map = ... # type: Dict[str, str] common_types = ... # type: Dict[str, str] class MimeTypes: suffix_map = ... # type: Dict[str, str] encodings_map = ... # type: Dict[str, str] types_map = ... # type: Tuple[Dict[str, str], Dict[str, str]] types_map_inv = ... # type: Tuple[Dict[str, str], Dict[str, str]] def __init__(self, filenames: Tuple[str, ...] = ..., strict: bool = ...) -> None: ... def guess_extension(self, type: str, strict: bool = ...) -> Optional[str]: ... def guess_type(self, url: str, strict: bool = ...) -> Tuple[Optional[str], Optional[str]]: ... def guess_all_extensions(self, type: str, strict: bool = ...) -> List[str]: ... def read(self, filename: str, strict: bool = ...) -> None: ... def readfp(self, fp: IO[str], strict: bool = ...) -> None: ... if sys.platform == 'win32': def read_windows_registry(self, strict: bool = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/mmap.pyi0000644€tŠÔÚ€2›s®0000000540713215007212024365 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import (Optional, Sequence, Union, Generic, overload, Iterable, Iterator, Sized, ContextManager, AnyStr) ACCESS_READ = ... # type: int ACCESS_WRITE = ... # type: int ACCESS_COPY = ... # type: int ALLOCATIONGRANULARITY = ... # type: int if sys.platform != 'win32': MAP_PRIVATE = ... # type: int MAP_SHARED = ... # type: int PROT_READ = ... # type: int PROT_WRITE = ... # type: int PAGESIZE = ... # type: int class _mmap(Generic[AnyStr]): if sys.platform == 'win32': def __init__(self, fileno: int, length: int, tagname: Optional[str] = ..., access: int = ..., offset: int = ...) -> None: ... else: def __init__(self, fileno: int, length: int, flags: int = ..., prot: int = ..., access: int = ..., offset: int = ...) -> None: ... def close(self) -> None: ... def find(self, sub: AnyStr, start: int = ..., end: int = ...) -> int: ... def flush(self, offset: int = ..., size: int = ...) -> int: ... def move(self, dest: int, src: int, count: int) -> None: ... def read(self, n: int = ...) -> AnyStr: ... def read_byte(self) -> AnyStr: ... def readline(self) -> AnyStr: ... def resize(self, newsize: int) -> None: ... def seek(self, pos: int, whence: int = ...) -> None: ... def size(self) -> int: ... def tell(self) -> int: ... def write(self, bytes: AnyStr) -> None: ... def write_byte(self, byte: AnyStr) -> None: ... def __len__(self) -> int: ... if sys.version_info >= (3,): class mmap(_mmap, ContextManager[mmap], Iterable[bytes], Sized): closed = ... # type: bool def rfind(self, sub: bytes, start: int = ..., stop: int = ...) -> int: ... @overload def __getitem__(self, index: int) -> int: ... @overload def __getitem__(self, index: slice) -> bytes: ... def __delitem__(self, index: Union[int, slice]) -> None: ... @overload def __setitem__(self, index: int, object: int) -> None: ... @overload def __setitem__(self, index: slice, object: bytes) -> None: ... # Doesn't actually exist, but the object is actually iterable because it has __getitem__ and # __len__, so we claim that there is also an __iter__ to help type checkers. def __iter__(self) -> Iterator[bytes]: ... 
else: class mmap(_mmap, Sequence[bytes]): def rfind(self, string: bytes, start: int = ..., stop: int = ...) -> int: ... def __getitem__(self, index: Union[int, slice]) -> bytes: ... def __delitem__(self, index: Union[int, slice]) -> None: ... def __setitem__(self, index: Union[int, slice], object: bytes) -> None: ... mypy-0.560/typeshed/stdlib/2and3/numbers.pyi0000644€tŠÔÚ€2›s®0000000755613215007212025115 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for numbers (Python 3.5) # See https://docs.python.org/2.7/library/numbers.html # and https://docs.python.org/3/library/numbers.html # # Note: these stubs are incomplete. The more complex type # signatures are currently omitted. from typing import Any, Optional, TypeVar, SupportsFloat from abc import ABCMeta, abstractmethod import sys class Number(metaclass=ABCMeta): @abstractmethod def __hash__(self) -> int: ... class Complex(Number): @abstractmethod def __complex__(self) -> complex: ... if sys.version_info >= (3, 0): def __bool__(self) -> bool: ... else: def __nonzero__(self) -> bool: ... @property @abstractmethod def real(self): ... @property @abstractmethod def imag(self): ... @abstractmethod def __add__(self, other): ... @abstractmethod def __radd__(self, other): ... @abstractmethod def __neg__(self): ... @abstractmethod def __pos__(self): ... def __sub__(self, other): ... def __rsub__(self, other): ... @abstractmethod def __mul__(self, other): ... @abstractmethod def __rmul__(self, other): ... if sys.version_info < (3, 0): @abstractmethod def __div__(self, other): ... @abstractmethod def __rdiv__(self, other): ... @abstractmethod def __truediv__(self, other): ... @abstractmethod def __rtruediv__(self, other): ... @abstractmethod def __pow__(self, exponent): ... @abstractmethod def __rpow__(self, base): ... def __abs__(self): ... def conjugate(self): ... def __eq__(self, other: object) -> bool: ... if sys.version_info < (3, 0): def __ne__(self, other: object) -> bool: ... class Real(Complex, SupportsFloat): @abstractmethod def __float__(self) -> float: ... @abstractmethod def __trunc__(self) -> int: ... if sys.version_info >= (3, 0): @abstractmethod def __floor__(self) -> int: ... @abstractmethod def __ceil__(self) -> int: ... @abstractmethod def __round__(self, ndigits: Optional[int] = ...): ... def __divmod__(self, other): ... def __rdivmod__(self, other): ... @abstractmethod def __floordiv__(self, other): ... @abstractmethod def __rfloordiv__(self, other): ... @abstractmethod def __mod__(self, other): ... @abstractmethod def __rmod__(self, other): ... @abstractmethod def __lt__(self, other) -> bool: ... @abstractmethod def __le__(self, other) -> bool: ... def __complex__(self) -> complex: ... @property def real(self): ... @property def imag(self): ... def conjugate(self): ... class Rational(Real): @property @abstractmethod def numerator(self) -> int: ... @property @abstractmethod def denominator(self) -> int: ... def __float__(self) -> float: ... class Integral(Rational): if sys.version_info >= (3, 0): @abstractmethod def __int__(self) -> int: ... else: @abstractmethod def __long__(self) -> long: ... def __index__(self) -> int: ... @abstractmethod def __pow__(self, exponent, modulus=None): ... @abstractmethod def __lshift__(self, other): ... @abstractmethod def __rlshift__(self, other): ... @abstractmethod def __rshift__(self, other): ... @abstractmethod def __rrshift__(self, other): ... @abstractmethod def __and__(self, other): ... @abstractmethod def __rand__(self, other): ... @abstractmethod def __xor__(self, other): ... 
@abstractmethod def __rxor__(self, other): ... @abstractmethod def __or__(self, other): ... @abstractmethod def __ror__(self, other): ... @abstractmethod def __invert__(self): ... def __float__(self) -> float: ... @property def numerator(self) -> int: ... @property def denominator(self) -> int: ... mypy-0.560/typeshed/stdlib/2and3/opcode.pyi0000644€tŠÔÚ€2›s®0000000120513215007212024674 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Dict, Optional, Sequence import sys cmp_op = ... # type: Sequence[str] hasconst = ... # type: List[int] hasname = ... # type: List[int] hasjrel = ... # type: List[int] hasjabs = ... # type: List[int] haslocal = ... # type: List[int] hascompare = ... # type: List[int] hasfree = ... # type: List[int] opname = ... # type: List[str] opmap = ... # type: Dict[str, int] HAVE_ARGUMENT = ... # type: int EXTENDED_ARG = ... # type: int if sys.version_info >= (3, 4): def stack_effect(opcode: int, oparg: Optional[int] = ...) -> int: ... if sys.version_info >= (3, 6): hasnargs = ... # type: List[int] mypy-0.560/typeshed/stdlib/2and3/operator.pyi0000644€tŠÔÚ€2›s®0000001456113215007212025267 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for operator from typing import ( Any, Callable, Container, Mapping, MutableMapping, MutableSequence, Sequence, SupportsAbs, Tuple, TypeVar, overload, ) import sys _T = TypeVar('_T') _K = TypeVar('_K') _V = TypeVar('_V') def lt(a: Any, b: Any) -> Any: ... def le(a: Any, b: Any) -> Any: ... def eq(a: Any, b: Any) -> Any: ... def ne(a: Any, b: Any) -> Any: ... def ge(a: Any, b: Any) -> Any: ... def gt(a: Any, b: Any) -> Any: ... def __lt__(a: Any, b: Any) -> Any: ... def __le__(a: Any, b: Any) -> Any: ... def __eq__(a: Any, b: Any) -> Any: ... def __ne__(a: Any, b: Any) -> Any: ... def __ge__(a: Any, b: Any) -> Any: ... def __gt__(a: Any, b: Any) -> Any: ... def not_(obj: Any) -> bool: ... def __not__(obj: Any) -> bool: ... def truth(x: Any) -> bool: ... def is_(a: Any, b: Any) -> bool: ... def is_not(a: Any, b: Any) -> bool: ... def abs(x: SupportsAbs) -> Any: ... def __abs__(a: SupportsAbs) -> Any: ... def add(a: Any, b: Any) -> Any: ... def __add__(a: Any, b: Any) -> Any: ... def and_(a: Any, b: Any) -> Any: ... def __and__(a: Any, b: Any) -> Any: ... if sys.version_info < (3, ): def div(a: Any, b: Any) -> Any: ... def __div__(a: Any, b: Any) -> Any: ... def floordiv(a: Any, b: Any) -> Any: ... def __floordiv__(a: Any, b: Any) -> Any: ... def index(a: Any) -> int: ... def __index__(a: Any) -> int: ... def inv(obj: Any) -> Any: ... def invert(obj: Any) -> Any: ... def __inv__(obj: Any) -> Any: ... def __invert__(obj: Any) -> Any: ... def lshift(a: Any, b: Any) -> Any: ... def __lshift__(a: Any, b: Any) -> Any: ... def mod(a: Any, b: Any) -> Any: ... def __mod__(a: Any, b: Any) -> Any: ... def mul(a: Any, b: Any) -> Any: ... def __mul__(a: Any, b: Any) -> Any: ... if sys.version_info >= (3, 5): def matmul(a: Any, b: Any) -> Any: ... def __matmul__(a: Any, b: Any) -> Any: ... def neg(obj: Any) -> Any: ... def __neg__(obj: Any) -> Any: ... def or_(a: Any, b: Any) -> Any: ... def __or__(a: Any, b: Any) -> Any: ... def pos(obj: Any) -> Any: ... def __pos__(obj: Any) -> Any: ... def pow(a: Any, b: Any) -> Any: ... def __pow__(a: Any, b: Any) -> Any: ... def rshift(a: Any, b: Any) -> Any: ... def __rshift__(a: Any, b: Any) -> Any: ... def sub(a: Any, b: Any) -> Any: ... def __sub__(a: Any, b: Any) -> Any: ... def truediv(a: Any, b: Any) -> Any: ... def __truediv__(a: Any, b: Any) -> Any: ... 
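# --- Illustrative usage sketch (not part of the stub archive) ----------------
# The numbers ABCs stubbed above are used mainly for isinstance() checks;
# the built-in numeric types (and fractions.Fraction) are registered against
# them, so the assertions below hold on a standard interpreter.
import numbers
from fractions import Fraction

assert isinstance(3, numbers.Integral)
assert isinstance(3.5, numbers.Real) and not isinstance(3.5, numbers.Rational)
assert isinstance(Fraction(1, 3), numbers.Rational)
assert isinstance(2 + 1j, numbers.Complex) and not isinstance(2 + 1j, numbers.Real)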
def xor(a: Any, b: Any) -> Any: ... def __xor__(a: Any, b: Any) -> Any: ... def concat(a: Sequence[_T], b: Sequence[_T]) -> Sequence[_T]: ... def __concat__(a: Sequence[_T], b: Sequence[_T]) -> Sequence[_T]: ... def contains(a: Container[Any], b: Any) -> bool: ... def __contains__(a: Container[Any], b: Any) -> bool: ... def countOf(a: Container[Any], b: Any) -> int: ... @overload def delitem(a: MutableSequence[_T], b: int) -> None: ... @overload def delitem(a: MutableMapping[_K, _V], b: _K) -> None: ... @overload def __delitem__(a: MutableSequence[_T], b: int) -> None: ... @overload def __delitem__(a: MutableMapping[_K, _V], b: _K) -> None: ... if sys.version_info < (3, ): def delslice(a: MutableSequence[Any], b: int, c: int) -> None: ... def __delslice__(a: MutableSequence[Any], b: int, c: int) -> None: ... @overload def getitem(a: Sequence[_T], b: int) -> _T: ... @overload def getitem(a: Mapping[_K, _V], b: _K) -> _V: ... @overload def __getitem__(a: Sequence[_T], b: int) -> _T: ... @overload def __getitem__(a: Mapping[_K, _V], b: _K) -> _V: ... if sys.version_info < (3, ): def getslice(a: Sequence[_T], b: int, c: int) -> Sequence[_T]: ... def __getslice__(a: Sequence[_T], b: int, c: int) -> Sequence[_T]: ... def indexOf(a: Sequence[_T], b: _T) -> int: ... if sys.version_info < (3, ): def repeat(a: Any, b: int) -> Any: ... def __repeat__(a: Any, b: int) -> Any: ... if sys.version_info < (3, ): def sequenceIncludes(a: Container[Any], b: Any) -> bool: ... @overload def setitem(a: MutableSequence[_T], b: int, c: _T) -> None: ... @overload def setitem(a: MutableMapping[_K, _V], b: _K, c: _V) -> None: ... @overload def __setitem__(a: MutableSequence[_T], b: int, c: _T) -> None: ... @overload def __setitem__(a: MutableMapping[_K, _V], b: _K, c: _V) -> None: ... if sys.version_info < (3, ): def setslice(a: MutableSequence[_T], b: int, c: int, v: Sequence[_T]) -> None: ... def __setslice__(a: MutableSequence[_T], b: int, c: int, v: Sequence[_T]) -> None: ... if sys.version_info >= (3, 4): def length_hint(obj: Any, default: int = ...) -> int: ... @overload def attrgetter(attr: str) -> Callable[[Any], Any]: ... @overload def attrgetter(*attrs: str) -> Callable[[Any], Tuple[Any, ...]]: ... @overload def itemgetter(item: Any) -> Callable[[Any], Any]: ... @overload def itemgetter(*items: Any) -> Callable[[Any], Tuple[Any, ...]]: ... def methodcaller(name: str, *args: Any, **kwargs: Any) -> Callable[..., Any]: ... def iadd(a: Any, b: Any) -> Any: ... def __iadd__(a: Any, b: Any) -> Any: ... def iand(a: Any, b: Any) -> Any: ... def __iand__(a: Any, b: Any) -> Any: ... def iconcat(a: Any, b: Any) -> Any: ... def __iconcat__(a: Any, b: Any) -> Any: ... if sys.version_info < (3, ): def idiv(a: Any, b: Any) -> Any: ... def __idiv__(a: Any, b: Any) -> Any: ... def ifloordiv(a: Any, b: Any) -> Any: ... def __ifloordiv__(a: Any, b: Any) -> Any: ... def ilshift(a: Any, b: Any) -> Any: ... def __ilshift__(a: Any, b: Any) -> Any: ... def imod(a: Any, b: Any) -> Any: ... def __imod__(a: Any, b: Any) -> Any: ... def imul(a: Any, b: Any) -> Any: ... def __imul__(a: Any, b: Any) -> Any: ... if sys.version_info >= (3, 5): def imatmul(a: Any, b: Any) -> Any: ... def __imatmul__(a: Any, b: Any) -> Any: ... def ior(a: Any, b: Any) -> Any: ... def __ior__(a: Any, b: Any) -> Any: ... def ipow(a: Any, b: Any) -> Any: ... def __ipow__(a: Any, b: Any) -> Any: ... if sys.version_info < (3, ): def irepeat(a: Any, b: int) -> Any: ... def __irepeat__(a: Any, b: int) -> Any: ... def irshift(a: Any, b: Any) -> Any: ... 
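# --- Illustrative usage sketch (not part of the stub archive) ----------------
# attrgetter/itemgetter/methodcaller (stubbed above) build small callables that
# are handy as sort keys; the row data here is arbitrary example data.
import operator

rows = [("b", 2), ("a", 1), ("c", 3)]
by_second = sorted(rows, key=operator.itemgetter(1))   # [('a', 1), ('b', 2), ('c', 3)]
get_real = operator.attrgetter("real")
assert get_real(3 + 4j) == 3.0
upper = operator.methodcaller("upper")
assert upper("abc") == "ABC"
assert operator.add(2, 3) == 5                         # functional form of a + b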
def __irshift__(a: Any, b: Any) -> Any: ... def isub(a: Any, b: Any) -> Any: ... def __isub__(a: Any, b: Any) -> Any: ... def itruediv(a: Any, b: Any) -> Any: ... def __itruediv__(a: Any, b: Any) -> Any: ... def ixor(a: Any, b: Any) -> Any: ... def __ixor__(a: Any, b: Any) -> Any: ... if sys.version_info < (3, ): def isCallable(x: Any) -> bool: ... def isMappingType(x: Any) -> bool: ... def isNumberType(x: Any) -> bool: ... def isSequenceType(x: Any) -> bool: ... mypy-0.560/typeshed/stdlib/2and3/optparse.pyi0000644€tŠÔÚ€2›s®0000002455213215007212025272 0ustar jukkaDROPBOX\Domain Users00000000000000# Generated by pytype, with only minor tweaks. Might be incomplete. import sys from typing import Any, Callable, Dict, IO, Iterable, List, Mapping, Optional, Sequence, Tuple, Union # See https://groups.google.com/forum/#!topic/python-ideas/gA1gdj3RZ5g if sys.version_info >= (3,): _Text = str else: _Text = Union[str, unicode] NO_DEFAULT = ... # type: Tuple[_Text, ...] SUPPRESS_HELP = ... # type: _Text SUPPRESS_USAGE = ... # type: _Text def check_builtin(option: Option, opt: Any, value: _Text) -> Any: ... def check_choice(option: Option, opt: Any, value: _Text) -> Any: ... if sys.version_info < (3,): def isbasestring(x: Any) -> bool: ... class OptParseError(Exception): msg = ... # type: _Text def __init__(self, msg: _Text) -> None: ... class BadOptionError(OptParseError): opt_str = ... # type: _Text def __init__(self, opt_str: _Text) -> None: ... class AmbiguousOptionError(BadOptionError): possibilities = ... # type: Iterable[_Text] def __init__(self, opt_str: _Text, possibilities: Sequence[_Text]) -> None: ... class OptionError(OptParseError): msg = ... # type: _Text option_id = ... # type: _Text def __init__(self, msg: _Text, option: Option) -> None: ... class OptionConflictError(OptionError): ... class OptionValueError(OptParseError): ... class HelpFormatter: NO_DEFAULT_VALUE = ... # type: _Text _long_opt_fmt = ... # type: _Text _short_opt_fmt = ... # type: _Text current_indent = ... # type: int default_tag = ... # type: _Text help_position = ... # type: Any help_width = ... # type: Any indent_increment = ... # type: int level = ... # type: int max_help_position = ... # type: int option_strings = ... # type: Dict[Option, _Text] parser = ... # type: OptionParser short_first = ... # type: Any width = ... # type: int def __init__(self, indent_increment: int, max_help_position: int, width: Optional[int], short_first: int) -> None: ... def _format__Text(self, _Text: _Text) -> _Text: ... def dedent(self) -> None: ... def expand_default(self, option: Option) -> _Text: ... def format_description(self, description: _Text) -> _Text: ... def format_epilog(self, epilog) -> _Text: ... def format_heading(self, heading: Any) -> _Text: ... def format_option(self, option: OptionParser) -> _Text: ... def format_option_strings(self, option: OptionParser) -> Any: ... def format_usage(self, usage: Any) -> _Text: ... def indent(self) -> None: ... def set_long_opt_delimiter(self, delim: _Text) -> None: ... def set_parser(self, parser: OptionParser) -> None: ... def set_short_opt_delimiter(self, delim: _Text) -> None: ... def store_option_strings(self, parser: OptionParser) -> None: ... class IndentedHelpFormatter(HelpFormatter): def __init__(self, indent_increment: int = ..., max_help_position: int = ..., width: Optional[int] = ..., short_first: int = ...) -> None: ... def format_heading(self, heading: _Text) -> _Text: ... def format_usage(self, usage: _Text) -> _Text: ... 
class TitledHelpFormatter(HelpFormatter): def __init__(self, indent_increment: int = ..., max_help_position: int = ..., width: Optional[int] = ..., short_first: int = ...) -> None: ... def format_heading(self, heading: _Text) -> _Text: ... def format_usage(self, usage: _Text) -> _Text: ... class Option: ACTIONS = ... # type: Tuple[_Text, ...] ALWAYS_TYPED_ACTIONS = ... # type: Tuple[_Text, ...] ATTRS = ... # type: List[_Text] CHECK_METHODS = ... # type: Optional[List[Callable]] CONST_ACTIONS = ... # type: Tuple[_Text, ...] STORE_ACTIONS = ... # type: Tuple[_Text, ...] TYPED_ACTIONS = ... # type: Tuple[_Text, ...] TYPES = ... # type: Tuple[_Text, ...] TYPE_CHECKER = ... # type: Dict[_Text, Callable] _long_opts = ... # type: List[_Text] _short_opts = ... # type: List[_Text] action = ... # type: _Text dest = ... # type: Any nargs = ... # type: int type = ... # type: Any def __init__(self, *opts, **attrs) -> None: ... def _check_action(self) -> None: ... def _check_callback(self) -> None: ... def _check_choice(self) -> None: ... def _check_const(self) -> None: ... def _check_dest(self) -> None: ... def _check_nargs(self) -> None: ... def _check_opt_strings(self, opts: Optional[_Text]) -> Any: ... def _check_type(self) -> None: ... def _set_attrs(self, attrs: Dict[_Text, Any]) -> None: ... def _set_opt_strings(self, opts: _Text) -> None: ... def check_value(self, opt: Any, value: Any) -> Any: ... def convert_value(self, opt: Any, value: Any) -> Any: ... def get_opt_string(self) -> _Text: ... def process(self, opt: Any, value: Any, values: Any, parser: OptionParser) -> int: ... def take_action(self, action: _Text, dest: _Text, opt: Any, value: Any, values: Any, parser: OptionParser) -> int: ... def takes_value(self) -> bool: ... make_option = Option class OptionContainer: _long_opt = ... # type: Dict[_Text, Option] _short_opt = ... # type: Dict[_Text, Option] conflict_handler = ... # type: _Text defaults = ... # type: Dict[_Text, Any] description = ... # type: Any option_class = ... # type: Any def __init__(self, option_class: Option, conflict_handler: Any, description: Any) -> None: ... def _check_conflict(self, option: Any) -> None: ... def _create_option_mappings(self) -> None: ... def _share_option_mappings(self, parser: OptionParser) -> None: ... def add_option(self, *args, **kwargs) -> Any: ... def add_options(self, option_list: Iterable[Option]) -> None: ... def destroy(self) -> None: ... def format_description(self, formatter: Optional[HelpFormatter]) -> Any: ... def format_help(self, formatter: Optional[HelpFormatter]) -> _Text: ... def format_option_help(self, formatter: Optional[HelpFormatter]) -> _Text: ... def get_description(self) -> Any: ... def get_option(self, opt_str: _Text) -> Optional[Option]: ... def has_option(self, opt_str: _Text) -> bool: ... def remove_option(self, opt_str: _Text) -> None: ... def set_conflict_handler(self, handler: Any) -> None: ... def set_description(self, description: Any) -> None: ... class OptionGroup(OptionContainer): option_list = ... # type: List[Option] parser = ... # type: OptionParser title = ... # type: _Text def __init__(self, parser: OptionParser, title: _Text, description: Optional[_Text] = ...) -> None: ... def _create_option_list(self) -> None: ... def set_title(self, title: _Text) -> None: ... class OptionParser(OptionContainer): allow_interspersed_args = ... # type: bool epilog = ... # type: Any formatter = ... # type: HelpFormatter largs = ... # type: Optional[List[_Text]] option_groups = ... 
# type: List[OptionParser] option_list = ... # type: List[Any] process_default_values = ... # type: Any prog = ... # type: Any rargs = ... # type: Optional[List[Any]] standard_option_list = ... # type: List usage = ... # type: Optional[_Text] values = ... # type: Any version = ... # type: _Text def __init__(self, usage: Optional[_Text] = ..., option_list: Iterable[Option] = ..., option_class: Option = ..., version: Optional[_Text] = ..., conflict_handler: _Text = ..., description: Optional[_Text] = ..., formatter: Optional[HelpFormatter] = ..., add_help_option: bool = ..., prog: Optional[_Text] = ..., epilog: Optional[_Text] = ...) -> None: ... def _add_help_option(self) -> None: ... def _add_version_option(self) -> None: ... def _create_option_list(self) -> None: ... def _get_all_options(self) -> List[Any]: ... def _get_args(self, args: Iterable) -> List[Any]: ... def _init_parsing_state(self) -> None: ... def _match_long_opt(self, opt: _Text) -> _Text: ... def _populate_option_list(self, option_list: Iterable[Option], add_help: bool = ...) -> None: ... def _process_args(self, largs: List, rargs: List, values: Values) -> None: ... def _process_long_opt(self, rargs: List, values: Any) -> None: ... def _process_short_opts(self, rargs: List, values: Any) -> None: ... def add_option_group(self, *args, **kwargs) -> OptionParser: ... def check_values(self, values: Any, args) -> Tuple[Any, ...]: ... def disable_interspersed_args(self) -> None: ... def enable_interspersed_args(self) -> None: ... def error(self, msg: _Text) -> None: ... def exit(self, status: int = ..., msg: Optional[str] = ...) -> None: ... def expand_prog_name(self, s: Optional[_Text]) -> Any: ... def format_epilog(self, formatter: HelpFormatter) -> Any: ... def format_help(self, formatter: Optional[HelpFormatter] = ...) -> _Text: ... def format_option_help(self, formatter: Optional[HelpFormatter] = ...) -> _Text: ... def get_default_values(self) -> Values: ... def get_option_group(self, opt_str: _Text) -> Any: ... def get_prog_name(self) -> _Text: ... def get_usage(self) -> _Text: ... def get_version(self) -> _Text: ... def parse_args(self, args: Optional[Sequence[_Text]] = ..., values: Optional[Values] = ...) -> Tuple[Any, ...]: ... def print_usage(self, file: Optional[IO[str]] = ...) -> None: ... def print_help(self, file: Optional[IO[str]] = ...) -> None: ... def print_version(self, file: Optional[IO[str]] = ...) -> None: ... def set_default(self, dest: Any, value: Any) -> None: ... def set_defaults(self, **kwargs) -> None: ... def set_process_default_values(self, process: Any) -> None: ... def set_usage(self, usage: _Text) -> None: ... class Values: def __init__(self, defaults: Optional[Mapping[str, Any]] = ...) -> None: ... def _update(self, dict: Dict[_Text, Any], mode: Any) -> None: ... def _update_careful(self, dict: Dict[_Text, Any]) -> None: ... def _update_loose(self, dict: Dict[_Text, Any]) -> None: ... def ensure_value(self, attr: Any, value: Any) -> Any: ... def read_file(self, filename: _Text, mode: _Text) -> None: ... def read_module(self, modname: _Text, mode: _Text) -> None: ... mypy-0.560/typeshed/stdlib/2and3/pdb.pyi0000644€tŠÔÚ€2›s®0000000125313215007212024173 0ustar jukkaDROPBOX\Domain Users00000000000000# NOTE: This stub is incomplete - only contains some global functions from typing import Any, Dict, Optional def run(statement: str, globals: Optional[Dict[str, Any]] = ..., locals: Optional[Dict[str, Any]] = ...) -> None: ... 
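# --- Illustrative usage sketch (not part of the stub archive) ----------------
# Typical use of the OptionParser stubbed above (optparse is superseded by
# argparse but still works); the option names and the sample argv are made up.
from optparse import OptionParser

parser = OptionParser(usage="%prog [options] FILE")
parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False)
parser.add_option("-n", "--count", type="int", dest="count", default=1)
options, args = parser.parse_args(["-v", "-n", "3", "input.txt"])
assert options.verbose and options.count == 3 and args == ["input.txt"]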
def runeval(expression: str, globals: Optional[Dict[str, Any]] = ..., locals: Optional[Dict[str, Any]] = ...) -> Any: ... def runctx(statement: str, globals: Dict[str, Any], locals: Dict[str, Any]) -> None: ... def runcall(*args: Any, **kwds: Any) -> Any: ... def set_trace() -> None: ... def post_mortem(t: Optional[Any] = ...) -> None: ... def pm() -> None: ... mypy-0.560/typeshed/stdlib/2and3/pickle.pyi0000644€tŠÔÚ€2›s®0000000675613215007212024712 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, IO, Mapping, Union, Tuple, Callable, Optional, Iterator HIGHEST_PROTOCOL: int if sys.version_info >= (3, 0): DEFAULT_PROTOCOL: int if sys.version_info >= (3, 0): def dump(obj: Any, file: IO[bytes], protocol: Optional[int] = ..., *, fix_imports: bool = ...) -> None: ... def dumps(obj: Any, protocol: Optional[int] = ..., *, fix_imports: bool = ...) -> bytes: ... def loads(bytes_object: bytes, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... def load(file: IO[bytes], *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... else: def dump(obj: Any, file: IO[bytes], protocol: Optional[int] = ...) -> None: ... def dumps(obj: Any, protocol: Optional[int] = ...) -> bytes: ... def load(file: IO[bytes]) -> Any: ... def loads(string: bytes) -> Any: ... class PickleError(Exception): ... class PicklingError(PickleError): ... class UnpicklingError(PickleError): ... _reducedtype = Union[str, Tuple[Callable[..., Any], Tuple], Tuple[Callable[..., Any], Tuple, Any], Tuple[Callable[..., Any], Tuple, Any, Optional[Iterator]], Tuple[Callable[..., Any], Tuple, Any, Optional[Iterator], Optional[Iterator]]] class Pickler: fast: bool if sys.version_info >= (3, 3): dispatch_table: Mapping[type, Callable[[Any], _reducedtype]] if sys.version_info >= (3, 0): def __init__(self, file: IO[bytes], protocol: Optional[int] = ..., *, fix_imports: bool = ...) -> None: ... else: def __init__(self, file: IO[bytes], protocol: Optional[int] = ...) -> None: ... def dump(self, obj: Any) -> None: ... def clear_memo(self) -> None: ... def persistent_id(self, obj: Any) -> Any: ... class Unpickler: if sys.version_info >= (3, 0): def __init__(self, file: IO[bytes], *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> None: ... else: def __init__(self, file: IO[bytes]) -> None: ... def load(self) -> Any: ... def find_class(self, module: str, name: str) -> Any: ... if sys.version_info >= (3, 0): def persistent_load(self, pid: Any) -> Any: ... 
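# --- Illustrative usage sketch (not part of the stub archive) ----------------
# A round trip through the module-level pickle helpers stubbed above; the
# payload dictionary and the file name are arbitrary example data.
import pickle

payload = {"numbers": [1, 2, 3], "name": "example"}
blob = pickle.dumps(payload, protocol=pickle.HIGHEST_PROTOCOL)
assert pickle.loads(blob) == payload

with open("payload.pkl", "wb") as f:        # hypothetical file name
    pickle.dump(payload, f)
with open("payload.pkl", "rb") as f:
    assert pickle.load(f) == payload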
MARK: bytes STOP: bytes POP: bytes POP_MARK: bytes DUP: bytes FLOAT: bytes INT: bytes BININT: bytes BININT1: bytes LONG: bytes BININT2: bytes NONE: bytes PERSID: bytes BINPERSID: bytes REDUCE: bytes STRING: bytes BINSTRING: bytes SHORT_BINSTRING: bytes UNICODE: bytes BINUNICODE: bytes APPEND: bytes BUILD: bytes GLOBAL: bytes DICT: bytes EMPTY_DICT: bytes APPENDS: bytes GET: bytes BINGET: bytes INST: bytes LONG_BINGET: bytes LIST: bytes EMPTY_LIST: bytes OBJ: bytes PUT: bytes BINPUT: bytes LONG_BINPUT: bytes SETITEM: bytes TUPLE: bytes EMPTY_TUPLE: bytes SETITEMS: bytes BINFLOAT: bytes TRUE: bytes FALSE: bytes # protocol 2 PROTO: bytes NEWOBJ: bytes EXT1: bytes EXT2: bytes EXT4: bytes TUPLE1: bytes TUPLE2: bytes TUPLE3: bytes NEWTRUE: bytes NEWFALSE: bytes LONG1: bytes LONG4: bytes if sys.version_info >= (3, 0): # protocol 3 BINBYTES: bytes SHORT_BINBYTES: bytes if sys.version_info >= (3, 4): # protocol 4 SHORT_BINUNICODE: bytes BINUNICODE8: bytes BINBYTES8: bytes EMPTY_SET: bytes ADDITEMS: bytes FROZENSET: bytes NEWOBJ_EX: bytes STACK_GLOBAL: bytes MEMOIZE: bytes FRAME: bytes mypy-0.560/typeshed/stdlib/2and3/pickletools.pyi0000644€tŠÔÚ€2›s®0000001045313215007212025760 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pickletools (Python 2 and 3) import sys from typing import Any, Callable, IO, Iterator, List, MutableMapping, Optional, Text, Tuple, Type, Union _Reader = Callable[[IO[bytes]], Any] if sys.version_info >= (3, 0): bytes_types: Tuple[Type[Any], ...] UP_TO_NEWLINE: int TAKEN_FROM_ARGUMENT1: int TAKEN_FROM_ARGUMENT4: int if sys.version_info >= (3, 3): TAKEN_FROM_ARGUMENT4U: int if sys.version_info >= (3, 4): TAKEN_FROM_ARGUMENT8U: int class ArgumentDescriptor(object): name: str n: int reader: _Reader doc: str def __init__(self, name: str, n: int, reader: _Reader, doc: str) -> None: ... def read_uint1(f: IO[bytes]) -> int: ... uint1: ArgumentDescriptor def read_uint2(f: IO[bytes]) -> int: ... uint2: ArgumentDescriptor def read_int4(f: IO[bytes]) -> int: ... int4: ArgumentDescriptor if sys.version_info >= (3, 3): def read_uint4(f: IO[bytes]) -> int: ... uint4: ArgumentDescriptor if sys.version_info >= (3, 5): def read_uint8(f: IO[bytes]) -> int: ... uint8: ArgumentDescriptor def read_stringnl(f: IO[bytes], decode: bool = ..., stripquotes: bool = ...) -> Union[bytes, Text]: ... stringnl: ArgumentDescriptor def read_stringnl_noescape(f: IO[bytes]) -> str: ... stringnl_noescape: ArgumentDescriptor def read_stringnl_noescape_pair(f: IO[bytes]) -> Text: ... stringnl_noescape_pair: ArgumentDescriptor def read_string1(f: IO[bytes]) -> str: ... string1: ArgumentDescriptor def read_string4(f: IO[bytes]) -> str: ... string4: ArgumentDescriptor if sys.version_info >= (3, 3): def read_bytes1(f: IO[bytes]) -> bytes: ... bytes1: ArgumentDescriptor def read_bytes4(f: IO[bytes]) -> bytes: ... bytes4: ArgumentDescriptor if sys.version_info >= (3, 4): def read_bytes8(f: IO[bytes]) -> bytes: ... bytes8: ArgumentDescriptor def read_unicodestringnl(f: IO[bytes]) -> Text: ... unicodestringnl: ArgumentDescriptor if sys.version_info >= (3, 4): def read_unicodestring1(f: IO[bytes]) -> Text: ... unicodestring1: ArgumentDescriptor def read_unicodestring4(f: IO[bytes]) -> Text: ... unicodestring4: ArgumentDescriptor if sys.version_info >= (3, 4): def read_unicodestring8(f: IO[bytes]) -> Text: ... unicodestring8: ArgumentDescriptor def read_decimalnl_short(f: IO[bytes]) -> int: ... def read_decimalnl_long(f: IO[bytes]) -> int: ... 
decimalnl_short: ArgumentDescriptor decimalnl_long: ArgumentDescriptor def read_floatnl(f: IO[bytes]) -> float: ... floatnl: ArgumentDescriptor def read_float8(f: IO[bytes]) -> float: ... float8: ArgumentDescriptor def read_long1(f: IO[bytes]) -> int: ... long1: ArgumentDescriptor def read_long4(f: IO[bytes]) -> int: ... long4: ArgumentDescriptor class StackObject(object): name: str obtype: Union[Type[Any], Tuple[Type[Any], ...]] doc: str def __init__(self, name: str, obtype: Union[Type[Any], Tuple[Type[Any], ...]], doc: str) -> None: ... pyint: StackObject pylong: StackObject pyinteger_or_bool: StackObject pybool: StackObject pyfloat: StackObject if sys.version_info >= (3, 4): pybytes_or_str: StackObject pystring: StackObject if sys.version_info >= (3, 0): pybytes: StackObject pyunicode: StackObject pynone: StackObject pytuple: StackObject pylist: StackObject pydict: StackObject if sys.version_info >= (3, 4): pyset: StackObject pyfrozenset: StackObject anyobject: StackObject markobject: StackObject stackslice: StackObject class OpcodeInfo(object): name: str code: str arg: Optional[ArgumentDescriptor] stack_before: List[StackObject] stack_after: List[StackObject] proto: int doc: str def __init__(self, name: str, code: str, arg: Optional[ArgumentDescriptor], stack_before: List[StackObject], stack_after: List[StackObject], proto: int, doc: str) -> None: ... opcodes: List[OpcodeInfo] def genops(pickle: Union[bytes, IO[bytes]]) -> Iterator[Tuple[OpcodeInfo, Optional[Any], Optional[int]]]: ... def optimize(p: Union[bytes, IO[bytes]]) -> bytes: ... if sys.version_info >= (3, 2): def dis(pickle: Union[bytes, IO[bytes]], out: Optional[IO[str]] = ..., memo: Optional[MutableMapping[int, Any]] = ..., indentlevel: int = ..., annotate: int = ...) -> None: ... else: def dis(pickle: Union[bytes, IO[bytes]], out: Optional[IO[str]] = ..., memo: Optional[MutableMapping[int, Any]] = ..., indentlevel: int = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/pkgutil.pyi0000644€tŠÔÚ€2›s®0000000220613215007212025104 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pkgutil from typing import Any, Callable, Generator, IO, Iterable, List, Optional, Tuple import sys if sys.version_info >= (3,): from importlib.abc import Loader else: Loader = Any _YMFNI = Generator[Tuple[Any, str, bool], None, None] def extend_path(path: Iterable[str], name: str) -> Iterable[str]: ... class ImpImporter: def __init__(self, dirname: Optional[str] = ...) -> None: ... class ImpLoader: def __init__(self, fullname: str, file: IO[str], filename: str, etc: Tuple[str, str, int]) -> None: ... def find_loader(fullname: str) -> Loader: ... def get_importer(path_item: str) -> Any: ... # TODO precise type def get_loader(module_or_name: str) -> Loader: ... def iter_importers(fullname: str = ...) -> Generator[Any, None, None]: ... # TODO precise type def iter_modules(path: Optional[List[str]] = ..., prefix: str = ...) -> _YMFNI: ... # TODO precise type def walk_packages(path: Optional[List[str]] = ..., prefix: str = ..., onerror: Optional[Callable[[str], None]] = ...) -> _YMFNI: ... def get_data(package: str, resource: str) -> Optional[bytes]: ... mypy-0.560/typeshed/stdlib/2and3/plistlib.pyi0000644€tŠÔÚ€2›s®0000000433413215007212025253 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for plistlib from typing import ( Any, IO, Mapping, MutableMapping, Optional, Union, Type, TypeVar, ) from typing import Dict as DictT import sys if sys.version_info >= (3,): from enum import Enum class PlistFormat(Enum): FMT_XML = ... 
# type: PlistFormat FMT_BINARY = ... # type: PlistFormat FMT_XML = PlistFormat.FMT_XML FMT_BINARY = PlistFormat.FMT_BINARY mm = MutableMapping[str, Any] _D = TypeVar('_D', bound=mm) if sys.version_info >= (3,): _Path = str else: _Path = Union[str, unicode] if sys.version_info >= (3, 4): def load(fp: IO[bytes], *, fmt: Optional[PlistFormat] = ..., use_builtin_types: bool = ..., dict_type: Type[_D] = ...) -> _D: ... def loads(data: bytes, *, fmt: Optional[PlistFormat] = ..., use_builtin_types: bool = ..., dict_type: Type[_D] = ...) -> _D: ... def dump(value: Mapping[str, Any], fp: IO[bytes], *, fmt: PlistFormat =..., sort_keys: bool = ..., skipkeys: bool = ...) -> None: ... def dumps(value: Mapping[str, Any], *, fmt: PlistFormat = ..., skipkeys: bool = ..., sort_keys: bool = ...) -> bytes: ... def readPlist(pathOrFile: Union[_Path, IO[bytes]]) -> DictT[str, Any]: ... def writePlist(value: Mapping[str, Any], pathOrFile: Union[_Path, IO[bytes]]) -> None: ... def readPlistFromBytes(data: bytes) -> DictT[str, Any]: ... def writePlistToBytes(value: Mapping[str, Any]) -> bytes: ... if sys.version_info < (3,): def readPlistFromResource(path: _Path, restype: str = ..., resid: int = ...) -> DictT[str, Any]: ... def writePlistToResource(rootObject: Mapping[str, Any], path: _Path, restype: str = ..., resid: int = ...) -> None: ... def readPlistFromString(data: str) -> DictT[str, Any]: ... def writePlistToString(rootObject: Mapping[str, Any]) -> str: ... if sys.version_info >= (3,): class Dict(dict): def __getattr__(self, attr: str) -> Any: ... def __setattr__(self, attr: str, value: Any) -> None: ... def __delattr__(self, attr: str) -> None: ... class Data: data = ... # type: bytes def __init__(self, data: bytes) -> None: ... mypy-0.560/typeshed/stdlib/2and3/poplib.pyi0000644€tŠÔÚ€2›s®0000000471313215007212024717 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for poplib (Python 2 and 3) from mypy_extensions import NoReturn import socket import ssl import sys from typing import Any, BinaryIO, Dict, List, Optional, overload, Pattern, Text, Tuple _LongResp = Tuple[bytes, List[bytes], int] class error_proto(Exception): pass POP3_PORT: int POP3_SSL_PORT: int CR: bytes LF: bytes CRLF: bytes class POP3: if sys.version_info >= (3, 0): encoding: Text host: Text port: int sock: socket.socket file: BinaryIO welcome: bytes def __init__(self, host: Text, port: int = ..., timeout: float = ...) -> None: ... def getwelcome(self) -> bytes: ... def set_debuglevel(self, level: int) -> None: ... def user(self, user: Text) -> bytes: ... def pass_(self, pswd: Text) -> bytes: ... def stat(self) -> Tuple[int, int]: ... def list(self, which: Optional[Any] = ...) -> _LongResp: ... def retr(self, which: Any) -> _LongResp: ... def dele(self, which: Any) -> bytes: ... def noop(self) -> bytes: ... def rset(self) -> bytes: ... def quit(self) -> bytes: ... def close(self) -> None: ... def rpop(self, user: Text) -> bytes: ... timestamp: Pattern[Text] if sys.version_info < (3, 0): def apop(self, user: Text, secret: Text) -> bytes: ... else: def apop(self, user: Text, password: Text) -> bytes: ... def top(self, which: Any, howmuch: int) -> _LongResp: ... @overload def uidl(self) -> _LongResp: ... @overload def uidl(self, which: Any) -> bytes: ... if sys.version_info >= (3, 5): def utf8(self) -> bytes: ... if sys.version_info >= (3, 4): def capa(self) -> Dict[Text, List[Text]]: ... def stls(self, context: Optional[ssl.SSLContext] = ...) -> bytes: ... 
class POP3_SSL(POP3): if sys.version_info >= (3, 0): def __init__(self, host: Text, port: int = ..., keyfile: Optional[Text] = ..., certfile: Optional[Text] = ..., timeout: float = ..., context: Optional[ssl.SSLContext] = ...) -> None: ... else: def __init__(self, host: Text, port: int = ..., keyfile: Optional[Text] = ..., certfile: Optional[Text] = ..., timeout: float = ...) -> None: ... if sys.version_info >= (3, 4): # "context" is actually the last argument, but that breaks LSP and it doesn't really matter because all the arguments are ignored def stls(self, context: Any = ..., keyfile: Any = ..., certfile: Any = ...) -> bytes: ... mypy-0.560/typeshed/stdlib/2and3/pprint.pyi0000644€tŠÔÚ€2›s®0000000173413215007212024746 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pprint # Based on http://docs.python.org/2/library/pprint.html # Based on http://docs.python.org/3/library/pprint.html from typing import Any, Dict, Tuple, IO def pformat(o: object, indent: int = ..., width: int = ..., depth: int = ...) -> str: ... def pprint(o: object, stream: IO[str] = ..., indent: int = ..., width: int = ..., depth: int = ...) -> None: ... def isreadable(o: object) -> bool: ... def isrecursive(o: object) -> bool: ... def saferepr(o: object) -> str: ... class PrettyPrinter: def __init__(self, indent: int = ..., width: int = ..., depth: int = ..., stream: IO[str] = ...) -> None: ... def pformat(self, o: object) -> str: ... def pprint(self, o: object) -> None: ... def isreadable(self, o: object) -> bool: ... def isrecursive(self, o: object) -> bool: ... def format(self, o: object, context: Dict[int, Any], maxlevels: int, level: int) -> Tuple[str, bool, bool]: ... mypy-0.560/typeshed/stdlib/2and3/profile.pyi0000644€tŠÔÚ€2›s®0000000211613215007212025065 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, Optional, TypeVar def run(statement: str, filename: Optional[str] = ..., sort: int = ...) -> None: ... def runctx(statement: str, globals: Dict[str, Any], locals: Dict[str, Any], filename: Optional[str] = ..., sort: int = ...) -> None: ... _SelfT = TypeVar('_SelfT', bound='Profile') _T = TypeVar('_T') class Profile: def __init__(self, timer: Optional[Callable[[], float]] = ..., bias: Optional[int] = ...) -> None: ... def set_cmd(self, cmd: str) -> None: ... def simulate_call(self, name: str) -> None: ... def simulate_cmd_complete(self) -> None: ... def print_stats(self, sort: int = ...) -> None: ... def dump_stats(self, file: str) -> None: ... def create_stats(self) -> None: ... def snapshot_stats(self) -> None: ... def run(self: _SelfT, cmd: str) -> _SelfT: ... def runctx(self: _SelfT, cmd: str, globals: Dict[str, Any], locals: Dict[str, Any]) -> _SelfT: ... def runcall(self, func: Callable[..., _T], *args: Any, **kw: Any) -> _T: ... def calibrate(self, m: int, verbose: int = ...) -> float: ... mypy-0.560/typeshed/stdlib/2and3/pstats.pyi0000644€tŠÔÚ€2›s®0000000365613215007212024755 0ustar jukkaDROPBOX\Domain Users00000000000000from profile import Profile from cProfile import Profile as _cProfile import os import sys from typing import Any, Dict, IO, Iterable, List, Text, Tuple, TypeVar, Union, overload _Selector = Union[str, float, int] _T = TypeVar('_T', bound='Stats') if sys.version_info >= (3, 6): _Path = Union[bytes, Text, os.PathLike[Any]] else: _Path = Union[bytes, Text] class Stats: def __init__(self: _T, __arg: Union[None, str, Text, Profile, _cProfile] = ..., *args: Union[None, str, Text, Profile, _cProfile, _T], stream: IO[Any] = ...) -> None: ... 
def init(self, arg: Union[None, str, Text, Profile, _cProfile]) -> None: ... def load_stats(self, arg: Union[None, str, Text, Profile, _cProfile]) -> None: ... def get_top_level_stats(self) -> None: ... def add(self: _T, *arg_list: Union[None, str, Text, Profile, _cProfile, _T]) -> _T: ... def dump_stats(self, filename: _Path) -> None: ... def get_sort_arg_defs(self) -> Dict[str, Tuple[Tuple[Tuple[int, int], ...], str]]: ... @overload def sort_stats(self: _T, field: int) -> _T: ... @overload def sort_stats(self: _T, *field: str) -> _T: ... def reverse_order(self: _T) -> _T: ... def strip_dirs(self: _T) -> _T: ... def calc_callees(self) -> None: ... def eval_print_amount(self, sel: _Selector, list: List[str], msg: str) -> Tuple[List[str], str]: ... def get_print_list(self, sel_list: Iterable[_Selector]) -> Tuple[int, List[str]]: ... def print_stats(self: _T, *amount: _Selector) -> _T: ... def print_callees(self: _T, *amount: _Selector) -> _T: ... def print_callers(self: _T, *amount: _Selector) -> _T: ... def print_call_heading(self, name_size: int, column_title: str) -> None: ... def print_call_line(self, name_size: int, source: str, call_dict: Dict[str, Any], arrow: str = ...) -> None: ... def print_title(self) -> None: ... def print_line(self, func: str) -> None: ... mypy-0.560/typeshed/stdlib/2and3/pty.pyi0000644€tŠÔÚ€2›s®0000000115713215007212024245 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pty (Python 2 and 3) import sys from typing import Callable, Iterable, Tuple, Union _Reader = Callable[[int], bytes] STDIN_FILENO: int STDOUT_FILENO: int STDERR_FILENO: int CHILD: int def openpty() -> Tuple[int, int]: ... def master_open() -> Tuple[int, str]: ... def slave_open(tty_name: str) -> int: ... def fork() -> Tuple[int, int]: ... if sys.version_info >= (3, 4): def spawn(argv: Union[str, Iterable[str]], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ... else: def spawn(argv: Union[str, Iterable[str]], master_read: _Reader = ..., stdin_read: _Reader = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/pwd.pyi0000644€tŠÔÚ€2›s®0000000114713215007212024222 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, NamedTuple struct_passwd = NamedTuple("struct_passwd", [("pw_name", str), ("pw_passwd", str), ("pw_uid", int), ("pw_gid", int), ("pw_gecos", str), ("pw_dir", str), ("pw_shell", str)]) def getpwall() -> List[struct_passwd]: ... def getpwuid(uid: int) -> struct_passwd: ... def getpwnam(name: str) -> struct_passwd: ... mypy-0.560/typeshed/stdlib/2and3/py_compile.pyi0000644€tŠÔÚ€2›s®0000000144413215007212025570 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for py_compile (Python 2 and 3) import sys from typing import Optional, List, Text, AnyStr, Union _EitherStr = Union[bytes, Text] class PyCompileError(Exception): exc_type_name = ... # type: str exc_value = ... # type: BaseException file = ... # type: str msg = ... # type: str def __init__(self, exc_type: str, exc_value: BaseException, file: str, msg: str = ...) -> None: ... if sys.version_info >= (3, 2): def compile(file: AnyStr, cfile: Optional[AnyStr] = ..., dfile: Optional[AnyStr] = ..., doraise: bool = ..., optimize: int = ...) -> Optional[AnyStr]: ... else: def compile(file: _EitherStr, cfile: Optional[_EitherStr] = ..., dfile: Optional[_EitherStr] = ..., doraise: bool = ...) -> None: ... def main(args: Optional[List[Text]] = ...) -> int: ... 
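# --- Illustrative usage, not part of the mypy-0.560 archive: a minimal sketch of
# --- calling py_compile as annotated in the stub above. "example.py" is a
# --- hypothetical placeholder path; on Python 3.2+ compile() returns the path of
# --- the byte-compiled cache file (the Optional[AnyStr] in the stub).
import py_compile
from typing import Optional

cached: Optional[str] = py_compile.compile("example.py", doraise=True)
if cached is not None:
    print("byte-compiled to", cached)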
mypy-0.560/typeshed/stdlib/2and3/pyclbr.pyi0000644€tŠÔÚ€2›s®0000000206313215007212024721 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Union, Sequence, Optional, Dict class Class: module = ... # type: str name = ... # type: str super = ... # type: Optional[List[Union["Class", str]]] methods = ... # type: Dict[str, int] file = ... # type: int lineno = ... # type: int def __init__(self, module: str, name: str, super: Optional[List[Union["Class", str]]], file: str, lineno: int) -> None: ... class Function: module = ... # type: str name = ... # type: str file = ... # type: int lineno = ... # type: int def __init__(self, module: str, name: str, file: str, lineno: int) -> None: ... def readmodule(module: str, path: Optional[Sequence[str]] = ... ) -> Dict[str, Class]: ... def readmodule_ex(module: str, path: Optional[Sequence[str]] = ... ) -> Dict[str, Union[Class, Function, List[str]]]: ... mypy-0.560/typeshed/stdlib/2and3/quopri.pyi0000644€tŠÔÚ€2›s®0000000057413215007212024752 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for quopri (Python 2 and 3) from typing import BinaryIO def encode(input: BinaryIO, output: BinaryIO, quotetabs: int, header: int = ...) -> None: ... def encodestring(s: bytes, quotetabs: int = ..., header: int = ...) -> bytes: ... def decode(input: BinaryIO, output: BinaryIO, header: int = ...) -> None: ... def decodestring(s: bytes, header: int = ...) -> bytes: ... mypy-0.560/typeshed/stdlib/2and3/readline.pyi0000644€tŠÔÚ€2›s®0000000276513215007212025222 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for readline from typing import Callable, Optional, Sequence import sys _CompleterT = Optional[Callable[[str, int], Optional[str]]] _CompDispT = Optional[Callable[[str, Sequence[str], int], None]] def parse_and_bind(string: str) -> None: ... def read_init_file(filename: str = ...) -> None: ... def get_line_buffer() -> str: ... def insert_text(string: str) -> None: ... def redisplay() -> None: ... def read_history_file(filename: str = ...) -> None: ... def write_history_file(filename: str = ...) -> None: ... if sys.version_info >= (3, 5): def append_history_file(nelements: int, filename: str = ...) -> None: ... def get_history_length() -> int: ... def set_history_length(length: int) -> None: ... def clear_history() -> None: ... def get_current_history_length() -> int: ... def get_history_item(index: int) -> str: ... def remove_history_item(pos: int) -> None: ... def replace_history_item(pos: int, line: str) -> None: ... def add_history(string: str) -> None: ... def set_startup_hook(function: Optional[Callable[[], None]] = ...) -> None: ... def set_pre_input_hook(function: Optional[Callable[[], None]] = ...) -> None: ... def set_completer(function: _CompleterT = ...) -> None: ... def get_completer() -> _CompleterT: ... def get_completion_type() -> int: ... def get_begidx() -> int: ... def get_endidx() -> int: ... def set_completer_delims(string: str) -> None: ... def get_completer_delims() -> str: ... def set_completion_display_matches_hook(function: _CompDispT = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/rlcompleter.pyi0000644€tŠÔÚ€2›s®0000000036413215007212025760 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for rlcompleter from typing import Optional, Union import sys if sys.version_info >= (3,): _Text = str else: _Text = Union[str, unicode] class Completer: def complete(self, text: _Text, state: int) -> Optional[str]: ... 
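# --- Illustrative usage, not part of the mypy-0.560 archive: a minimal sketch of
# --- wiring rlcompleter into readline for tab completion, type-checking against
# --- the readline and rlcompleter stubs above. Assumes a platform where the
# --- readline module is available (not a plain Windows build).
import readline
import rlcompleter

completer = rlcompleter.Completer()
readline.set_completer(completer.complete)   # matches _CompleterT in the stub
readline.parse_and_bind("tab: complete")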
mypy-0.560/typeshed/stdlib/2and3/sched.pyi0000644€tŠÔÚ€2›s®0000000250213215007212024512 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Callable, Dict, List, NamedTuple, Text, Tuple Event = NamedTuple('Event', [ ('time', float), ('priority', Any), ('action', Callable[..., Any]), ('argument', Tuple[Any, ...]), ('kwargs', Dict[Text, Any]), ]) class scheduler: if sys.version_info >= (3, 3): def __init__(self, timefunc: Callable[[], float] = ..., delayfunc: Callable[[float], None] = ...) -> None: ... def enterabs(self, time: float, priority: Any, action: Callable[..., Any], argument: Tuple[Any, ...] = ..., kwargs: Dict[str, Any] = ...) -> Event: ... def enter(self, delay: float, priority: Any, action: Callable[..., Any], argument: Tuple[Any, ...] = ..., kwargs: Dict[str, Any] = ...) -> Event: ... def run(self, blocking: bool = ...) -> float: ... else: def __init__(self, timefunc: Callable[[], float], delayfunc: Callable[[float], None]) -> None: ... def enterabs(self, time: float, priority: Any, action: Callable[..., Any], argument: Tuple[Any, ...]) -> Event: ... def enter(self, delay: float, priority: Any, action: Callable[..., Any], argument: Tuple[Any, ...]) -> Event: ... def run(self) -> float: ... def cancel(self, event: Event) -> None: ... def empty(self) -> bool: ... @property def queue(self) -> List[Event]: ... mypy-0.560/typeshed/stdlib/2and3/select.pyi0000644€tŠÔÚ€2›s®0000000766713215007212024724 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Optional, Sequence, Tuple, Iterable, List, Union # When we have protocols, this should change to a protocol with a fileno method # See https://docs.python.org/3/c-api/file.html#c.PyObject_AsFileDescriptor _FileDescriptor = Union[int, Any] EPOLLERR: int EPOLLET: int EPOLLHUP: int EPOLLIN: int EPOLLMSG: int EPOLLONESHOT: int EPOLLOUT: int EPOLLPRI: int EPOLLRDBAND: int EPOLLRDNORM: int EPOLLWRBAND: int EPOLLWRNORM: int EPOLL_RDHUP: int KQ_EV_ADD: int KQ_EV_CLEAR: int KQ_EV_DELETE: int KQ_EV_DISABLE: int KQ_EV_ENABLE: int KQ_EV_EOF: int KQ_EV_ERROR: int KQ_EV_FLAG1: int KQ_EV_ONESHOT: int KQ_EV_SYSFLAGS: int KQ_FILTER_AIO: int KQ_FILTER_NETDEV: int KQ_FILTER_PROC: int KQ_FILTER_READ: int KQ_FILTER_SIGNAL: int KQ_FILTER_TIMER: int KQ_FILTER_VNODE: int KQ_FILTER_WRITE: int KQ_NOTE_ATTRIB: int KQ_NOTE_CHILD: int KQ_NOTE_DELETE: int KQ_NOTE_EXEC: int KQ_NOTE_EXIT: int KQ_NOTE_EXTEND: int KQ_NOTE_FORK: int KQ_NOTE_LINK: int KQ_NOTE_LINKDOWN: int KQ_NOTE_LINKINV: int KQ_NOTE_LINKUP: int KQ_NOTE_LOWAT: int KQ_NOTE_PCTRLMASK: int KQ_NOTE_PDATAMASK: int KQ_NOTE_RENAME: int KQ_NOTE_REVOKE: int KQ_NOTE_TRACK: int KQ_NOTE_TRACKERR: int KQ_NOTE_WRITE: int PIPE_BUF: int POLLERR: int POLLHUP: int POLLIN: int POLLMSG: int POLLNVAL: int POLLOUT: int POLLPRI: int POLLRDBAND: int POLLRDNORM: int POLLWRBAND: int POLLWRNORM: int class poll: def __init__(self) -> None: ... def register(self, fd: _FileDescriptor, eventmask: int = ...) -> None: ... def modify(self, fd: _FileDescriptor, eventmask: int) -> None: ... def unregister(self, fd: _FileDescriptor) -> None: ... def poll(self, timeout: Optional[float] = ...) -> List[Tuple[int, int]]: ... def select(rlist: Sequence[Any], wlist: Sequence[Any], xlist: Sequence[Any], timeout: Optional[float] = ...) -> Tuple[List[Any], List[Any], List[Any]]: ... if sys.version_info >= (3, 3): error = OSError else: class error(Exception): ... 
# BSD only class kevent(object): data: Any fflags: int filter: int flags: int ident: int udata: Any def __init__(self, ident: _FileDescriptor, filter: int = ..., flags: int = ..., fflags: int = ..., data: Any = ..., udata: Any = ...) -> None: ... # BSD only class kqueue(object): closed: bool def __init__(self) -> None: ... def close(self) -> None: ... def control(self, changelist: Optional[Iterable[kevent]], max_events: int, timeout: float = ...) -> List[kevent]: ... def fileno(self) -> int: ... @classmethod def fromfd(cls, fd: _FileDescriptor) -> kqueue: ... # Linux only class epoll(object): if sys.version_info >= (3, 3): def __init__(self, sizehint: int = ..., flags: int = ...) -> None: ... else: def __init__(self, sizehint: int = ...) -> None: ... if sys.version_info >= (3, 4): def __enter__(self) -> epoll: ... def __exit__(self, *args: Any) -> None: ... def close(self) -> None: ... closed: bool def fileno(self) -> int: ... def register(self, fd: _FileDescriptor, eventmask: int = ...) -> None: ... def modify(self, fd: _FileDescriptor, eventmask: int) -> None: ... def unregister(self, fd: _FileDescriptor) -> None: ... def poll(self, timeout: float = ..., maxevents: int = ...) -> List[Tuple[int, int]]: ... @classmethod def fromfd(cls, fd: _FileDescriptor) -> epoll: ... if sys.version_info >= (3, 3): # Solaris only class devpoll: if sys.version_info >= (3, 4): def close(self) -> None: ... closed: bool def fileno(self) -> int: ... def register(self, fd: _FileDescriptor, eventmask: int = ...) -> None: ... def modify(self, fd: _FileDescriptor, eventmask: int = ...) -> None: ... def unregister(self, fd: _FileDescriptor) -> None: ... def poll(self, timeout: Optional[float] = ...) -> List[Tuple[int, int]]: ... mypy-0.560/typeshed/stdlib/2and3/site.pyi0000644€tŠÔÚ€2›s®0000000103213215007212024365 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for site from typing import List, Iterable, Optional import sys PREFIXES = ... # type: List[str] ENABLE_USER_SITE = ... # type: Optional[bool] USER_SITE = ... # type: Optional[str] USER_BASE = ... # type: Optional[str] if sys.version_info < (3,): def main() -> None: ... def addsitedir(sitedir: str, known_paths: Optional[Iterable[str]] = ...) -> None: ... def getsitepackages(prefixes: Optional[Iterable[str]] = ...) -> List[str]: ... def getuserbase() -> str: ... def getusersitepackages() -> str: ... mypy-0.560/typeshed/stdlib/2and3/smtpd.pyi0000644€tŠÔÚ€2›s®0000000637613215007212024570 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for smtpd (Python 2 and 3) import sys import socket import asyncore import asynchat from typing import Any, DefaultDict, List, Optional, Text, Tuple, Type _Address = Tuple[str, int] # (host, port) class SMTPChannel(asynchat.async_chat): COMMAND: int DATA: int if sys.version_info >= (3, 3): command_size_limits: DefaultDict[str, int] if sys.version_info >= (3,): smtp_server: SMTPServer conn: socket.socket addr: Any received_lines: List[Text] smtp_state: int seen_greeting: str mailfrom: str rcpttos: List[str] received_data: str fqdn: str peer: str command_size_limit: int data_size_limit: int if sys.version_info >= (3, 5): enable_SMTPUTF8: bool if sys.version_info >= (3, 3): @property def max_command_size_limit(self) -> int: ... if sys.version_info >= (3, 5): def __init__(self, server: SMTPServer, conn: socket.socket, addr: Any, data_size_limit: int = ..., map: Optional[asyncore._maptype] = ..., enable_SMTPUTF8: bool = ..., decode_data: bool = ...) -> None: ... 
elif sys.version_info >= (3, 4): def __init__(self, server: SMTPServer, conn: socket.socket, addr: Any, data_size_limit: int = ..., map: Optional[asyncore._maptype] = ...) -> None: ... else: def __init__(self, server: SMTPServer, conn: socket.socket, addr: Any, data_size_limit: int = ...) -> None: ... def push(self, msg: bytes) -> None: ... def collect_incoming_data(self, data: bytes) -> None: ... def found_terminator(self) -> None: ... def smtp_HELO(self, arg: str) -> None: ... def smtp_NOOP(self, arg: str) -> None: ... def smtp_QUIT(self, arg: str) -> None: ... def smtp_MAIL(self, arg: str) -> None: ... def smtp_RCPT(self, arg: str) -> None: ... def smtp_RSET(self, arg: str) -> None: ... def smtp_DATA(self, arg: str) -> None: ... if sys.version_info >= (3, 3): def smtp_EHLO(self, arg: str) -> None: ... def smtp_HELP(self, arg: str) -> None: ... def smtp_VRFY(self, arg: str) -> None: ... def smtp_EXPN(self, arg: str) -> None: ... class SMTPServer(asyncore.dispatcher): channel_class: Type[SMTPChannel] data_size_limit: int enable_SMTPUTF8: bool if sys.version_info >= (3, 5): def __init__(self, localaddr: _Address, remoteaddr: _Address, data_size_limit: int = ..., map: Optional[asyncore._maptype] = ..., enable_SMTPUTF8: bool = ..., decode_data: bool = ...) -> None: ... elif sys.version_info >= (3, 4): def __init__(self, localaddr: _Address, remoteaddr: _Address, data_size_limit: int = ..., map: Optional[asyncore._maptype] = ...) -> None: ... else: def __init__(self, localaddr: _Address, remoteaddr: _Address, data_size_limit: int = ...) -> None: ... def handle_accepted(self, conn: socket.socket, addr: Any) -> None: ... def process_message(self, peer: _Address, mailfrom: str, rcpttos: List[Text], data: str, **kwargs: Any) -> Optional[str]: ... class DebuggingServer(SMTPServer): ... class PureProxy(SMTPServer): ... class MailmanProxy(PureProxy): ... mypy-0.560/typeshed/stdlib/2and3/sndhdr.pyi0000644€tŠÔÚ€2›s®0000000124513215007212024711 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for sndhdr (Python 2 and 3) import os import sys from typing import Any, NamedTuple, Optional, Tuple, Union if sys.version_info >= (3, 5): SndHeaders = NamedTuple('SndHeaders', [ ('filetype', str), ('framerate', int), ('nchannels', int), ('nframes', int), ('sampwidth', Union[int, str]), ]) _SndHeaders = SndHeaders else: _SndHeaders = Tuple[str, int, int, int, Union[int, str]] if sys.version_info >= (3, 6): _Path = Union[str, bytes, os.PathLike[Any]] else: _Path = Union[str, bytes] def what(filename: _Path) -> Optional[_SndHeaders]: ... def whathdr(filename: _Path) -> Optional[_SndHeaders]: ... mypy-0.560/typeshed/stdlib/2and3/socket.pyi0000644€tŠÔÚ€2›s®0000003715213215007212024725 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for socket # Ron Murawski # based on: http://docs.python.org/3.2/library/socket.html # see: http://hg.python.org/cpython/file/3d0686d90f55/Lib/socket.py # see: http://nullege.com/codes/search/socket # adapted for Python 2.7 by Michal Pokorny import sys from typing import Any, Iterable, Tuple, List, Optional, Union, overload, TypeVar # ----- variables and constants ----- AF_UNIX: AddressFamily AF_INET: AddressFamily AF_INET6: AddressFamily SOCK_STREAM: SocketKind SOCK_DGRAM: SocketKind SOCK_RAW: SocketKind SOCK_RDM: SocketKind SOCK_SEQPACKET: SocketKind SOCK_CLOEXEC: SocketKind SOCK_NONBLOCK: SocketKind SOMAXCONN: int has_ipv6: bool _GLOBAL_DEFAULT_TIMEOUT: Any SocketType: Any SocketIO: Any # These are flags that may exist on Python 3.6. Many don't exist on all platforms. 
AF_AAL5: AddressFamily AF_APPLETALK: AddressFamily AF_ASH: AddressFamily AF_ATMPVC: AddressFamily AF_ATMSVC: AddressFamily AF_AX25: AddressFamily AF_BLUETOOTH: AddressFamily AF_BRIDGE: AddressFamily AF_CAN: AddressFamily AF_DECnet: AddressFamily AF_ECONET: AddressFamily AF_IPX: AddressFamily AF_IRDA: AddressFamily AF_KEY: AddressFamily AF_LLC: AddressFamily AF_NETBEUI: AddressFamily AF_NETLINK: AddressFamily AF_NETROM: AddressFamily AF_PACKET: AddressFamily AF_PPPOX: AddressFamily AF_RDS: AddressFamily AF_ROSE: AddressFamily AF_ROUTE: AddressFamily AF_SECURITY: AddressFamily AF_SNA: AddressFamily AF_SYSTEM: AddressFamily AF_TIPC: AddressFamily AF_UNSPEC: AddressFamily AF_WANPIPE: AddressFamily AF_X25: AddressFamily AI_ADDRCONFIG: AddressInfo AI_ALL: AddressInfo AI_CANONNAME: AddressInfo AI_DEFAULT: AddressInfo AI_MASK: AddressInfo AI_NUMERICHOST: AddressInfo AI_NUMERICSERV: AddressInfo AI_PASSIVE: AddressInfo AI_V4MAPPED: AddressInfo AI_V4MAPPED_CFG: AddressInfo BDADDR_ANY: str BDADDR_LOCAL: str BTPROTO_HCI: int BTPROTO_L2CAP: int BTPROTO_RFCOMM: int BTPROTO_SCO: int CAN_EFF_FLAG: int CAN_EFF_MASK: int CAN_ERR_FLAG: int CAN_ERR_MASK: int CAN_RAW: int CAN_RAW_ERR_FILTER: int CAN_RAW_FILTER: int CAN_RAW_LOOPBACK: int CAN_RAW_RECV_OWN_MSGS: int CAN_RTR_FLAG: int CAN_SFF_MASK: int CAPI: int EAGAIN: int EAI_ADDRFAMILY: int EAI_AGAIN: int EAI_BADFLAGS: int EAI_BADHINTS: int EAI_FAIL: int EAI_FAMILY: int EAI_MAX: int EAI_MEMORY: int EAI_NODATA: int EAI_NONAME: int EAI_OVERFLOW: int EAI_PROTOCOL: int EAI_SERVICE: int EAI_SOCKTYPE: int EAI_SYSTEM: int EBADF: int EINTR: int EWOULDBLOCK: int HCI_DATA_DIR: int HCI_FILTER: int HCI_TIME_STAMP: int INADDR_ALLHOSTS_GROUP: int INADDR_ANY: int INADDR_BROADCAST: int INADDR_LOOPBACK: int INADDR_MAX_LOCAL_GROUP: int INADDR_NONE: int INADDR_UNSPEC_GROUP: int IPPORT_RESERVED: int IPPORT_USERRESERVED: int IPPROTO_AH: int IPPROTO_BIP: int IPPROTO_DSTOPTS: int IPPROTO_EGP: int IPPROTO_EON: int IPPROTO_ESP: int IPPROTO_FRAGMENT: int IPPROTO_GGP: int IPPROTO_GRE: int IPPROTO_HELLO: int IPPROTO_HOPOPTS: int IPPROTO_ICMP: int IPPROTO_ICMPV6: int IPPROTO_IDP: int IPPROTO_IGMP: int IPPROTO_IP: int IPPROTO_IPCOMP: int IPPROTO_IPIP: int IPPROTO_IPV4: int IPPROTO_IPV6: int IPPROTO_MAX: int IPPROTO_MOBILE: int IPPROTO_ND: int IPPROTO_NONE: int IPPROTO_PIM: int IPPROTO_PUP: int IPPROTO_RAW: int IPPROTO_ROUTING: int IPPROTO_RSVP: int IPPROTO_SCTP: int IPPROTO_TCP: int IPPROTO_TP: int IPPROTO_UDP: int IPPROTO_VRRP: int IPPROTO_XTP: int IPV6_CHECKSUM: int IPV6_DONTFRAG: int IPV6_DSTOPTS: int IPV6_HOPLIMIT: int IPV6_HOPOPTS: int IPV6_JOIN_GROUP: int IPV6_LEAVE_GROUP: int IPV6_MULTICAST_HOPS: int IPV6_MULTICAST_IF: int IPV6_MULTICAST_LOOP: int IPV6_NEXTHOP: int IPV6_PATHMTU: int IPV6_PKTINFO: int IPV6_RECVDSTOPTS: int IPV6_RECVHOPLIMIT: int IPV6_RECVHOPOPTS: int IPV6_RECVPATHMTU: int IPV6_RECVPKTINFO: int IPV6_RECVRTHDR: int IPV6_RECVTCLASS: int IPV6_RTHDR: int IPV6_RTHDR_TYPE_0: int IPV6_RTHDRDSTOPTS: int IPV6_TCLASS: int IPV6_UNICAST_HOPS: int IPV6_USE_MIN_MTU: int IPV6_V6ONLY: int IP_ADD_MEMBERSHIP: int IP_DEFAULT_MULTICAST_LOOP: int IP_DEFAULT_MULTICAST_TTL: int IP_DROP_MEMBERSHIP: int IP_HDRINCL: int IP_MAX_MEMBERSHIPS: int IP_MULTICAST_IF: int IP_MULTICAST_LOOP: int IP_MULTICAST_TTL: int IP_OPTIONS: int IP_RECVDSTADDR: int IP_RECVOPTS: int IP_RECVRETOPTS: int IP_RETOPTS: int IP_TOS: int IP_TRANSPARENT: int IP_TTL: int IPX_TYPE: int LOCAL_PEERCRED: int MSG_BCAST: MsgFlag MSG_BTAG: MsgFlag MSG_CMSG_CLOEXEC: MsgFlag MSG_CONFIRM: MsgFlag MSG_CTRUNC: MsgFlag MSG_DONTROUTE: 
MsgFlag MSG_DONTWAIT: MsgFlag MSG_EOF: MsgFlag MSG_EOR: MsgFlag MSG_ERRQUEUE: MsgFlag MSG_ETAG: MsgFlag MSG_FASTOPEN: MsgFlag MSG_MCAST: MsgFlag MSG_MORE: MsgFlag MSG_NOSIGNAL: MsgFlag MSG_NOTIFICATION: MsgFlag MSG_OOB: MsgFlag MSG_PEEK: MsgFlag MSG_TRUNC: MsgFlag MSG_WAITALL: MsgFlag NETLINK_ARPD: int NETLINK_CRYPTO: int NETLINK_DNRTMSG: int NETLINK_FIREWALL: int NETLINK_IP6_FW: int NETLINK_NFLOG: int NETLINK_ROUTE6: int NETLINK_ROUTE: int NETLINK_SKIP: int NETLINK_TAPBASE: int NETLINK_TCPDIAG: int NETLINK_USERSOCK: int NETLINK_W1: int NETLINK_XFRM: int NI_DGRAM: int NI_MAXHOST: int NI_MAXSERV: int NI_NAMEREQD: int NI_NOFQDN: int NI_NUMERICHOST: int NI_NUMERICSERV: int PACKET_BROADCAST: int PACKET_FASTROUTE: int PACKET_HOST: int PACKET_LOOPBACK: int PACKET_MULTICAST: int PACKET_OTHERHOST: int PACKET_OUTGOING: int PF_CAN: int PF_PACKET: int PF_RDS: int PF_SYSTEM: int SCM_CREDENTIALS: int SCM_CREDS: int SCM_RIGHTS: int SHUT_RD: int SHUT_RDWR: int SHUT_WR: int SOL_ATALK: int SOL_AX25: int SOL_CAN_BASE: int SOL_CAN_RAW: int SOL_HCI: int SOL_IP: int SOL_IPX: int SOL_NETROM: int SOL_RDS: int SOL_ROSE: int SOL_SOCKET: int SOL_TCP: int SOL_TIPC: int SOL_UDP: int SO_ACCEPTCONN: int SO_BINDTODEVICE: int SO_BROADCAST: int SO_DEBUG: int SO_DONTROUTE: int SO_ERROR: int SO_EXCLUSIVEADDRUSE: int SO_KEEPALIVE: int SO_LINGER: int SO_MARK: int SO_OOBINLINE: int SO_PASSCRED: int SO_PEERCRED: int SO_PRIORITY: int SO_RCVBUF: int SO_RCVLOWAT: int SO_RCVTIMEO: int SO_REUSEADDR: int SO_REUSEPORT: int SO_SETFIB: int SO_SNDBUF: int SO_SNDLOWAT: int SO_SNDTIMEO: int SO_TYPE: int SO_USELOOPBACK: int SYSPROTO_CONTROL: int TCP_CORK: int TCP_DEFER_ACCEPT: int TCP_FASTOPEN: int TCP_INFO: int TCP_KEEPCNT: int TCP_KEEPIDLE: int TCP_KEEPINTVL: int TCP_LINGER2: int TCP_MAXSEG: int TCP_NODELAY: int TCP_NOTSENT_LOWAT: int TCP_QUICKACK: int TCP_SYNCNT: int TCP_WINDOW_CLAMP: int TIPC_ADDR_ID: int TIPC_ADDR_NAME: int TIPC_ADDR_NAMESEQ: int TIPC_CFG_SRV: int TIPC_CLUSTER_SCOPE: int TIPC_CONN_TIMEOUT: int TIPC_CRITICAL_IMPORTANCE: int TIPC_DEST_DROPPABLE: int TIPC_HIGH_IMPORTANCE: int TIPC_IMPORTANCE: int TIPC_LOW_IMPORTANCE: int TIPC_MEDIUM_IMPORTANCE: int TIPC_NODE_SCOPE: int TIPC_PUBLISHED: int TIPC_SRC_DROPPABLE: int TIPC_SUB_CANCEL: int TIPC_SUB_PORTS: int TIPC_SUB_SERVICE: int TIPC_SUBSCR_TIMEOUT: int TIPC_TOP_SRV: int TIPC_WAIT_FOREVER: int TIPC_WITHDRAWN: int TIPC_ZONE_SCOPE: int if sys.version_info >= (3, 3): RDS_CANCEL_SENT_TO: int RDS_CMSG_RDMA_ARGS: int RDS_CMSG_RDMA_DEST: int RDS_CMSG_RDMA_MAP: int RDS_CMSG_RDMA_STATUS: int RDS_CMSG_RDMA_UPDATE: int RDS_CONG_MONITOR: int RDS_FREE_MR: int RDS_GET_MR: int RDS_GET_MR_FOR_DEST: int RDS_RDMA_DONTWAIT: int RDS_RDMA_FENCE: int RDS_RDMA_INVALIDATE: int RDS_RDMA_NOTIFY_ME: int RDS_RDMA_READWRITE: int RDS_RDMA_SILENT: int RDS_RDMA_USE_ONCE: int RDS_RECVERR: int if sys.version_info >= (3, 4): CAN_BCM: int CAN_BCM_TX_SETUP: int CAN_BCM_TX_DELETE: int CAN_BCM_TX_READ: int CAN_BCM_TX_SEND: int CAN_BCM_RX_SETUP: int CAN_BCM_RX_DELETE: int CAN_BCM_RX_READ: int CAN_BCM_TX_STATUS: int CAN_BCM_TX_EXPIRED: int CAN_BCM_RX_STATUS: int CAN_BCM_RX_TIMEOUT: int CAN_BCM_RX_CHANGED: int AF_LINK: AddressFamily if sys.version_info >= (3, 5): CAN_RAW_FD_FRAMES: int if sys.version_info >= (3, 6): SO_DOMAIN: int SO_PROTOCOL: int SO_PEERSEC: int SO_PASSSEC: int TCP_USER_TIMEOUT: int TCP_CONGESTION: int AF_ALG: AddressFamily SOL_ALG: int ALG_SET_KEY: int ALG_SET_IV: int ALG_SET_OP: int ALG_SET_AEAD_ASSOCLEN: int ALG_SET_AEAD_AUTHSIZE: int ALG_SET_PUBKEY: int ALG_OP_DECRYPT: int ALG_OP_ENCRYPT: int 
ALG_OP_SIGN: int ALG_OP_VERIFY: int if sys.platform == 'win32': SIO_RCVALL: int SIO_KEEPALIVE_VALS: int RCVALL_IPLEVEL: int RCVALL_MAX: int RCVALL_OFF: int RCVALL_ON: int RCVALL_SOCKETLEVELONLY: int if sys.version_info >= (3, 6): SIO_LOOPBACK_FAST_PATH: int # enum versions of above flags py 3.4+ if sys.version_info >= (3, 4): from enum import IntEnum class AddressFamily(IntEnum): AF_UNIX = ... AF_INET = ... AF_INET6 = ... AF_APPLETALK = ... AF_ASH = ... AF_ATMPVC = ... AF_ATMSVC = ... AF_AX25 = ... AF_BLUETOOTH = ... AF_BRIDGE = ... AF_DECnet = ... AF_ECONET = ... AF_IPX = ... AF_IRDA = ... AF_KEY = ... AF_LLC = ... AF_NETBEUI = ... AF_NETLINK = ... AF_NETROM = ... AF_PACKET = ... AF_PPPOX = ... AF_ROSE = ... AF_ROUTE = ... AF_SECURITY = ... AF_SNA = ... AF_TIPC = ... AF_UNSPEC = ... AF_WANPIPE = ... AF_X25 = ... AF_LINK = ... class SocketKind(IntEnum): SOCK_STREAM = ... SOCK_DGRAM = ... SOCK_RAW = ... SOCK_RDM = ... SOCK_SEQPACKET = ... SOCK_CLOEXEC = ... SOCK_NONBLOCK = ... else: AddressFamily = int SocketKind = int if sys.version_info >= (3, 6): from enum import IntFlag class AddressInfo(IntFlag): AI_ADDRCONFIG = ... AI_ALL = ... AI_CANONNAME = ... AI_NUMERICHOST = ... AI_NUMERICSERV = ... AI_PASSIVE = ... AI_V4MAPPED = ... class MsgFlag(IntFlag): MSG_CTRUNC = ... MSG_DONTROUTE = ... MSG_DONTWAIT = ... MSG_EOR = ... MSG_OOB = ... MSG_PEEK = ... MSG_TRUNC = ... MSG_WAITALL = ... else: AddressInfo = int MsgFlag = int # ----- exceptions ----- class error(IOError): ... class herror(error): def __init__(self, herror: int, string: str) -> None: ... class gaierror(error): def __init__(self, error: int, string: str) -> None: ... class timeout(error): ... # Addresses can be either tuples of varying lengths (AF_INET, AF_INET6, # AF_NETLINK, AF_TIPC) or strings (AF_UNIX). # TODO AF_PACKET and AF_BLUETOOTH address objects _CMSG = Tuple[int, int, bytes] _SelfT = TypeVar('_SelfT', bound=socket) # ----- classes ----- class socket: family: int type: int proto: int if sys.version_info < (3,): def __init__(self, family: int = ..., type: int = ..., proto: int = ...) -> None: ... else: def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: Optional[int] = ...) -> None: ... if sys.version_info >= (3, 2): def __enter__(self: _SelfT) -> _SelfT: ... def __exit__(self, *args: Any) -> None: ... # --- methods --- # second tuple item is an address def accept(self) -> Tuple['socket', Any]: ... def bind(self, address: Union[tuple, str]) -> None: ... def close(self) -> None: ... def connect(self, address: Union[tuple, str]) -> None: ... def connect_ex(self, address: Union[tuple, str]) -> int: ... def detach(self) -> int: ... def fileno(self) -> int: ... # return value is an address def getpeername(self) -> Any: ... def getsockname(self) -> Any: ... @overload def getsockopt(self, level: int, optname: int) -> int: ... @overload def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... def gettimeout(self) -> float: ... def ioctl(self, control: object, option: Tuple[int, int, int]) -> None: ... def listen(self, backlog: int) -> None: ... # TODO the return value may be BinaryIO or TextIO, depending on mode def makefile(self, mode: str = ..., buffering: int = ..., encoding: str = ..., errors: str = ..., newline: str = ...) -> Any: ... def recv(self, bufsize: int, flags: int = ...) -> bytes: ... # return type is an address def recvfrom(self, bufsize: int, flags: int = ...) -> Any: ... def recvfrom_into(self, buffer: bytearray, nbytes: int, flags: int = ...) -> Any: ... 
def recv_into(self, buffer: bytearray, nbytes: int, flags: int = ...) -> Any: ... def send(self, data: bytes, flags: int = ...) -> int: ... def sendall(self, data: bytes, flags: int =...) -> None: ... # return type: None on success @overload def sendto(self, data: bytes, address: Union[tuple, str]) -> int: ... @overload def sendto(self, data: bytes, flags: int, address: Union[tuple, str]) -> int: ... def setblocking(self, flag: bool) -> None: ... def settimeout(self, value: Optional[float]) -> None: ... def setsockopt(self, level: int, optname: int, value: Union[int, bytes]) -> None: ... def shutdown(self, how: int) -> None: ... if sys.version_info >= (3, 3): def recvmsg(self, __bufsize: int, __ancbufsize: int = ..., __flags: int = ...) -> Tuple[bytes, List[_CMSG], int, Any]: ... def recvmsg_into(self, __buffers: Iterable[bytearray], __ancbufsize: int = ..., __flags: int = ...) -> Tuple[int, List[_CMSG], int, Any]: ... def sendmsg(self, __buffers: Iterable[bytes], __ancdata: Iterable[_CMSG] = ..., __flags: int = ..., __address: Any = ...) -> int: ... # ----- functions ----- def create_connection(address: Tuple[str, int], timeout: float = ..., source_address: Tuple[str, int] = ...) -> socket: ... # the 5th tuple item is an address # TODO the "Tuple[Any, ...]" should be "Union[Tuple[str, int], Tuple[str, int, int, int]]" but that triggers # https://github.com/python/mypy/issues/2509 def getaddrinfo( host: Optional[str], port: Union[str, int, None], family: int = ..., socktype: int = ..., proto: int = ..., flags: int = ...) -> List[Tuple[int, int, int, str, Tuple[Any, ...]]]: ... def getfqdn(name: str = ...) -> str: ... def gethostbyname(hostname: str) -> str: ... def gethostbyname_ex(hostname: str) -> Tuple[str, List[str], List[str]]: ... def gethostname() -> str: ... def gethostbyaddr(ip_address: str) -> Tuple[str, List[str], List[str]]: ... def getnameinfo(sockaddr: tuple, flags: int) -> Tuple[str, int]: ... def getprotobyname(protocolname: str) -> int: ... def getservbyname(servicename: str, protocolname: str = ...) -> int: ... def getservbyport(port: int, protocolname: str = ...) -> str: ... def socketpair(family: int = ..., type: int = ..., proto: int = ...) -> Tuple[socket, socket]: ... def fromfd(fd: int, family: int, type: int, proto: int = ...) -> socket: ... def ntohl(x: int) -> int: ... # param & ret val are 32-bit ints def ntohs(x: int) -> int: ... # param & ret val are 16-bit ints def htonl(x: int) -> int: ... # param & ret val are 32-bit ints def htons(x: int) -> int: ... # param & ret val are 16-bit ints def inet_aton(ip_string: str) -> bytes: ... # ret val 4 bytes in length def inet_ntoa(packed_ip: bytes) -> str: ... def inet_pton(address_family: int, ip_string: str) -> bytes: ... def inet_ntop(address_family: int, packed_ip: bytes) -> str: ... def getdefaulttimeout() -> Optional[float]: ... def setdefaulttimeout(timeout: Optional[float]) -> None: ... if sys.version_info >= (3, 3): def CMSG_LEN(length: int) -> int: ... def CMSG_SPACE(length: int) -> int: ... def sethostname(name: str) -> None: ... def if_nameindex() -> List[Tuple[int, str]]: ... def if_nametoindex(name: str) -> int: ... def if_indextoname(index: int) -> str: ... mypy-0.560/typeshed/stdlib/2and3/stringprep.pyi0000644€tŠÔÚ€2›s®0000000153213215007212025623 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for stringprep (Python 2 and 3) from typing import Text def in_table_a1(code: Text) -> bool: ... def in_table_b1(code: Text) -> bool: ... def map_table_b3(code: Text) -> Text: ... 
def map_table_b2(a: Text) -> Text: ... def in_table_c11(code: Text) -> bool: ... def in_table_c12(code: Text) -> bool: ... def in_table_c11_c12(code: Text) -> bool: ... def in_table_c21(code: Text) -> bool: ... def in_table_c22(code: Text) -> bool: ... def in_table_c21_c22(code: Text) -> bool: ... def in_table_c3(code: Text) -> bool: ... def in_table_c4(code: Text) -> bool: ... def in_table_c5(code: Text) -> bool: ... def in_table_c6(code: Text) -> bool: ... def in_table_c7(code: Text) -> bool: ... def in_table_c8(code: Text) -> bool: ... def in_table_c9(code: Text) -> bool: ... def in_table_d1(code: Text) -> bool: ... def in_table_d2(code: Text) -> bool: ... mypy-0.560/typeshed/stdlib/2and3/struct.pyi0000644€tŠÔÚ€2›s®0000000303513215007212024752 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for struct # Based on http://docs.python.org/3.2/library/struct.html # Based on http://docs.python.org/2/library/struct.html import sys from typing import Any, Tuple, Text, Union, Iterator from array import array class error(Exception): ... _FmtType = Union[bytes, Text] if sys.version_info >= (3,): _BufferType = Union[bytes, bytearray, memoryview] _WriteBufferType = Union[array, bytearray, memoryview] else: _BufferType = Union[bytes, bytearray, buffer, memoryview] _WriteBufferType = Union[array[Any], bytearray, buffer, memoryview] def pack(fmt: _FmtType, *v: Any) -> bytes: ... def pack_into(fmt: _FmtType, buffer: _WriteBufferType, offset: int, *v: Any) -> None: ... def unpack(fmt: _FmtType, buffer: _BufferType) -> Tuple[Any, ...]: ... def unpack_from(fmt: _FmtType, buffer: _BufferType, offset: int = ...) -> Tuple[Any, ...]: ... if sys.version_info >= (3, 4): def iter_unpack(fmt: _FmtType, buffer: _BufferType) -> Iterator[Tuple[Any, ...]]: ... def calcsize(fmt: _FmtType) -> int: ... class Struct: format = ... # type: bytes size = ... # type: int def __init__(self, format: _FmtType) -> None: ... def pack(self, *v: Any) -> bytes: ... def pack_into(self, buffer: _WriteBufferType, offset: int, *v: Any) -> None: ... def unpack(self, buffer: _BufferType) -> Tuple[Any, ...]: ... def unpack_from(self, buffer: _BufferType, offset: int = ...) -> Tuple[Any, ...]: ... if sys.version_info >= (3, 4): def iter_unpack(self, buffer: _BufferType) -> Iterator[Tuple[Any, ...]]: ... mypy-0.560/typeshed/stdlib/2and3/sunau.pyi0000644€tŠÔÚ€2›s®0000000643713215007212024572 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for sunau (Python 2 and 3) import sys from mypy_extensions import NoReturn from typing import Any, NamedTuple, Optional, Text, IO, Union, Tuple _File = Union[Text, IO[bytes]] class Error(Exception): ... AUDIO_FILE_MAGIC = ... # type: int AUDIO_FILE_ENCODING_MULAW_8 = ... # type: int AUDIO_FILE_ENCODING_LINEAR_8 = ... # type: int AUDIO_FILE_ENCODING_LINEAR_16 = ... # type: int AUDIO_FILE_ENCODING_LINEAR_24 = ... # type: int AUDIO_FILE_ENCODING_LINEAR_32 = ... # type: int AUDIO_FILE_ENCODING_FLOAT = ... # type: int AUDIO_FILE_ENCODING_DOUBLE = ... # type: int AUDIO_FILE_ENCODING_ADPCM_G721 = ... # type: int AUDIO_FILE_ENCODING_ADPCM_G722 = ... # type: int AUDIO_FILE_ENCODING_ADPCM_G723_3 = ... # type: int AUDIO_FILE_ENCODING_ADPCM_G723_5 = ... # type: int AUDIO_FILE_ENCODING_ALAW_8 = ... # type: int AUDIO_UNKNOWN_SIZE = ... 
# type: int if sys.version_info < (3, 0): _sunau_params = Tuple[int, int, int, int, str, str] else: _sunau_params = NamedTuple('_sunau_params', [ ('nchannels', int), ('sampwidth', int), ('framerate', int), ('nframes', int), ('comptype', str), ('compname', str), ]) class Au_read: def __init__(self, f: _File) -> None: ... if sys.version_info >= (3, 3): def __enter__(self) -> Au_read: ... def __exit__(self, *args: Any) -> None: ... def getfp(self) -> Optional[IO[bytes]]: ... def rewind(self) -> None: ... def close(self) -> None: ... def tell(self) -> int: ... def getnchannels(self) -> int: ... def getnframes(self) -> int: ... def getsampwidth(self) -> int: ... def getframerate(self) -> int: ... def getcomptype(self) -> str: ... def getcompname(self) -> str: ... def getparams(self) -> _sunau_params: ... def getmarkers(self) -> None: ... def getmark(self, id: Any) -> NoReturn: ... def setpos(self, pos: int) -> None: ... def readframes(self, nframes: int) -> Optional[bytes]: ... class Au_write: def __init__(self, f: _File) -> None: ... if sys.version_info >= (3, 3): def __enter__(self) -> Au_write: ... def __exit__(self, *args: Any) -> None: ... def setnchannels(self, nchannels: int) -> None: ... def getnchannels(self) -> int: ... def setsampwidth(self, sampwidth: int) -> None: ... def getsampwidth(self) -> int: ... def setframerate(self, framerate: float) -> None: ... def getframerate(self) -> int: ... def setnframes(self, nframes: int) -> None: ... def getnframes(self) -> int: ... def setcomptype(self, comptype: str, compname: str) -> None: ... def getcomptype(self) -> str: ... def getcompname(self) -> str: ... def setparams(self, params: _sunau_params) -> None: ... def getparams(self) -> _sunau_params: ... def setmark(self, id: Any, pos: Any, name: Any) -> NoReturn: ... def getmark(self, id: Any) -> NoReturn: ... def getmarkers(self) -> None: ... def tell(self) -> int: ... # should be any bytes-like object after 3.4, but we don't have a type for that def writeframesraw(self, data: bytes) -> None: ... def writeframes(self, data: bytes) -> None: ... def close(self) -> None: ... # Returns a Au_read if mode is rb and Au_write if mode is wb def open(f: _File, mode: Optional[str] = ...) -> Any: ... openfp = open mypy-0.560/typeshed/stdlib/2and3/symtable.pyi0000644€tŠÔÚ€2›s®0000000315513215007212025251 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import List, Sequence, Tuple, Text def symtable(code: Text, filename: Text, compile_type: Text) -> SymbolTable: ... class SymbolTable(object): def get_type(self) -> str: ... def get_id(self) -> int: ... def get_name(self) -> str: ... def get_lineno(self) -> int: ... def is_optimized(self) -> bool: ... def is_nested(self) -> bool: ... def has_children(self) -> bool: ... def has_exec(self) -> bool: ... if sys.version_info < (3, 0): def has_import_star(self) -> bool: ... def get_identifiers(self) -> Sequence[str]: ... def lookup(self, name: str) -> Symbol: ... def get_symbols(self) -> List[Symbol]: ... def get_children(self) -> List[SymbolTable]: ... class Function(SymbolTable): def get_parameters(self) -> Tuple[str, ...]: ... def get_locals(self) -> Tuple[str, ...]: ... def get_globals(self) -> Tuple[str, ...]: ... def get_frees(self) -> Tuple[str, ...]: ... class Class(SymbolTable): def get_methods(self) -> Tuple[str, ...]: ... class Symbol(object): def get_name(self) -> str: ... def is_referenced(self) -> bool: ... def is_parameter(self) -> bool: ... def is_global(self) -> bool: ... def is_declared_global(self) -> bool: ... 
def is_local(self) -> bool: ... if sys.version_info >= (3, 6): def is_annotated(self) -> bool: ... def is_free(self) -> bool: ... def is_imported(self) -> bool: ... def is_assigned(self) -> bool: ... def is_namespace(self) -> bool: ... def get_namespaces(self) -> Sequence[SymbolTable]: ... def get_namespace(self) -> SymbolTable: ... mypy-0.560/typeshed/stdlib/2and3/sysconfig.pyi0000644€tŠÔÚ€2›s®0000000153713215007212025437 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for sysconfig from typing import overload, Any, Dict, IO, List, Optional, Tuple, Union @overload def get_config_vars(*args: str) -> List[Any]: ... @overload def get_config_vars() -> Dict[str, Any]: ... def get_config_var(name: str) -> Optional[str]: ... def get_scheme_names() -> Tuple[str, ...]: ... def get_path_names() -> Tuple[str, ...]: ... def get_path(name: str, scheme: str = ..., vars: Optional[Dict[str, Any]] = ..., expand: bool = ...) -> Optional[str]: ... def get_paths(scheme: str = ..., vars: Optional[Dict[str, Any]] = ..., expand: bool = ...) -> Dict[str, str]: ... def get_python_version() -> str: ... def get_platform() -> str: ... def is_python_build() -> bool: ... def parse_config_h(fp: IO[Any], vars: Optional[Dict[str, Any]]) -> Dict[str, Any]: ... def get_config_h_filename() -> str: ... def get_makefile_filename() -> str: ... mypy-0.560/typeshed/stdlib/2and3/syslog.pyi0000644€tŠÔÚ€2›s®0000000235013215007212024745 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import overload LOG_ALERT = ... # type: int LOG_AUTH = ... # type: int LOG_CONS = ... # type: int LOG_CRIT = ... # type: int LOG_CRON = ... # type: int LOG_DAEMON = ... # type: int LOG_DEBUG = ... # type: int LOG_EMERG = ... # type: int LOG_ERR = ... # type: int LOG_INFO = ... # type: int LOG_KERN = ... # type: int LOG_LOCAL0 = ... # type: int LOG_LOCAL1 = ... # type: int LOG_LOCAL2 = ... # type: int LOG_LOCAL3 = ... # type: int LOG_LOCAL4 = ... # type: int LOG_LOCAL5 = ... # type: int LOG_LOCAL6 = ... # type: int LOG_LOCAL7 = ... # type: int LOG_LPR = ... # type: int LOG_MAIL = ... # type: int LOG_NDELAY = ... # type: int LOG_NEWS = ... # type: int LOG_NOTICE = ... # type: int LOG_NOWAIT = ... # type: int LOG_PERROR = ... # type: int LOG_PID = ... # type: int LOG_SYSLOG = ... # type: int LOG_USER = ... # type: int LOG_UUCP = ... # type: int LOG_WARNING = ... # type: int def LOG_MASK(a: int) -> int: ... def LOG_UPTO(a: int) -> int: ... def closelog() -> None: ... def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ... def setlogmask(x: int) -> int: ... @overload def syslog(priority: int, message: str) -> None: ... @overload def syslog(message: str) -> None: ... mypy-0.560/typeshed/stdlib/2and3/tabnanny.pyi0000644€tŠÔÚ€2›s®0000000115513215007212025241 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for tabnanny (Python 2 and 3) import os import sys from typing import Iterable, Tuple, Union if sys.version_info >= (3, 6): _Path = Union[str, bytes, os.PathLike] else: _Path = Union[str, bytes] verbose = ... # type: int filename_only = ... # type: int class NannyNag(Exception): def __init__(self, lineno: int, msg: str, line: str) -> None: ... def get_lineno(self) -> int: ... def get_msg(self) -> str: ... def get_line(self) -> str: ... def check(file: _Path) -> None: ... def process_tokens(tokens: Iterable[Tuple[int, str, Tuple[int, int], Tuple[int, int], str]]) -> None: ... 
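# --- Illustrative usage, not part of the mypy-0.560 archive: a minimal sketch of
# --- running tabnanny's ambiguous-indentation check programmatically, as typed in
# --- the stub above. "script.py" is a hypothetical placeholder; findings are
# --- printed as diagnostics (NannyNag is handled inside check()).
import tabnanny

tabnanny.verbose = 1          # module-level flag declared in the stub
tabnanny.check("script.py")   # checks a file, or recurses into a directory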
mypy-0.560/typeshed/stdlib/2and3/tarfile.pyi0000644€tŠÔÚ€2›s®0000001611513215007212025057 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for tarfile from typing import ( Callable, IO, Iterable, Iterator, List, Mapping, Optional, Type, Union, ) import os import sys from types import TracebackType if sys.version_info >= (3, 6): _Path = Union[bytes, str, os.PathLike] elif sys.version_info >= (3,): _Path = Union[bytes, str] else: _Path = Union[str, unicode] ENCODING = ... # type: str USTAR_FORMAT = ... # type: int GNU_FORMAT = ... # type: int PAX_FORMAT = ... # type: int DEFAULT_FORMAT = ... # type: int REGTYPE = ... # type: bytes AREGTYPE = ... # type: bytes LNKTYPE = ... # type: bytes SYMTYPE = ... # type: bytes DIRTYPE = ... # type: bytes FIFOTYPE = ... # type: bytes CONTTYPE = ... # type: bytes CHRTYPE = ... # type: bytes BLKTYPE = ... # type: bytes GNUTYPE_SPARSE = ... # type: bytes if sys.version_info < (3,): TAR_PLAIN = ... # type: int TAR_GZIPPED = ... # type: int def open(name: Optional[_Path] = ..., mode: str = ..., fileobj: Optional[IO[bytes]] = ..., bufsize: int = ..., *, format: Optional[int] = ..., tarinfo: Optional[TarInfo] = ..., dereference: Optional[bool] = ..., ignore_zeros: Optional[bool] = ..., encoding: Optional[str] = ..., errors: str = ..., pax_headers: Optional[Mapping[str, str]] = ..., debug: Optional[int] = ..., errorlevel: Optional[int] = ..., compresslevel: Optional[int] = ...) -> TarFile: ... class TarFile(Iterable[TarInfo]): name = ... # type: Optional[_Path] mode = ... # type: str fileobj = ... # type: Optional[IO[bytes]] format = ... # type: Optional[int] tarinfo = ... # type: Optional[TarInfo] dereference = ... # type: Optional[bool] ignore_zeros = ... # type: Optional[bool] encoding = ... # type: Optional[str] errors = ... # type: str pax_headers = ... # type: Optional[Mapping[str, str]] debug = ... # type: Optional[int] errorlevel = ... # type: Optional[int] if sys.version_info < (3,): posix = ... # type: bool def __init__(self, name: Optional[_Path] = ..., mode: str = ..., fileobj: Optional[IO[bytes]] = ..., format: Optional[int] = ..., tarinfo: Optional[TarInfo] = ..., dereference: Optional[bool] = ..., ignore_zeros: Optional[bool] = ..., encoding: Optional[str] = ..., errors: str = ..., pax_headers: Optional[Mapping[str, str]] = ..., debug: Optional[int] = ..., errorlevel: Optional[int] = ..., compresslevel: Optional[int] = ...) -> None: ... def __enter__(self) -> TarFile: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[Exception], exc_tb: Optional[TracebackType]) -> bool: ... def __iter__(self) -> Iterator[TarInfo]: ... @classmethod def open(cls, name: Optional[_Path] = ..., mode: str = ..., fileobj: Optional[IO[bytes]] = ..., bufsize: int = ..., *, format: Optional[int] = ..., tarinfo: Optional[TarInfo] = ..., dereference: Optional[bool] = ..., ignore_zeros: Optional[bool] = ..., encoding: Optional[str] = ..., errors: str = ..., pax_headers: Optional[Mapping[str, str]] = ..., debug: Optional[int] = ..., errorlevel: Optional[int] = ...) -> TarFile: ... def getmember(self, name: str) -> TarInfo: ... def getmembers(self) -> List[TarInfo]: ... def getnames(self) -> List[str]: ... if sys.version_info >= (3, 5): def list(self, verbose: bool = ..., *, members: Optional[List[TarInfo]] = ...) -> None: ... else: def list(self, verbose: bool = ...) -> None: ... def next(self) -> Optional[TarInfo]: ... 
if sys.version_info >= (3, 5): def extractall(self, path: _Path = ..., members: Optional[List[TarInfo]] = ..., *, numeric_owner: bool = ...) -> None: ... else: def extractall(self, path: _Path = ..., members: Optional[List[TarInfo]] = ...) -> None: ... if sys.version_info >= (3, 5): def extract(self, member: Union[str, TarInfo], path: _Path = ..., set_attrs: bool = ..., *, numeric_owner: bool = ...) -> None: ... elif sys.version_info >= (3,): def extract(self, member: Union[str, TarInfo], path: _Path = ..., set_attrs: bool = ...) -> None: ... else: def extract(self, member: Union[str, TarInfo], path: _Path = ...) -> None: ... def extractfile(self, member: Union[str, TarInfo]) -> Optional[IO[bytes]]: ... if sys.version_info >= (3,): def add(self, name: str, arcname: Optional[str] = ..., recursive: bool = ..., exclude: Optional[Callable[[str], bool]] = ..., *, filter: Optional[Callable[[TarInfo], Optional[TarInfo]]] = ...) -> None: ... else: def add(self, name: str, arcname: Optional[str] = ..., recursive: bool = ..., exclude: Optional[Callable[[str], bool]] = ..., filter: Optional[Callable[[TarInfo], Optional[TarInfo]]] = ...) -> None: ... def addfile(self, tarinfo: TarInfo, fileobj: Optional[IO[bytes]] = ...) -> None: ... def gettarinfo(self, name: Optional[str] = ..., arcname: Optional[str] = ..., fileobj: Optional[IO[bytes]] = ...) -> TarInfo: ... def close(self) -> None: ... def is_tarfile(name: str) -> bool: ... if sys.version_info < (3,): class TarFileCompat: def __init__(self, filename: str, mode: str = ..., compression: int = ...) -> None: ... class TarError(Exception): ... class ReadError(TarError): ... class CompressionError(TarError): ... class StreamError(TarError): ... class ExtractError(TarError): ... class HeaderError(TarError): ... class TarInfo: name = ... # type: str size = ... # type: int mtime = ... # type: int mode = ... # type: int type = ... # type: bytes linkname = ... # type: str uid = ... # type: int gid = ... # type: int uname = ... # type: str gname = ... # type: str pax_headers = ... # type: Mapping[str, str] def __init__(self, name: str = ...) -> None: ... if sys.version_info >= (3,): @classmethod def frombuf(cls, buf: bytes, encoding: str, errors: str) -> TarInfo: ... else: @classmethod def frombuf(cls, buf: bytes) -> TarInfo: ... @classmethod def fromtarfile(cls, tarfile: TarFile) -> TarInfo: ... def tobuf(self, format: Optional[int] = ..., encoding: Optional[str] = ..., errors: str = ...) -> bytes: ... def isfile(self) -> bool: ... def isreg(self) -> bool: ... def isdir(self) -> bool: ... def issym(self) -> bool: ... def islnk(self) -> bool: ... def ischr(self) -> bool: ... def isblk(self) -> bool: ... def isfifo(self) -> bool: ... def isdev(self) -> bool: ... mypy-0.560/typeshed/stdlib/2and3/telnetlib.pyi0000644€tŠÔÚ€2›s®0000000725713215007212025422 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for telnetlib (Python 2 and 3) import socket import sys from typing import Any, Callable, Match, Optional, Pattern, Sequence, Tuple, Union DEBUGLEVEL = ... # type: int TELNET_PORT = ... # type: int IAC = ... # type: bytes DONT = ... # type: bytes DO = ... # type: bytes WONT = ... # type: bytes WILL = ... # type: bytes theNULL = ... # type: bytes SE = ... # type: bytes NOP = ... # type: bytes DM = ... # type: bytes BRK = ... # type: bytes IP = ... # type: bytes AO = ... # type: bytes AYT = ... # type: bytes EC = ... # type: bytes EL = ... # type: bytes GA = ... # type: bytes SB = ... # type: bytes BINARY = ... # type: bytes ECHO = ... # type: bytes RCP = ... 
# type: bytes SGA = ... # type: bytes NAMS = ... # type: bytes STATUS = ... # type: bytes TM = ... # type: bytes RCTE = ... # type: bytes NAOL = ... # type: bytes NAOP = ... # type: bytes NAOCRD = ... # type: bytes NAOHTS = ... # type: bytes NAOHTD = ... # type: bytes NAOFFD = ... # type: bytes NAOVTS = ... # type: bytes NAOVTD = ... # type: bytes NAOLFD = ... # type: bytes XASCII = ... # type: bytes LOGOUT = ... # type: bytes BM = ... # type: bytes DET = ... # type: bytes SUPDUP = ... # type: bytes SUPDUPOUTPUT = ... # type: bytes SNDLOC = ... # type: bytes TTYPE = ... # type: bytes EOR = ... # type: bytes TUID = ... # type: bytes OUTMRK = ... # type: bytes TTYLOC = ... # type: bytes VT3270REGIME = ... # type: bytes X3PAD = ... # type: bytes NAWS = ... # type: bytes TSPEED = ... # type: bytes LFLOW = ... # type: bytes LINEMODE = ... # type: bytes XDISPLOC = ... # type: bytes OLD_ENVIRON = ... # type: bytes AUTHENTICATION = ... # type: bytes ENCRYPT = ... # type: bytes NEW_ENVIRON = ... # type: bytes TN3270E = ... # type: bytes XAUTH = ... # type: bytes CHARSET = ... # type: bytes RSP = ... # type: bytes COM_PORT_OPTION = ... # type: bytes SUPPRESS_LOCAL_ECHO = ... # type: bytes TLS = ... # type: bytes KERMIT = ... # type: bytes SEND_URL = ... # type: bytes FORWARD_X = ... # type: bytes PRAGMA_LOGON = ... # type: bytes SSPI_LOGON = ... # type: bytes PRAGMA_HEARTBEAT = ... # type: bytes EXOPL = ... # type: bytes NOOPT = ... # type: bytes class Telnet: def __init__(self, host: Optional[str] = ..., port: int = ..., timeout: int = ...) -> None: ... def open(self, host: str, port: int = ..., timeout: int = ...) -> None: ... def msg(self, msg: str, *args: Any) -> None: ... def set_debuglevel(self, debuglevel: int) -> None: ... def close(self) -> None: ... def get_socket(self) -> socket.socket: ... def fileno(self) -> int: ... def write(self, buffer: bytes) -> None: ... def read_until(self, match: bytes, timeout: Optional[int] = ...) -> bytes: ... def read_all(self) -> bytes: ... def read_some(self) -> bytes: ... def read_very_eager(self) -> bytes: ... def read_eager(self) -> bytes: ... def read_lazy(self) -> bytes: ... def read_very_lazy(self) -> bytes: ... def read_sb_data(self) -> bytes: ... def set_option_negotiation_callback(self, callback: Optional[Callable[[socket.socket, bytes, bytes], Any]]) -> None: ... def process_rawq(self) -> None: ... def rawq_getchar(self) -> bytes: ... def fill_rawq(self) -> None: ... def sock_avail(self) -> bool: ... def interact(self) -> None: ... def mt_interact(self) -> None: ... def listener(self) -> None: ... def expect(self, list: Sequence[Union[Pattern[bytes], bytes]], timeout: Optional[int] = ...) -> Tuple[int, Optional[Match[bytes]], bytes]: ... if sys.version_info >= (3, 6): def __enter__(self) -> Telnet: ... def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ... mypy-0.560/typeshed/stdlib/2and3/termios.pyi0000644€tŠÔÚ€2›s®0000001472013215007212025113 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for termios from typing import IO, List, Union _FD = Union[int, IO[str]] _Attr = List[Union[int, List[bytes]]] # TODO constants not really documented B0 = ... # type: int B1000000 = ... # type: int B110 = ... # type: int B115200 = ... # type: int B1152000 = ... # type: int B1200 = ... # type: int B134 = ... # type: int B150 = ... # type: int B1500000 = ... # type: int B1800 = ... # type: int B19200 = ... # type: int B200 = ... # type: int B2000000 = ... # type: int B230400 = ... # type: int B2400 = ... # type: int B2500000 = ... 
# type: int B300 = ... # type: int B3000000 = ... # type: int B3500000 = ... # type: int B38400 = ... # type: int B4000000 = ... # type: int B460800 = ... # type: int B4800 = ... # type: int B50 = ... # type: int B500000 = ... # type: int B57600 = ... # type: int B576000 = ... # type: int B600 = ... # type: int B75 = ... # type: int B921600 = ... # type: int B9600 = ... # type: int BRKINT = ... # type: int BS0 = ... # type: int BS1 = ... # type: int BSDLY = ... # type: int CBAUD = ... # type: int CBAUDEX = ... # type: int CDSUSP = ... # type: int CEOF = ... # type: int CEOL = ... # type: int CEOT = ... # type: int CERASE = ... # type: int CFLUSH = ... # type: int CIBAUD = ... # type: int CINTR = ... # type: int CKILL = ... # type: int CLNEXT = ... # type: int CLOCAL = ... # type: int CQUIT = ... # type: int CR0 = ... # type: int CR1 = ... # type: int CR2 = ... # type: int CR3 = ... # type: int CRDLY = ... # type: int CREAD = ... # type: int CRPRNT = ... # type: int CRTSCTS = ... # type: int CS5 = ... # type: int CS6 = ... # type: int CS7 = ... # type: int CS8 = ... # type: int CSIZE = ... # type: int CSTART = ... # type: int CSTOP = ... # type: int CSTOPB = ... # type: int CSUSP = ... # type: int CWERASE = ... # type: int ECHO = ... # type: int ECHOCTL = ... # type: int ECHOE = ... # type: int ECHOK = ... # type: int ECHOKE = ... # type: int ECHONL = ... # type: int ECHOPRT = ... # type: int EXTA = ... # type: int EXTB = ... # type: int FF0 = ... # type: int FF1 = ... # type: int FFDLY = ... # type: int FIOASYNC = ... # type: int FIOCLEX = ... # type: int FIONBIO = ... # type: int FIONCLEX = ... # type: int FIONREAD = ... # type: int FLUSHO = ... # type: int HUPCL = ... # type: int ICANON = ... # type: int ICRNL = ... # type: int IEXTEN = ... # type: int IGNBRK = ... # type: int IGNCR = ... # type: int IGNPAR = ... # type: int IMAXBEL = ... # type: int INLCR = ... # type: int INPCK = ... # type: int IOCSIZE_MASK = ... # type: int IOCSIZE_SHIFT = ... # type: int ISIG = ... # type: int ISTRIP = ... # type: int IUCLC = ... # type: int IXANY = ... # type: int IXOFF = ... # type: int IXON = ... # type: int NCC = ... # type: int NCCS = ... # type: int NL0 = ... # type: int NL1 = ... # type: int NLDLY = ... # type: int NOFLSH = ... # type: int N_MOUSE = ... # type: int N_PPP = ... # type: int N_SLIP = ... # type: int N_STRIP = ... # type: int N_TTY = ... # type: int OCRNL = ... # type: int OFDEL = ... # type: int OFILL = ... # type: int OLCUC = ... # type: int ONLCR = ... # type: int ONLRET = ... # type: int ONOCR = ... # type: int OPOST = ... # type: int PARENB = ... # type: int PARMRK = ... # type: int PARODD = ... # type: int PENDIN = ... # type: int TAB0 = ... # type: int TAB1 = ... # type: int TAB2 = ... # type: int TAB3 = ... # type: int TABDLY = ... # type: int TCFLSH = ... # type: int TCGETA = ... # type: int TCGETS = ... # type: int TCIFLUSH = ... # type: int TCIOFF = ... # type: int TCIOFLUSH = ... # type: int TCION = ... # type: int TCOFLUSH = ... # type: int TCOOFF = ... # type: int TCOON = ... # type: int TCSADRAIN = ... # type: int TCSAFLUSH = ... # type: int TCSANOW = ... # type: int TCSBRK = ... # type: int TCSBRKP = ... # type: int TCSETA = ... # type: int TCSETAF = ... # type: int TCSETAW = ... # type: int TCSETS = ... # type: int TCSETSF = ... # type: int TCSETSW = ... # type: int TCXONC = ... # type: int TIOCCONS = ... # type: int TIOCEXCL = ... # type: int TIOCGETD = ... # type: int TIOCGICOUNT = ... # type: int TIOCGLCKTRMIOS = ... # type: int TIOCGPGRP = ... 
# type: int TIOCGSERIAL = ... # type: int TIOCGSOFTCAR = ... # type: int TIOCGWINSZ = ... # type: int TIOCINQ = ... # type: int TIOCLINUX = ... # type: int TIOCMBIC = ... # type: int TIOCMBIS = ... # type: int TIOCMGET = ... # type: int TIOCMIWAIT = ... # type: int TIOCMSET = ... # type: int TIOCM_CAR = ... # type: int TIOCM_CD = ... # type: int TIOCM_CTS = ... # type: int TIOCM_DSR = ... # type: int TIOCM_DTR = ... # type: int TIOCM_LE = ... # type: int TIOCM_RI = ... # type: int TIOCM_RNG = ... # type: int TIOCM_RTS = ... # type: int TIOCM_SR = ... # type: int TIOCM_ST = ... # type: int TIOCNOTTY = ... # type: int TIOCNXCL = ... # type: int TIOCOUTQ = ... # type: int TIOCPKT = ... # type: int TIOCPKT_DATA = ... # type: int TIOCPKT_DOSTOP = ... # type: int TIOCPKT_FLUSHREAD = ... # type: int TIOCPKT_FLUSHWRITE = ... # type: int TIOCPKT_NOSTOP = ... # type: int TIOCPKT_START = ... # type: int TIOCPKT_STOP = ... # type: int TIOCSCTTY = ... # type: int TIOCSERCONFIG = ... # type: int TIOCSERGETLSR = ... # type: int TIOCSERGETMULTI = ... # type: int TIOCSERGSTRUCT = ... # type: int TIOCSERGWILD = ... # type: int TIOCSERSETMULTI = ... # type: int TIOCSERSWILD = ... # type: int TIOCSER_TEMT = ... # type: int TIOCSETD = ... # type: int TIOCSLCKTRMIOS = ... # type: int TIOCSPGRP = ... # type: int TIOCSSERIAL = ... # type: int TIOCSSOFTCAR = ... # type: int TIOCSTI = ... # type: int TIOCSWINSZ = ... # type: int TOSTOP = ... # type: int VDISCARD = ... # type: int VEOF = ... # type: int VEOL = ... # type: int VEOL2 = ... # type: int VERASE = ... # type: int VINTR = ... # type: int VKILL = ... # type: int VLNEXT = ... # type: int VMIN = ... # type: int VQUIT = ... # type: int VREPRINT = ... # type: int VSTART = ... # type: int VSTOP = ... # type: int VSUSP = ... # type: int VSWTC = ... # type: int VSWTCH = ... # type: int VT0 = ... # type: int VT1 = ... # type: int VTDLY = ... # type: int VTIME = ... # type: int VWERASE = ... # type: int XCASE = ... # type: int XTABS = ... # type: int def tcgetattr(fd: _FD) -> _Attr: ... def tcsetattr(fd: _FD, when: int, attributes: _Attr) -> None: ... def tcsendbreak(fd: _FD, duration: int) -> None: ... def tcdrain(fd: _FD) -> None: ... def tcflush(fd: _FD, queue: int) -> None: ... def tcflow(fd: _FD, action: int) -> None: ... mypy-0.560/typeshed/stdlib/2and3/threading.pyi0000644€tŠÔÚ€2›s®0000001452113215007212025375 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for threading from typing import ( Any, Callable, Iterable, List, Mapping, Optional, Tuple, Type, Union, TypeVar, ) from types import FrameType, TracebackType import sys # TODO recursive type _TF = Callable[[FrameType, str, Any], Optional[Callable[..., Any]]] _PF = Callable[[FrameType, str, Any], None] _T = TypeVar('_T') def active_count() -> int: ... if sys.version_info < (3,): def activeCount() -> int: ... def current_thread() -> Thread: ... if sys.version_info < (3,): def currentThread() -> Thread: ... if sys.version_info >= (3,): def get_ident() -> int: ... def enumerate() -> List[Thread]: ... if sys.version_info >= (3, 4): def main_thread() -> Thread: ... def settrace(func: _TF) -> None: ... def setprofile(func: _PF) -> None: ... def stack_size(size: int = ...) -> int: ... if sys.version_info >= (3,): TIMEOUT_MAX = ... # type: float class ThreadError(Exception): ... # TODO: Change to a class with __getattr__ and __setattr__ # once mypy supports universal __setattr__. # See https://github.com/python/mypy/issues/521 local = ... # type: Any class Thread: name = ... # type: str ident = ... 
# type: Optional[int] daemon = ... # type: bool if sys.version_info >= (3,): def __init__(self, group: None = ..., target: Optional[Callable[..., None]] = ..., name: Optional[str] = ..., args: Iterable = ..., kwargs: Mapping[str, Any] = ..., *, daemon: Optional[bool] = ...) -> None: ... else: def __init__(self, group: None = ..., target: Optional[Callable[..., None]] = ..., name: Optional[str] = ..., args: Iterable = ..., kwargs: Mapping[str, Any] = ...) -> None: ... def start(self) -> None: ... def run(self) -> None: ... def join(self, timeout: Optional[float] = ...) -> None: ... def getName(self) -> str: ... def setName(self, name: str) -> None: ... def is_alive(self) -> bool: ... if sys.version_info < (3,): def isAlive(self) -> bool: ... def isDaemon(self) -> bool: ... def setDaemon(self, daemonic: bool) -> None: ... class _DummyThread(Thread): pass class Lock: def __init__(self) -> None: ... def __enter__(self) -> bool: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[Exception], exc_tb: Optional[TracebackType]) -> bool: ... if sys.version_info >= (3,): def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... else: def acquire(self, blocking: bool = ...) -> bool: ... def release(self) -> None: ... def locked(self) -> bool: ... class _RLock: def __init__(self) -> None: ... def __enter__(self) -> bool: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[Exception], exc_tb: Optional[TracebackType]) -> bool: ... if sys.version_info >= (3,): def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... else: def acquire(self, blocking: bool = ...) -> bool: ... def release(self) -> None: ... RLock = _RLock class Condition: def __init__(self, lock: Union[Lock, _RLock, None] = ...) -> None: ... def __enter__(self) -> bool: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[Exception], exc_tb: Optional[TracebackType]) -> bool: ... if sys.version_info >= (3,): def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... else: def acquire(self, blocking: bool = ...) -> bool: ... def release(self) -> None: ... def wait(self, timeout: Optional[float] = ...) -> bool: ... if sys.version_info >= (3,): def wait_for(self, predicate: Callable[[], _T], timeout: Optional[float] = ...) -> _T: ... def notify(self, n: int = ...) -> None: ... def notify_all(self) -> None: ... def notifyAll(self) -> None: ... class Semaphore: def __init__(self, value: int = ...) -> None: ... def __enter__(self) -> bool: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[Exception], exc_tb: Optional[TracebackType]) -> bool: ... if sys.version_info >= (3,): def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... else: def acquire(self, blocking: bool = ...) -> bool: ... def release(self) -> None: ... class BoundedSemaphore: def __init__(self, value: int = ...) -> None: ... def __enter__(self) -> bool: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[Exception], exc_tb: Optional[TracebackType]) -> bool: ... if sys.version_info >= (3,): def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... else: def acquire(self, blocking: bool = ...) -> bool: ... def release(self) -> None: ... class Event: def __init__(self) -> None: ... def is_set(self) -> bool: ... if sys.version_info < (3,): def isSet(self) -> bool: ... def set(self) -> None: ... def clear(self) -> None: ... 
def wait(self, timeout: Optional[float] = ...) -> bool: ... class Timer(Thread): if sys.version_info >= (3,): def __init__(self, interval: float, function: Callable[..., None], args: Optional[List[Any]] = ..., kwargs: Optional[Mapping[str, Any]] = ...) -> None: ... else: def __init__(self, interval: float, function: Callable[..., None], args: List[Any] = ..., kwargs: Mapping[str, Any] = ...) -> None: ... def cancel(self) -> None: ... if sys.version_info >= (3,): class Barrier: parties = ... # type: int n_waiting = ... # type: int broken = ... # type: bool def __init__(self, parties: int, action: Optional[Callable[[], None]] = ..., timeout: Optional[float] = ...) -> None: ... def wait(self, timeout: Optional[float] = ...) -> int: ... def reset(self) -> None: ... def abort(self) -> None: ... class BrokenBarrierError(RuntimeError): ... mypy-0.560/typeshed/stdlib/2and3/timeit.pyi0000644€tŠÔÚ€2›s®0000000305613215007212024724 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for timeit (Python 2 and 3) import sys from typing import Any, Callable, Dict, IO, List, Optional, Text, Tuple, Union _str = Union[str, Text] _Timer = Callable[[], float] _stmt = Union[_str, Callable[[], Any]] default_timer = ... # type: _Timer class Timer: if sys.version_info >= (3, 5): def __init__(self, stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ..., globals: Optional[Dict[str, Any]] =...) -> None: ... else: def __init__(self, stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ...) -> None: ... def print_exc(self, file: Optional[IO[str]] = ...) -> None: ... def timeit(self, number: int = ...) -> float: ... def repeat(self, repeat: int = ..., number: int = ...) -> List[float]: ... if sys.version_info >= (3, 6): def autorange(self, callback: Optional[Callable[[int, float], Any]] = ...) -> Tuple[int, float]: ... if sys.version_info >= (3, 5): def timeit(stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ..., number: int = ..., globals: Optional[Dict[str, Any]] =...) -> float: ... def repeat(stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ..., repeat: int = ..., number: int = ..., globals: Optional[Dict[str, Any]] =...) -> List[float]: ... else: def timeit(stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ..., number: int = ...) -> float: ... def repeat(stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ..., repeat: int = ..., number: int = ...) -> List[float]: ... mypy-0.560/typeshed/stdlib/2and3/token.pyi0000644€tŠÔÚ€2›s®0000000362113215007212024547 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Dict ENDMARKER = ... # type: int NAME = ... # type: int NUMBER = ... # type: int STRING = ... # type: int NEWLINE = ... # type: int INDENT = ... # type: int DEDENT = ... # type: int LPAR = ... # type: int RPAR = ... # type: int LSQB = ... # type: int RSQB = ... # type: int COLON = ... # type: int COMMA = ... # type: int SEMI = ... # type: int PLUS = ... # type: int MINUS = ... # type: int STAR = ... # type: int SLASH = ... # type: int VBAR = ... # type: int AMPER = ... # type: int LESS = ... # type: int GREATER = ... # type: int EQUAL = ... # type: int DOT = ... # type: int PERCENT = ... # type: int if sys.version_info < (3,): BACKQUOTE = ... # type: int LBRACE = ... # type: int RBRACE = ... # type: int EQEQUAL = ... # type: int NOTEQUAL = ... # type: int LESSEQUAL = ... # type: int GREATEREQUAL = ... # type: int TILDE = ... # type: int CIRCUMFLEX = ... # type: int LEFTSHIFT = ... # type: int RIGHTSHIFT = ... # type: int DOUBLESTAR = ... # type: int PLUSEQUAL = ... 
# type: int MINEQUAL = ... # type: int STAREQUAL = ... # type: int SLASHEQUAL = ... # type: int PERCENTEQUAL = ... # type: int AMPEREQUAL = ... # type: int VBAREQUAL = ... # type: int CIRCUMFLEXEQUAL = ... # type: int LEFTSHIFTEQUAL = ... # type: int RIGHTSHIFTEQUAL = ... # type: int DOUBLESTAREQUAL = ... # type: int DOUBLESLASH = ... # type: int DOUBLESLASHEQUAL = ... # type: int AT = ... # type: int if sys.version_info >= (3,): RARROW = ... # type: int ELLIPSIS = ... # type: int if sys.version_info >= (3, 5): ATEQUAL = ... # type: int AWAIT = ... # type: int ASYNC = ... # type: int OP = ... # type: int ERRORTOKEN = ... # type: int N_TOKENS = ... # type: int NT_OFFSET = ... # type: int tok_name = ... # type: Dict[int, str] def ISTERMINAL(x: int) -> bool: ... def ISNONTERMINAL(x: int) -> bool: ... def ISEOF(x: int) -> bool: ... mypy-0.560/typeshed/stdlib/2and3/trace.pyi0000644€tŠÔÚ€2›s®0000000400013215007212024515 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for trace (Python 2 and 3) import os import sys import types from typing import Any, Callable, Mapping, Optional, Sequence, Text, Tuple, TypeVar, Union _T = TypeVar('_T') _localtrace = Callable[[types.FrameType, str, Any], Callable[..., Any]] if sys.version_info >= (3, 6): _Path = Union[Text, os.PathLike] else: _Path = Text class CoverageResults: def update(self, other: CoverageResults) -> None: ... def write_results(self, show_missing: bool = ..., summary: bool = ..., coverdir: Optional[_Path] = ...) -> None: ... def write_results_file(self, path: _Path, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: Optional[str] = ...) -> Tuple[int, int]: ... class Trace: def __init__(self, count: int = ..., trace: int = ..., countfuncs: int = ..., countcallers: int = ..., ignoremods: Sequence[str] = ..., ignoredirs: Sequence[str] = ..., infile: Optional[_Path] = ..., outfile: Optional[_Path] = ..., timing: bool = ...) -> None: ... def run(self, cmd: Union[str, types.CodeType]) -> None: ... def runctx(self, cmd: Union[str, types.CodeType], globals: Optional[Mapping[str, Any]] = ..., locals: Optional[Mapping[str, Any]] = ...) -> None: ... def runfunc(self, func: Callable[..., _T], *args: Any, **kw: Any) -> _T: ... def file_module_function_of(self, frame: types.FrameType) -> Tuple[str, Optional[str], str]: ... def globaltrace_trackcallers(self, frame: types.FrameType, why: str, arg: Any) -> None: ... def globaltrace_countfuncs(self, frame: types.FrameType, why: str, arg: Any) -> None: ... def globaltrace_lt(self, frame: types.FrameType, why: str, arg: Any) -> None: ... def localtrace_trace_and_count(self, frame: types.FrameType, why: str, arg: Any) -> _localtrace: ... def localtrace_trace(self, frame: types.FrameType, why: str, arg: Any) -> _localtrace: ... def localtrace_count(self, frame: types.FrameType, why: str, arg: Any) -> _localtrace: ... def results(self) -> CoverageResults: ... mypy-0.560/typeshed/stdlib/2and3/traceback.pyi0000644€tŠÔÚ€2›s®0000001313213215007212025344 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for traceback from typing import Any, Dict, Generator, IO, Iterator, List, Mapping, Optional, Tuple, Type, Iterable from types import FrameType, TracebackType import sys _PT = Tuple[str, int, str, Optional[str]] def print_tb(tb: Optional[TracebackType], limit: Optional[int] = ..., file: Optional[IO[str]] = ...) -> None: ... 
if sys.version_info >= (3,): def print_exception(etype: Type[BaseException], value: BaseException, tb: Optional[TracebackType], limit: Optional[int] = ..., file: Optional[IO[str]] = ..., chain: bool = ...) -> None: ... def print_exc(limit: Optional[int] = ..., file: Optional[IO[str]] = ..., chain: bool = ...) -> None: ... def print_last(limit: Optional[int] = ..., file: Optional[IO[str]] = ..., chain: bool = ...) -> None: ... else: def print_exception(etype: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[TracebackType], limit: Optional[int] = ..., file: Optional[IO[str]] = ...) -> None: ... def print_exc(limit: Optional[int] = ..., file: Optional[IO[str]] = ...) -> None: ... def print_last(limit: Optional[int] = ..., file: Optional[IO[str]] = ...) -> None: ... def print_stack(f: Optional[FrameType] = ..., limit: Optional[int] = ..., file: Optional[IO[str]] = ...) -> None: ... if sys.version_info >= (3, 5): def extract_tb(tb: Optional[TracebackType], limit: Optional[int] = ...) -> StackSummary: ... def extract_stack(f: Optional[FrameType] = ..., limit: Optional[int] = ...) -> StackSummary: ... def format_list(extracted_list: List[FrameSummary]) -> List[str]: ... else: def extract_tb(tb: Optional[TracebackType], limit: Optional[int] = ...) -> List[_PT]: ... def extract_stack(f: Optional[FrameType] = ..., limit: Optional[int] = ...) -> List[_PT]: ... def format_list(extracted_list: List[_PT]) -> List[str]: ... def format_exception_only(etype: Type[BaseException], value: BaseException) -> List[str]: ... if sys.version_info >= (3,): def format_exception(etype: Type[BaseException], value: BaseException, tb: TracebackType, limit: Optional[int] = ..., chain: bool = ...) -> List[str]: ... def format_exc(limit: Optional[int] = ..., chain: bool = ...) -> str: ... else: def format_exception(etype: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[TracebackType], limit: Optional[int] = ...) -> List[str]: ... def format_exc(limit: Optional[int] = ...) -> str: ... def format_tb(tb: Optional[TracebackType], limit: Optional[int] = ...) -> List[str]: ... def format_stack(f: Optional[FrameType] = ..., limit: Optional[int] = ...) -> List[str]: ... if sys.version_info >= (3, 4): def clear_frames(tb: TracebackType) -> None: ... if sys.version_info >= (3, 5): def walk_stack(f: Optional[FrameType]) -> Iterator[Tuple[FrameType, int]]: ... def walk_tb(tb: Optional[TracebackType]) -> Iterator[Tuple[FrameType, int]]: ... if sys.version_info < (3,): def tb_lineno(tb: TracebackType) -> int: ... if sys.version_info >= (3, 5): class TracebackException: __cause__ = ... # type:TracebackException __context__ = ... # type:TracebackException __suppress_context__ = ... # type: bool stack = ... # type: StackSummary exc_type = ... # type: Type[BaseException] filename = ... # type: str lineno = ... # type: int text = ... # type: str offset = ... # type: int msg = ... # type: str def __init__(self, exc_type: Type[BaseException], exc_value: BaseException, exc_traceback: TracebackType, *, limit: Optional[int] = ..., lookup_lines: bool = ..., capture_locals: bool = ...) -> None: ... @classmethod def from_exception(cls, exc: BaseException, *, limit: Optional[int] = ..., lookup_lines: bool = ..., capture_locals: bool = ...) -> TracebackException: ... def format(self, *, chain: bool = ...) -> Generator[str, None, None]: ... def format_exception_only(self) -> Generator[str, None, None]: ... 
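# Usage sketch (not part of the stub): TracebackException captures an
# exception so it can be formatted later without holding frame references.
# The names below are illustrative only.
#
#     import traceback
#     try:
#         1 / 0
#     except ZeroDivisionError as exc:
#         te = traceback.TracebackException.from_exception(exc)
#         print("".join(te.format()))                 # full traceback text
#         print("".join(te.format_exception_only()))  # just the final line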
if sys.version_info >= (3, 5): class FrameSummary(Iterable): filename: str lineno: int name: str line: str locals: Optional[Dict[str, str]] def __init__(self, filename: str, lineno: int, name: str, lookup_line: bool = ..., locals: Optional[Mapping[str, str]] = ..., line: Optional[int] = ...) -> None: ... # TODO: more precise typing for __getitem__ and __iter__, # for a namedtuple-like view on (filename, lineno, name, str). def __getitem__(self, i: int) -> Any: ... def __iter__(self) -> Iterator[Any]: ... class StackSummary(List[FrameSummary]): @classmethod def extract(cls, frame_gen: Generator[Tuple[FrameType, int], None, None], *, limit: Optional[int] = ..., lookup_lines: bool = ..., capture_locals: bool = ...) -> StackSummary: ... @classmethod def from_list(cls, a_list: List[_PT]) -> StackSummary: ... def format(self) -> List[str]: ... mypy-0.560/typeshed/stdlib/2and3/tty.pyi0000644€tŠÔÚ€2›s®0000000053213215007212024245 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for tty (Python 3.6) # XXX: Undocumented integer constants IFLAG = ... # type: int OFLAG = ... # type: int CFLAG = ... # type: int LFLAG = ... # type: int ISPEED = ... # type: int OSPEED = ... # type: int CC = ... # type: int def setraw(fd: int, when: int = ...) -> None: ... def setcbreak(fd: int, when: int = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/unicodedata.pyi0000644€tŠÔÚ€2›s®0000000353013215007212025706 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for unicodedata (Python 2.7 and 3.4) from typing import Any, Text, TypeVar, Union ucd_3_2_0 = ... # type: UCD ucnhash_CAPI = ... # type: Any unidata_version = ... # type: str _default = TypeVar('_default') def bidirectional(__chr: Text) -> Text: ... def category(__chr: Text) -> Text: ... def combining(__chr: Text) -> int: ... def decimal(__chr: Text, __default: _default=...) -> Union[int, _default]: ... def decomposition(__chr: Text) -> Text: ... def digit(__chr: Text, __default: _default=...) -> Union[int, _default]: ... def east_asian_width(__chr: Text) -> Text: ... def lookup(__name: Union[Text, bytes]) -> Text: ... def mirrored(__chr: Text) -> int: ... def name(__chr: Text, __default: _default=...) -> Union[Text, _default]: ... def normalize(__form: Text, __unistr: Text) -> Text: ... def numeric(__chr: Text, __default: _default=...) -> Union[float, _default]: ... class UCD(object): # The methods below are constructed from the same array in C # (unicodedata_functions) and hence identical to the methods above. unidata_version = ... # type: str def bidirectional(self, __chr: Text) -> str: ... def category(self, __chr: Text) -> str: ... def combining(self, __chr: Text) -> int: ... def decimal(self, __chr: Text, __default: _default=...) -> Union[int, _default]: ... def decomposition(self, __chr: Text) -> str: ... def digit(self, __chr: Text, __default: _default=...) -> Union[int, _default]: ... def east_asian_width(self, __chr: Text) -> str: ... def lookup(self, __name: Union[Text, bytes]) -> Text: ... def mirrored(self, __chr: Text) -> int: ... def name(self, __chr: Text, __default: _default=...) -> Union[Text, _default]: ... def normalize(self, __form: Text, __unistr: Text) -> Text: ... def numeric(self, __chr: Text, __default: _default=...) -> Union[float, _default]: ... mypy-0.560/typeshed/stdlib/2and3/uu.pyi0000644€tŠÔÚ€2›s®0000000056613215007212024065 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for uu (Python 2 and 3) from typing import BinaryIO, Union, Optional, Text _File = Union[Text, BinaryIO] class Error(Exception): ... 
def encode(in_file: _File, out_file: _File, name: Optional[str] = ..., mode: Optional[int] = ...) -> None: ... def decode(in_file: _File, out_file: Optional[_File] = ..., mode: Optional[int] = ..., quiet: int = ...) -> None: ... mypy-0.560/typeshed/stdlib/2and3/uuid.pyi0000644€tŠÔÚ€2›s®0000000550213215007212024375 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for uuid import sys from typing import Tuple, Optional, Any # Because UUID has properties called int and bytes we need to rename these temporarily. _Int = int _Bytes = bytes _FieldsType = Tuple[int, int, int, int, int, int] class UUID: def __init__(self, hex: Optional[str] = ..., bytes: Optional[_Bytes] = ..., bytes_le: Optional[_Bytes] = ..., fields: Optional[_FieldsType] = ..., int: Optional[_Int] = ..., version: Optional[_Int] = ...) -> None: ... @property def bytes(self) -> _Bytes: ... @property def bytes_le(self) -> _Bytes: ... @property def clock_seq(self) -> _Int: ... @property def clock_seq_hi_variant(self) -> _Int: ... @property def clock_seq_low(self) -> _Int: ... @property def fields(self) -> _FieldsType: ... @property def hex(self) -> str: ... @property def int(self) -> _Int: ... @property def node(self) -> _Int: ... @property def time(self) -> _Int: ... @property def time_hi_version(self) -> _Int: ... @property def time_low(self) -> _Int: ... @property def time_mid(self) -> _Int: ... @property def urn(self) -> str: ... @property def variant(self) -> str: ... @property def version(self) -> Optional[_Int]: ... if sys.version_info >= (3,): def __eq__(self, other: Any) -> bool: ... def __lt__(self, other: Any) -> bool: ... def __le__(self, other: Any) -> bool: ... def __gt__(self, other: Any) -> bool: ... def __ge__(self, other: Any) -> bool: ... else: def get_bytes(self) -> _Bytes: ... def get_bytes_le(self) -> _Bytes: ... def get_clock_seq(self) -> _Int: ... def get_clock_seq_hi_variant(self) -> _Int: ... def get_clock_seq_low(self) -> _Int: ... def get_fields(self) -> _FieldsType: ... def get_hex(self) -> str: ... def get_node(self) -> _Int: ... def get_time(self) -> _Int: ... def get_time_hi_version(self) -> _Int: ... def get_time_low(self) -> _Int: ... def get_time_mid(self) -> _Int: ... def get_urn(self) -> str: ... def get_variant(self) -> str: ... def get_version(self) -> Optional[_Int]: ... def __cmp__(self, other: Any) -> _Int: ... def getnode() -> int: ... def uuid1(node: Optional[_Int] = ..., clock_seq: Optional[_Int] = ...) -> UUID: ... def uuid3(namespace: UUID, name: str) -> UUID: ... def uuid4() -> UUID: ... def uuid5(namespace: UUID, name: str) -> UUID: ... NAMESPACE_DNS = ... # type: UUID NAMESPACE_URL = ... # type: UUID NAMESPACE_OID = ... # type: UUID NAMESPACE_X500 = ... # type: UUID RESERVED_NCS = ... # type: str RFC_4122 = ... # type: str RESERVED_MICROSOFT = ... # type: str RESERVED_FUTURE = ... # type: str mypy-0.560/typeshed/stdlib/2and3/warnings.pyi0000644€tŠÔÚ€2›s®0000000347313215007212025264 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for warnings from typing import Any, Dict, List, NamedTuple, Optional, TextIO, Tuple, Type, Union from types import ModuleType, TracebackType def warn(message: Union[str, Warning], category: Optional[Type[Warning]] = ..., stacklevel: int = ...) -> None: ... def warn_explicit(message: Union[str, Warning], category: Type[Warning], filename: str, lineno: int, module: Optional[str] = ..., registry: Optional[Dict[Union[str, Tuple[str, Type[Warning], int]], int]] = ..., module_globals: Optional[Dict[str, Any]] = ...) -> None: ... 
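# Usage sketch (not part of the stub): warn() together with
# catch_warnings(record=True) is the usual way to emit and then inspect
# warnings in tests. old_api is a hypothetical function used only here.
#
#     import warnings
#
#     def old_api() -> None:
#         warnings.warn("old_api() is deprecated", DeprecationWarning, stacklevel=2)
#
#     with warnings.catch_warnings(record=True) as caught:
#         warnings.simplefilter("always")
#         old_api()
#         assert caught is not None and caught[0].category is DeprecationWarning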
def showwarning(message: str, category: Type[Warning], filename: str, lineno: int, file: Optional[TextIO] = ..., line: Optional[str] = ...) -> None: ... def formatwarning(message: str, category: Type[Warning], filename: str, lineno: int, line: Optional[str] = ...) -> str: ... def filterwarnings(action: str, message: str = ..., category: Type[Warning] = ..., module: str = ..., lineno: int = ..., append: bool = ...) -> None: ... def simplefilter(action: str, category: Type[Warning] = ..., lineno: int = ..., append: bool = ...) -> None: ... def resetwarnings() -> None: ... _Record = NamedTuple('_Record', [('message', str), ('category', Type[Warning]), ('filename', str), ('lineno', int), ('file', Optional[TextIO]), ('line', Optional[str])] ) class catch_warnings: def __init__(self, *, record: bool = ..., module: Optional[ModuleType] = ...) -> None: ... def __enter__(self) -> Optional[List[_Record]]: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[Exception], exc_tb: Optional[TracebackType]) -> bool: ... mypy-0.560/typeshed/stdlib/2and3/wave.pyi0000644€tŠÔÚ€2›s®0000000526013215007212024372 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for wave (Python 2 and 3) import sys from mypy_extensions import NoReturn from typing import Any, NamedTuple, Optional, Text, BinaryIO, Union, Tuple _File = Union[Text, BinaryIO] class Error(Exception): ... WAVE_FORMAT_PCM = ... # type: int if sys.version_info < (3, 0): _wave_params = Tuple[int, int, int, int, str, str] else: _wave_params = NamedTuple('_wave_params', [ ('nchannels', int), ('sampwidth', int), ('framerate', int), ('nframes', int), ('comptype', str), ('compname', str), ]) class Wave_read: def __init__(self, f: _File) -> None: ... if sys.version_info >= (3, 0): def __enter__(self) -> Wave_read: ... def __exit__(self, *args: Any) -> None: ... def getfp(self) -> Optional[BinaryIO]: ... def rewind(self) -> None: ... def close(self) -> None: ... def tell(self) -> int: ... def getnchannels(self) -> int: ... def getnframes(self) -> int: ... def getsampwidth(self) -> int: ... def getframerate(self) -> int: ... def getcomptype(self) -> str: ... def getcompname(self) -> str: ... def getparams(self) -> _wave_params: ... def getmarkers(self) -> None: ... def getmark(self, id: Any) -> NoReturn: ... def setpos(self, pos: int) -> None: ... def readframes(self, nframes: int) -> bytes: ... class Wave_write: def __init__(self, f: _File) -> None: ... if sys.version_info >= (3, 0): def __enter__(self) -> Wave_write: ... def __exit__(self, *args: Any) -> None: ... def setnchannels(self, nchannels: int) -> None: ... def getnchannels(self) -> int: ... def setsampwidth(self, sampwidth: int) -> None: ... def getsampwidth(self) -> int: ... def setframerate(self, framerate: float) -> None: ... def getframerate(self) -> int: ... def setnframes(self, nframes: int) -> None: ... def getnframes(self) -> int: ... def setcomptype(self, comptype: str, compname: str) -> None: ... def getcomptype(self) -> str: ... def getcompname(self) -> str: ... def setparams(self, params: _wave_params) -> None: ... def getparams(self) -> _wave_params: ... def setmark(self, id: Any, pos: Any, name: Any) -> NoReturn: ... def getmark(self, id: Any) -> NoReturn: ... def getmarkers(self) -> None: ... def tell(self) -> int: ... # should be any bytes-like object after 3.4, but we don't have a type for that def writeframesraw(self, data: bytes) -> None: ... def writeframes(self, data: bytes) -> None: ... def close(self) -> None: ... 
# Returns a Wave_read if mode is rb and Wave_write if mode is wb def open(f: _File, mode: Optional[str] = ...) -> Any: ... openfp = open mypy-0.560/typeshed/stdlib/2and3/weakref.pyi0000644€tŠÔÚ€2›s®0000001030013215007212025043 0ustar jukkaDROPBOX\Domain Users00000000000000import sys import types from typing import ( TypeVar, Generic, Any, Callable, overload, Mapping, Iterator, Tuple, Iterable, Optional, Type, MutableMapping, Union, List, Dict ) from _weakref import ( getweakrefcount as getweakrefcount, getweakrefs as getweakrefs, ref as ref, proxy as proxy, CallableProxyType as CallableProxyType, ProxyType as ProxyType, ReferenceType as ReferenceType) from _weakrefset import WeakSet as WeakSet if sys.version_info < (3, 0): from exceptions import ReferenceError as ReferenceError _S = TypeVar('_S') _T = TypeVar('_T') _KT = TypeVar('_KT') _VT = TypeVar('_VT') ProxyTypes: Tuple[Type[Any], ...] if sys.version_info >= (3, 4): class WeakMethod(ref[types.MethodType]): def __new__(cls, meth: types.MethodType, callback: Optional[Callable[[types.MethodType], Any]] = ...) -> WeakMethod: ... def __call__(self) -> Optional[types.MethodType]: ... class WeakValueDictionary(MutableMapping[_KT, _VT]): @overload def __init__(self) -> None: ... @overload def __init__(self, __map: Union[Mapping[_KT, _VT], Iterable[Tuple[_KT, _VT]]], **kwargs: _VT) -> None: ... def __len__(self) -> int: ... def __getitem__(self, k: _KT) -> _VT: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... def __delitem__(self, v: _KT) -> None: ... if sys.version_info < (3, 0): def has_key(self, key: object) -> bool: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_KT]: ... def __str__(self) -> str: ... def copy(self) -> WeakValueDictionary[_KT, _VT]: ... if sys.version_info < (3, 0): def keys(self) -> List[_KT]: ... def values(self) -> List[_VT]: ... def items(self) -> List[Tuple[_KT, _VT]]: ... def iterkeys(self) -> Iterator[_KT]: ... def itervalues(self) -> Iterator[_VT]: ... def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ... else: # These are incompatible with Mapping def keys(self) -> Iterator[_KT]: ... # type: ignore def values(self) -> Iterator[_VT]: ... # type: ignore def items(self) -> Iterator[Tuple[_KT, _VT]]: ... # type: ignore def itervaluerefs(self) -> Iterator[KeyedRef[_KT, _VT]]: ... def valuerefs(self) -> List[KeyedRef[_KT, _VT]]: ... class KeyedRef(ref[_T], Generic[_KT, _T]): key: _KT def __init__(self, ob: _T, callback: Callable[[_T], Any], key: _KT) -> None: ... class WeakKeyDictionary(MutableMapping[_KT, _VT]): @overload def __init__(self) -> None: ... @overload def __init__(self, __map: Union[Mapping[_KT, _VT], Iterable[Tuple[_KT, _VT]]], **kwargs: _VT) -> None: ... def __len__(self) -> int: ... def __getitem__(self, k: _KT) -> _VT: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... def __delitem__(self, v: _KT) -> None: ... if sys.version_info < (3, 0): def has_key(self, key: object) -> bool: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_KT]: ... def __str__(self) -> str: ... def copy(self) -> WeakKeyDictionary[_KT, _VT]: ... if sys.version_info < (3, 0): def keys(self) -> List[_KT]: ... def values(self) -> List[_VT]: ... def items(self) -> List[Tuple[_KT, _VT]]: ... def iterkeys(self) -> Iterator[_KT]: ... def itervalues(self) -> Iterator[_VT]: ... def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ... def iterkeyrefs(self) -> Iterator[ref[_KT]]: ... else: # These are incompatible with Mapping def keys(self) -> Iterator[_KT]: ... 
# type: ignore def values(self) -> Iterator[_VT]: ... # type: ignore def items(self) -> Iterator[Tuple[_KT, _VT]]: ... # type: ignore def keyrefs(self) -> List[ref[_KT]]: ... if sys.version_info >= (3, 4): class finalize: def __init__(self, obj: _S, func: Callable[..., _T], *args: Any, **kwargs: Any) -> None: ... def __call__(self, _: Any = ...) -> Optional[_T]: ... def detach(self) -> Optional[Tuple[_S, _T, Tuple[Any, ...], Dict[str, Any]]]: ... def peek(self) -> Optional[Tuple[_S, _T, Tuple[Any, ...], Dict[str, Any]]]: ... alive: bool atexit: bool mypy-0.560/typeshed/stdlib/2and3/webbrowser.pyi0000644€tŠÔÚ€2›s®0000000710513215007212025611 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Optional, Callable, List, Text, Union, Sequence class Error(Exception): ... def register(name: Text, klass: Optional[Callable[[], BaseBrowser]], instance: BaseBrowser = ..., update_tryorder: int = ...) -> None: ... def get(using: Optional[Text] = ...) -> BaseBrowser: ... def open(url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... def open_new(url: Text) -> bool: ... def open_new_tab(url: Text) -> bool: ... class BaseBrowser: args = ... # type: List[str] name = ... # type: str basename = ... # type: str def __init__(self, name: Text = ...) -> None: ... def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... def open_new(self, url: Text) -> bool: ... def open_new_tab(self, url: Text) -> bool: ... class GenericBrowser(BaseBrowser): args = ... # type: List[str] name = ... # type: str basename = ... # type: str def __init__(self, name: Union[Text, Sequence[Text]]) -> None: ... def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... class BackgroundBrowser(GenericBrowser): def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... class UnixBrowser(BaseBrowser): raise_opts = ... # type: List[str] background = ... # type: bool redirect_stdout = ... # type: bool remote_args = ... # type: List[str] remote_action = ... # type: str remote_action_newwin = ... # type: str remote_action_newtab = ... # type: str def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... class Mozilla(UnixBrowser): raise_opts = ... # type: List[str] remote_args = ... # type: List[str] remote_action = ... # type: str remote_action_newwin = ... # type: str remote_action_newtab = ... # type: str background = ... # type: bool class Galeon(UnixBrowser): raise_opts = ... # type: List[str] remote_args = ... # type: List[str] remote_action = ... # type: str remote_action_newwin = ... # type: str background = ... # type: bool if sys.version_info[:2] == (2, 7) or sys.version_info >= (3, 3): class Chrome(UnixBrowser): remote_args = ... # type: List[str] remote_action = ... # type: str remote_action_newwin = ... # type: str remote_action_newtab = ... # type: str background = ... # type: bool class Opera(UnixBrowser): raise_opts = ... # type: List[str] remote_args = ... # type: List[str] remote_action = ... # type: str remote_action_newwin = ... # type: str remote_action_newtab = ... # type: str background = ... # type: bool class Elinks(UnixBrowser): remote_args = ... # type: List[str] remote_action = ... # type: str remote_action_newwin = ... # type: str remote_action_newtab = ... # type: str background = ... # type: bool redirect_stdout = ... # type: bool class Konqueror(BaseBrowser): def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... 
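# Usage sketch (not part of the stub): the module-level helpers declared above
# are the usual entry points; "firefox" is only an example controller name and
# may not be registered on a given system.
#
#     import webbrowser
#     webbrowser.open("https://example.com", new=2)   # new=2: prefer a new tab
#     controller = webbrowser.get("firefox")          # raises webbrowser.Error if unknown
#     controller.open_new_tab("https://example.com")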
class Grail(BaseBrowser): def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... class WindowsDefault(BaseBrowser): def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... class MacOSX(BaseBrowser): name = ... # type: str def __init__(self, name: Text) -> None: ... def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... class MacOSXOSAScript(BaseBrowser): def __init__(self, name: Text) -> None: ... def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ... mypy-0.560/typeshed/stdlib/2and3/xdrlib.pyi0000644€tŠÔÚ€2›s®0000000447713215007212024725 0ustar jukkaDROPBOX\Domain Users00000000000000# Structs for xdrlib (Python 2 and 3) from typing import Callable, List, Sequence, TypeVar _T = TypeVar('_T') class Error(Exception): msg = ... # type: str def __init__(self, msg: str) -> None: ... class ConversionError(Error): ... class Packer: def __init__(self) -> None: ... def reset(self) -> None: ... def get_buffer(self) -> bytes: ... def get_buf(self) -> bytes: ... def pack_uint(self, x: int) -> None: ... def pack_int(self, x: int) -> None: ... def pack_enum(self, x: int) -> None: ... def pack_bool(self, x: bool) -> None: ... def pack_uhyper(self, x: int) -> None: ... def pack_hyper(self, x: int) -> None: ... def pack_float(self, x: float) -> None: ... def pack_double(self, x: float) -> None: ... def pack_fstring(self, n: int, s: bytes) -> None: ... def pack_fopaque(self, n: int, s: bytes) -> None: ... def pack_string(self, s: bytes) -> None: ... def pack_opaque(self, s: bytes) -> None: ... def pack_bytes(self, s: bytes) -> None: ... def pack_list(self, list: Sequence[_T], pack_item: Callable[[_T], None]) -> None: ... def pack_farray(self, n: int, list: Sequence[_T], pack_item: Callable[[_T], None]) -> None: ... def pack_array(self, list: Sequence[_T], pack_item: Callable[[_T], None]) -> None: ... class Unpacker: def __init__(self, data: bytes) -> None: ... def reset(self, data: bytes) -> None: ... def get_position(self) -> int: ... def set_position(self, position: int) -> None: ... def get_buffer(self) -> bytes: ... def done(self) -> None: ... def unpack_uint(self) -> int: ... def unpack_int(self) -> int: ... def unpack_enum(self) -> int: ... def unpack_bool(self) -> bool: ... def unpack_uhyper(self) -> int: ... def unpack_hyper(self) -> int: ... def unpack_float(self) -> float: ... def unpack_double(self) -> float: ... def unpack_fstring(self, n: int) -> bytes: ... def unpack_fopaque(self, n: int) -> bytes: ... def unpack_string(self) -> bytes: ... def unpack_opaque(self) -> bytes: ... def unpack_bytes(self) -> bytes: ... def unpack_list(self, unpack_item: Callable[[], _T]) -> List[_T]: ... def unpack_farray(self, n: int, unpack_item: Callable[[], _T]) -> List[_T]: ... def unpack_array(self, unpack_item: Callable[[], _T]) -> List[_T]: ... 
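# Usage sketch exercising the Packer/Unpacker signatures stubbed above
# (not part of the stub itself).
import xdrlib

p = xdrlib.Packer()
p.pack_int(-7)
p.pack_string(b"hello")
p.pack_array([1, 2, 3], p.pack_uint)
data = p.get_buffer()

u = xdrlib.Unpacker(data)
assert u.unpack_int() == -7
assert u.unpack_string() == b"hello"
assert u.unpack_array(u.unpack_uint) == [1, 2, 3]
u.done()  # raises xdrlib.Error if unread data remains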
mypy-0.560/typeshed/stdlib/2and3/xml/
mypy-0.560/typeshed/stdlib/2and3/xml/__init__.pyi
mypy-0.560/typeshed/stdlib/2and3/xml/etree/
mypy-0.560/typeshed/stdlib/2and3/xml/etree/__init__.pyi
mypy-0.560/typeshed/stdlib/2and3/xml/etree/cElementTree.pyi
# Stubs for xml.etree.cElementTree (Python 3.4)

from xml.etree.ElementTree import *  # noqa: F403
mypy-0.560/typeshed/stdlib/2and3/xml/etree/ElementInclude.pyi
# Stubs for xml.etree.ElementInclude (Python 3.4)

from typing import Union, Optional, Callable
from xml.etree.ElementTree import Element

XINCLUDE = ...  # type: str
XINCLUDE_INCLUDE = ...  # type: str
XINCLUDE_FALLBACK = ...  # type: str

class FatalIncludeError(SyntaxError): ...

def default_loader(href: Union[str, bytes, int], parse: str, encoding: Optional[str]=...) -> Union[str, Element]: ...

# TODO: loader is of type default_loader ie it takes a callable that has the
# same signature as default_loader. But default_loader has a keyword argument
# which can't be represented using Callable...
def include(elem: Element, loader: Callable[..., Union[str, Element]]=...) -> None: ...
mypy-0.560/typeshed/stdlib/2and3/xml/etree/ElementPath.pyi
# Stubs for xml.etree.ElementPath (Python 3.4)

from typing import Pattern, Dict, Generator, Tuple, List, Union, TypeVar, Callable, Optional
from xml.etree.ElementTree import Element

xpath_tokenizer_re = ...  # type: Pattern

_token = Tuple[str, str]
_next = Callable[[], _token]
_callback = Callable[['_SelectorContext', List[Element]], Generator[Element, None, None]]

def xpath_tokenizer(pattern: str, namespaces: Dict[str, str]=...) -> Generator[_token, None, None]: ...
def get_parent_map(context: '_SelectorContext') -> Dict[Element, Element]: ...
def prepare_child(next: _next, token: _token) -> _callback: ...
def prepare_star(next: _next, token: _token) -> _callback: ...
def prepare_self(next: _next, token: _token) -> _callback: ...
def prepare_descendant(next: _next, token: _token) -> _callback: ...
def prepare_parent(next: _next, token: _token) -> _callback: ...
def prepare_predicate(next: _next, token: _token) -> _callback: ...

ops = ...  # type: Dict[str, Callable[[_next, _token], _callback]]

class _SelectorContext:
    parent_map = ...  # type: Dict[Element, Element]
    root = ...  # type: Element
    def __init__(self, root: Element) -> None: ...

_T = TypeVar('_T')

def iterfind(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
def find(elem: Element, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
def findall(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
def findtext(elem: Element, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
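# Usage sketch (not part of the stub): the module-level helpers above back
# Element.find/findall/findtext, and can also be called directly.
import xml.etree.ElementTree as ET
from xml.etree import ElementPath

root = ET.fromstring("<doc><item id='1'/><item id='2'/></doc>")
assert len(ElementPath.findall(root, "item")) == 2
assert ElementPath.find(root, "item[@id='2']") is not None
assert ElementPath.findtext(root, "missing", default="n/a") == "n/a"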
mypy-0.560/typeshed/stdlib/2and3/xml/etree/ElementTree.pyi0000644€tŠÔÚ€2›s®0000002172213215007212027546 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for xml.etree.ElementTree from typing import Any, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, KeysView, ItemsView, Generator, Text import io import sys VERSION = ... # type: str class ParseError(SyntaxError): ... def iselement(element: 'Element') -> bool: ... _T = TypeVar('_T') # Type for parser inputs. Parser will accept any unicode/str/bytes and coerce, # and this is true in py2 and py3 (even fromstringlist() in python3 can be # called with a heterogeneous list) _parser_input_type = Union[bytes, Text] # Type for individual tag/attr/ns/text values in args to most functions. # In py2, the library accepts str or unicode everywhere and coerces # aggressively. # In py3, bytes is not coerced to str and so use of bytes is probably an error, # so we exclude it. (why? the parser never produces bytes when it parses XML, # so e.g., element.get(b'name') will always return None for parsed XML, even if # there is a 'name' attribute.) _str_argument_type = Union[str, Text] # Type for return values from individual tag/attr/text values and serialization if sys.version_info >= (3,): # note: in python3, everything comes out as str, yay: _str_result_type = str # unfortunately, tostring and tostringlist can return either bytes or str # depending on the value of `encoding` parameter. Client code knows best: _tostring_result_type = Any else: # in python2, if the tag/attribute/text wasn't decode-able as ascii, it # comes out as a unicode string; otherwise it comes out as str. (see # _fixtext function in the source). Client code knows best: _str_result_type = Any # On the bright side, tostring and tostringlist always return bytes: _tostring_result_type = bytes class Element(Sequence['Element']): tag = ... # type: _str_result_type attrib = ... # type: Dict[_str_result_type, _str_result_type] text = ... # type: Optional[_str_result_type] tail = ... # type: Optional[_str_result_type] def __init__(self, tag: Union[_str_argument_type, Callable[..., 'Element']], attrib: Dict[_str_argument_type, _str_argument_type]=..., **extra: _str_argument_type) -> None: ... def append(self, subelement: 'Element') -> None: ... def clear(self) -> None: ... def copy(self) -> 'Element': ... def extend(self, elements: Sequence['Element']) -> None: ... def find(self, path: _str_argument_type, namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> Optional['Element']: ... def findall(self, path: _str_argument_type, namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> List['Element']: ... def findtext(self, path: _str_argument_type, default: _T=..., namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> Union[_T, _str_result_type]: ... def get(self, key: _str_argument_type, default: _T=...) -> Union[_str_result_type, _T]: ... def getchildren(self) -> List['Element']: ... def getiterator(self, tag: _str_argument_type=...) -> List['Element']: ... if sys.version_info >= (3, 2): def insert(self, index: int, subelement: 'Element') -> None: ... else: def insert(self, index: int, element: 'Element') -> None: ... def items(self) -> ItemsView[_str_result_type, _str_result_type]: ... def iter(self, tag: _str_argument_type=...) -> Generator['Element', None, None]: ... def iterfind(self, path: _str_argument_type, namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> List['Element']: ... 
def itertext(self) -> Generator[_str_result_type, None, None]: ... def keys(self) -> KeysView[_str_result_type]: ... def makeelement(self, tag: _str_argument_type, attrib: Dict[_str_argument_type, _str_argument_type]) -> 'Element': ... def remove(self, subelement: 'Element') -> None: ... def set(self, key: _str_argument_type, value: _str_argument_type) -> None: ... def __bool__(self) -> bool: ... def __delitem__(self, index: int) -> None: ... def __getitem__(self, index) -> 'Element': ... def __len__(self) -> int: ... def __setitem__(self, index: int, element: 'Element') -> None: ... def SubElement(parent: Element, tag: _str_argument_type, attrib: Dict[_str_argument_type, _str_argument_type]=..., **extra: _str_argument_type) -> Element: ... def Comment(text: _str_argument_type=...) -> Element: ... def ProcessingInstruction(target: _str_argument_type, text: _str_argument_type=...) -> Element: ... PI = ... # type: Callable[..., Element] class QName: text = ... # type: str def __init__(self, text_or_uri: _str_argument_type, tag: _str_argument_type=...) -> None: ... _file_or_filename = Union[str, bytes, int, IO[Any]] class ElementTree: def __init__(self, element: Element=..., file: _file_or_filename=...) -> None: ... def getroot(self) -> Element: ... def parse(self, source: _file_or_filename, parser: 'XMLParser'=...) -> Element: ... def iter(self, tag: _str_argument_type=...) -> Generator[Element, None, None]: ... def getiterator(self, tag: _str_argument_type=...) -> List[Element]: ... def find(self, path: _str_argument_type, namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> Optional[Element]: ... def findtext(self, path: _str_argument_type, default: _T=..., namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> Union[_T, _str_result_type]: ... def findall(self, path: _str_argument_type, namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> List[Element]: ... def iterfind(self, path: _str_argument_type, namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> List[Element]: ... if sys.version_info >= (3, 4): def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: _str_argument_type=..., method: str=..., *, short_empty_elements: bool=...) -> None: ... else: def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: _str_argument_type=..., method: str=...) -> None: ... def write_c14n(self, file: _file_or_filename) -> None: ... def register_namespace(prefix: _str_argument_type, uri: _str_argument_type) -> None: ... if sys.version_info >= (3, 4): def tostring(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> _tostring_result_type: ... def tostringlist(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> List[_tostring_result_type]: ... else: def tostring(element: Element, encoding: str=..., method: str=...) -> _tostring_result_type: ... def tostringlist(element: Element, encoding: str=..., method: str=...) -> List[_tostring_result_type]: ... def dump(elem: Element) -> None: ... def parse(source: _file_or_filename, parser: 'XMLParser'=...) -> ElementTree: ... def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Any]]: ... if sys.version_info >= (3, 4): class XMLPullParser: def __init__(self, events: Sequence[str]=..., *, _parser: 'XMLParser'=...) -> None: ... 
def feed(self, data: bytes) -> None: ... def close(self) -> None: ... def read_events(self) -> Iterator[Tuple[str, Element]]: ... def XML(text: _parser_input_type, parser: 'XMLParser'=...) -> Element: ... def XMLID(text: _parser_input_type, parser: 'XMLParser'=...) -> Tuple[Element, Dict[_str_result_type, Element]]: ... # This is aliased to XML in the source. fromstring = XML def fromstringlist(sequence: Sequence[_parser_input_type], parser: 'XMLParser'=...) -> Element: ... # This type is both not precise enough and too precise. The TreeBuilder # requires the elementfactory to accept tag and attrs in its args and produce # some kind of object that has .text and .tail properties. # I've chosen to constrain the ElementFactory to always produce an Element # because that is how almost everyone will use it. # Unfortunately, the type of the factory arguments is dependent on how # TreeBuilder is called by client code (they could pass strs, bytes or whatever); # but we don't want to use a too-broad type, or it would be too hard to write # elementfactories. _ElementFactory = Callable[[Any, Dict[Any, Any]], Element] class TreeBuilder: def __init__(self, element_factory: _ElementFactory=...) -> None: ... def close(self) -> Element: ... def data(self, data: _parser_input_type) -> None: ... def start(self, tag: _parser_input_type, attrs: Dict[_parser_input_type, _parser_input_type]) -> Element: ... def end(self, tag: _parser_input_type) -> Element: ... class XMLParser: parser = ... # type: Any target = ... # type: TreeBuilder # TODO-what is entity used for??? entity = ... # type: Any version = ... # type: str def __init__(self, html: int=..., target: TreeBuilder=..., encoding: str=...) -> None: ... def doctype(self, name: str, pubid: str, system: str) -> None: ... def close(self) -> Element: ... def feed(self, data: _parser_input_type) -> None: ... mypy-0.560/typeshed/stdlib/2and3/xml/sax/0000755€tŠÔÚ€2›s®0000000000013215007244024302 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/2and3/xml/sax/__init__.pyi0000644€tŠÔÚ€2›s®0000000262613215007212026565 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List, Optional, Text, Union, IO from mypy_extensions import NoReturn import xml.sax from xml.sax.xmlreader import InputSource, Locator from xml.sax.handler import ContentHandler, ErrorHandler class SAXException(Exception): def __init__(self, msg: str, exception: Optional[Exception] = ...) -> None: ... def getMessage(self) -> str: ... def getException(self) -> Exception: ... def __getitem__(self, ix: Any) -> NoReturn: ... class SAXParseException(SAXException): def __init__(self, msg: str, exception: Exception, locator: Locator) -> None: ... def getColumnNumber(self) -> int: ... def getLineNumber(self) -> int: ... def getPublicId(self): ... def getSystemId(self): ... class SAXNotRecognizedException(SAXException): ... class SAXNotSupportedException(SAXException): ... class SAXReaderNotAvailable(SAXNotSupportedException): ... default_parser_list = ... # type: List[str] def make_parser(parser_list: List[str] = ...) -> xml.sax.xmlreader.XMLReader: ... def parse(source: Union[str, IO[str]], handler: xml.sax.handler.ContentHandler, errorHandler: xml.sax.handler.ErrorHandler = ...) -> None: ... def parseString(string: Union[bytes, Text], handler: xml.sax.handler.ContentHandler, errorHandler: Optional[xml.sax.handler.ErrorHandler] = ...) -> None: ... def _create_parser(parser_name: str) -> xml.sax.xmlreader.XMLReader: ... 
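# Usage sketch (not part of the stub): parseString() drives a ContentHandler
# subclass; TitleCounter is a hypothetical handler used only for illustration.
import xml.sax
from xml.sax.handler import ContentHandler

class TitleCounter(ContentHandler):
    def __init__(self) -> None:
        ContentHandler.__init__(self)
        self.count = 0

    def startElement(self, name, attrs):
        if name == "title":
            self.count += 1

handler = TitleCounter()
xml.sax.parseString(b"<doc><title/><title/></doc>", handler)
assert handler.count == 2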
mypy-0.560/typeshed/stdlib/2and3/xml/sax/handler.pyi0000644€tŠÔÚ€2›s®0000000310113215007212026430 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any version = ... # type: Any class ErrorHandler: def error(self, exception): ... def fatalError(self, exception): ... def warning(self, exception): ... class ContentHandler: def __init__(self) -> None: ... def setDocumentLocator(self, locator): ... def startDocument(self): ... def endDocument(self): ... def startPrefixMapping(self, prefix, uri): ... def endPrefixMapping(self, prefix): ... def startElement(self, name, attrs): ... def endElement(self, name): ... def startElementNS(self, name, qname, attrs): ... def endElementNS(self, name, qname): ... def characters(self, content): ... def ignorableWhitespace(self, whitespace): ... def processingInstruction(self, target, data): ... def skippedEntity(self, name): ... class DTDHandler: def notationDecl(self, name, publicId, systemId): ... def unparsedEntityDecl(self, name, publicId, systemId, ndata): ... class EntityResolver: def resolveEntity(self, publicId, systemId): ... feature_namespaces = ... # type: Any feature_namespace_prefixes = ... # type: Any feature_string_interning = ... # type: Any feature_validation = ... # type: Any feature_external_ges = ... # type: Any feature_external_pes = ... # type: Any all_features = ... # type: Any property_lexical_handler = ... # type: Any property_declaration_handler = ... # type: Any property_dom_node = ... # type: Any property_xml_string = ... # type: Any property_encoding = ... # type: Any property_interning_dict = ... # type: Any all_properties = ... # type: Any mypy-0.560/typeshed/stdlib/2and3/xml/sax/saxutils.pyi0000644€tŠÔÚ€2›s®0000000452013215007212026675 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import AnyStr, Mapping from xml.sax import handler from xml.sax import xmlreader def escape(data: AnyStr, entities: Mapping[str, str] = ...) -> AnyStr: ... def unescape(data: AnyStr, entities: Mapping[str, str] = ...) -> AnyStr: ... def quoteattr(data: AnyStr, entities: Mapping[str, str] = ...) -> AnyStr: ... class XMLGenerator(handler.ContentHandler): if sys.version_info >= (3, 0): def __init__(self, out=..., encoding=..., short_empty_elements: bool=...) -> None: ... else: def __init__(self, out=..., encoding=...) -> None: ... def startDocument(self): ... def endDocument(self): ... def startPrefixMapping(self, prefix, uri): ... def endPrefixMapping(self, prefix): ... def startElement(self, name, attrs): ... def endElement(self, name): ... def startElementNS(self, name, qname, attrs): ... def endElementNS(self, name, qname): ... def characters(self, content): ... def ignorableWhitespace(self, content): ... def processingInstruction(self, target, data): ... class XMLFilterBase(xmlreader.XMLReader): def __init__(self, parent=...) -> None: ... def error(self, exception): ... def fatalError(self, exception): ... def warning(self, exception): ... def setDocumentLocator(self, locator): ... def startDocument(self): ... def endDocument(self): ... def startPrefixMapping(self, prefix, uri): ... def endPrefixMapping(self, prefix): ... def startElement(self, name, attrs): ... def endElement(self, name): ... def startElementNS(self, name, qname, attrs): ... def endElementNS(self, name, qname): ... def characters(self, content): ... def ignorableWhitespace(self, chars): ... def processingInstruction(self, target, data): ... def skippedEntity(self, name): ... def notationDecl(self, name, publicId, systemId): ... 
def unparsedEntityDecl(self, name, publicId, systemId, ndata): ... def resolveEntity(self, publicId, systemId): ... def parse(self, source): ... def setLocale(self, locale): ... def getFeature(self, name): ... def setFeature(self, name, state): ... def getProperty(self, name): ... def setProperty(self, name, value): ... def getParent(self): ... def setParent(self, parent): ... def prepare_input_source(source, base=...): ... mypy-0.560/typeshed/stdlib/2and3/xml/sax/xmlreader.pyi0000644€tŠÔÚ€2›s®0000000442713215007212027012 0ustar jukkaDROPBOX\Domain Users00000000000000class XMLReader: def __init__(self) -> None: ... def parse(self, source): ... def getContentHandler(self): ... def setContentHandler(self, handler): ... def getDTDHandler(self): ... def setDTDHandler(self, handler): ... def getEntityResolver(self): ... def setEntityResolver(self, resolver): ... def getErrorHandler(self): ... def setErrorHandler(self, handler): ... def setLocale(self, locale): ... def getFeature(self, name): ... def setFeature(self, name, state): ... def getProperty(self, name): ... def setProperty(self, name, value): ... class IncrementalParser(XMLReader): def __init__(self, bufsize=...) -> None: ... def parse(self, source): ... def feed(self, data): ... def prepareParser(self, source): ... def close(self): ... def reset(self): ... class Locator: def getColumnNumber(self): ... def getLineNumber(self): ... def getPublicId(self): ... def getSystemId(self): ... class InputSource: def __init__(self, system_id=...) -> None: ... def setPublicId(self, public_id): ... def getPublicId(self): ... def setSystemId(self, system_id): ... def getSystemId(self): ... def setEncoding(self, encoding): ... def getEncoding(self): ... def setByteStream(self, bytefile): ... def getByteStream(self): ... def setCharacterStream(self, charfile): ... def getCharacterStream(self): ... class AttributesImpl: def __init__(self, attrs) -> None: ... def getLength(self): ... def getType(self, name): ... def getValue(self, name): ... def getValueByQName(self, name): ... def getNameByQName(self, name): ... def getQNameByName(self, name): ... def getNames(self): ... def getQNames(self): ... def __len__(self): ... def __getitem__(self, name): ... def keys(self): ... def has_key(self, name): ... def __contains__(self, name): ... def get(self, name, alternative=...): ... def copy(self): ... def items(self): ... def values(self): ... class AttributesNSImpl(AttributesImpl): def __init__(self, attrs, qnames) -> None: ... def getValueByQName(self, name): ... def getNameByQName(self, name): ... def getQNameByName(self, name): ... def getQNames(self): ... def copy(self): ... mypy-0.560/typeshed/stdlib/2and3/zipfile.pyi0000644€tŠÔÚ€2›s®0000000673513215007212025102 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for zipfile from typing import Callable, IO, List, Optional, Tuple, Type, Union from types import TracebackType import sys _SZI = Union[str, ZipInfo] _DT = Tuple[int, int, int, int, int, int] if sys.version_info >= (3,): class BadZipFile(Exception): ... BadZipfile = BadZipFile else: class BadZipfile(Exception): ... error = BadZipfile class LargeZipFile(Exception): ... class ZipFile: debug = ... # type: int comment = ... # type: bytes def __init__(self, file: Union[str, IO[bytes]], mode: str = ..., compression: int = ..., allowZip64: bool = ...) -> None: ... def __enter__(self) -> ZipFile: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[Exception], exc_tb: Optional[TracebackType]) -> bool: ... 
def close(self) -> None: ... def getinfo(self, name: str) -> ZipInfo: ... def infolist(self) -> List[ZipInfo]: ... def namelist(self) -> List[str]: ... def open(self, name: _SZI, mode: str = ..., pwd: Optional[bytes] = ...) -> IO[bytes]: ... def extract(self, member: _SZI, path: Optional[_SZI] = ..., pwd: bytes = ...) -> str: ... def extractall(self, path: Optional[str] = ..., members: Optional[List[str]] = ..., pwd: Optional[bytes] = ...) -> None: ... def printdir(self) -> None: ... def setpassword(self, pwd: bytes) -> None: ... def read(self, name: _SZI, pwd: Optional[bytes] = ...) -> bytes: ... def testzip(self) -> Optional[str]: ... def write(self, filename: str, arcname: Optional[str] = ..., compress_type: Optional[int] = ...) -> None: ... if sys.version_info >= (3,): def writestr(self, zinfo_or_arcname: _SZI, data: Union[bytes, str], compress_type: Optional[int] = ...) -> None: ... else: def writestr(self, zinfo_or_arcname: _SZI, bytes: bytes, compress_type: Optional[int] = ...) -> None: ... class PyZipFile(ZipFile): if sys.version_info >= (3,): def __init__(self, file: Union[str, IO[bytes]], mode: str = ..., compression: int = ..., allowZip64: bool = ..., optimize: int = ...) -> None: ... def writepy(self, pathname: str, basename: str = ..., filterfunc: Optional[Callable[[str], bool]] = ...) -> None: ... else: def writepy(self, pathname: str, basename: str = ...) -> None: ... class ZipInfo: filename = ... # type: str date_time = ... # type: _DT compress_type = ... # type: int comment = ... # type: bytes extra = ... # type: bytes create_system = ... # type: int create_version = ... # type: int extract_version = ... # type: int reserved = ... # type: int flag_bits = ... # type: int volume = ... # type: int internal_attr = ... # type: int external_attr = ... # type: int header_offset = ... # type: int CRC = ... # type: int compress_size = ... # type: int file_size = ... # type: int if sys.version_info < (3,): def __init__(self, filename: Optional[str] = ..., date_time: Optional[_DT] = ...) -> None: ... def is_zipfile(filename: Union[str, IO[bytes]]) -> bool: ... ZIP_STORED = ... # type: int ZIP_DEFLATED = ... # type: int if sys.version_info >= (3, 3): ZIP_BZIP2 = ... # type: int ZIP_LZMA = ... # type: int mypy-0.560/typeshed/stdlib/2and3/zipimport.pyi0000644€tŠÔÚ€2›s®0000000131513215007212025462 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the 'zipimport' module.""" from typing import Optional from types import CodeType, ModuleType class ZipImportError(ImportError): ... class zipimporter(object): archive = ... # type: str prefix = ... # type: str def __init__(self, archivepath: str) -> None: ... def find_module(self, fullname: str, path: str = ...) -> Optional[zipimporter]: ... def get_code(self, fullname: str) -> CodeType: ... def get_data(self, pathname: str) -> str: ... def get_filename(self, fullname: str) -> str: ... def get_source(self, fullname: str) -> Optional[str]: ... def is_package(self, fullname: str) -> bool: ... def load_module(self, fullname: str) -> ModuleType: ... mypy-0.560/typeshed/stdlib/2and3/zlib.pyi0000644€tŠÔÚ€2›s®0000000360013215007212024364 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for zlib import sys DEFLATED = ... # type: int DEF_MEM_LEVEL = ... # type: int MAX_WBITS = ... # type: int ZLIB_VERSION = ... # type: str Z_BEST_COMPRESSION = ... # type: int Z_BEST_SPEED = ... # type: int Z_DEFAULT_COMPRESSION = ... # type: int Z_DEFAULT_STRATEGY = ... # type: int Z_FILTERED = ... # type: int Z_FINISH = ...
# type: int Z_FULL_FLUSH = ... # type: int Z_HUFFMAN_ONLY = ... # type: int Z_NO_FLUSH = ... # type: int Z_SYNC_FLUSH = ... # type: int if sys.version_info >= (3,): DEF_BUF_SIZE = ... # type: int ZLIB_RUNTIME_VERSION = ... # type: str class error(Exception): ... class _Compress: def compress(self, data: bytes) -> bytes: ... def flush(self, mode: int = ...) -> bytes: ... def copy(self) -> _Compress: ... class _Decompress: unused_data = ... # type: bytes unconsumed_tail = ... # type: bytes if sys.version_info >= (3,): eof = ... # type: bool def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... def flush(self, length: int = ...) -> bytes: ... def copy(self) -> _Decompress: ... def adler32(data: bytes, value: int = ...) -> int: ... def compress(data: bytes, level: int = ...) -> bytes: ... if sys.version_info >= (3,): def compressobj(level: int = ..., method: int = ..., wbits: int = ..., memLevel: int = ..., strategy: int = ..., zdict: bytes = ...) -> _Compress: ... else: def compressobj(level: int = ..., method: int = ..., wbits: int = ..., memlevel: int = ..., strategy: int = ...) -> _Compress: ... def crc32(data: bytes, value: int = ...) -> int: ... def decompress(data: bytes, wbits: int = ..., bufsize: int = ...) -> bytes: ... if sys.version_info >= (3,): def decompressobj(wbits: int = ..., zdict: bytes = ...) -> _Decompress: ... else: def decompressobj(wbits: int = ...) -> _Decompress: ... mypy-0.560/typeshed/stdlib/3/0000755€tŠÔÚ€2›s®0000000000013215007244022142 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/_ast.pyi0000644€tŠÔÚ€2›s®0000002033413215007212023610 0ustar jukkaDROPBOX\Domain Users00000000000000# Python 3.5 _ast import typing from typing import Any, Optional, Union PyCF_ONLY_AST = ... # type: int _identifier = str class AST: _attributes = ... # type: typing.Tuple[str, ...] _fields = ... # type: typing.Tuple[str, ...] def __init__(self, *args: Any, **kwargs: Any) -> None: ... class mod(AST): ... class Module(mod): body = ... # type: typing.List[stmt] class Interactive(mod): body = ... # type: typing.List[stmt] class Expression(mod): body = ... # type: expr class Suite(mod): body = ... # type: typing.List[stmt] class stmt(AST): lineno = ... # type: int col_offset = ... # type: int class FunctionDef(stmt): name = ... # type: _identifier args = ... # type: arguments body = ... # type: typing.List[stmt] decorator_list = ... # type: typing.List[expr] returns = ... # type: Optional[expr] class AsyncFunctionDef(stmt): name = ... # type: _identifier args = ... # type: arguments body = ... # type: typing.List[stmt] decorator_list = ... # type: typing.List[expr] returns = ... # type: Optional[expr] class ClassDef(stmt): name = ... # type: _identifier bases = ... # type: typing.List[expr] keywords = ... # type: typing.List[keyword] body = ... # type: typing.List[stmt] decorator_list = ... # type: typing.List[expr] class Return(stmt): value = ... # type: Optional[expr] class Delete(stmt): targets = ... # type: typing.List[expr] class Assign(stmt): targets = ... # type: typing.List[expr] value = ... # type: expr class AugAssign(stmt): target = ... # type: expr op = ... # type: operator value = ... # type: expr class For(stmt): target = ... # type: expr iter = ... # type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] class AsyncFor(stmt): target = ... # type: expr iter = ... # type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] class While(stmt): test = ... 
# type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] class If(stmt): test = ... # type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] class With(stmt): items = ... # type: typing.List[withitem] body = ... # type: typing.List[stmt] class AsyncWith(stmt): items = ... # type: typing.List[withitem] body = ... # type: typing.List[stmt] class Raise(stmt): exc = ... # type: Optional[expr] cause = ... # type: Optional[expr] class Try(stmt): body = ... # type: typing.List[stmt] handlers = ... # type: typing.List[ExceptHandler] orelse = ... # type: typing.List[stmt] finalbody = ... # type: typing.List[stmt] class Assert(stmt): test = ... # type: expr msg = ... # type: Optional[expr] class Import(stmt): names = ... # type: typing.List[alias] class ImportFrom(stmt): module = ... # type: Optional[_identifier] names = ... # type: typing.List[alias] level = ... # type: Optional[int] class Global(stmt): names = ... # type: typing.List[_identifier] class Nonlocal(stmt): names = ... # type: typing.List[_identifier] class Expr(stmt): value = ... # type: expr class Pass(stmt): ... class Break(stmt): ... class Continue(stmt): ... class slice(AST): ... _slice = slice # this lets us type the variable named 'slice' below class Slice(slice): lower = ... # type: Optional[expr] upper = ... # type: Optional[expr] step = ... # type: Optional[expr] class ExtSlice(slice): dims = ... # type: typing.List[slice] class Index(slice): value = ... # type: expr class expr(AST): lineno = ... # type: int col_offset = ... # type: int class BoolOp(expr): op = ... # type: boolop values = ... # type: typing.List[expr] class BinOp(expr): left = ... # type: expr op = ... # type: operator right = ... # type: expr class UnaryOp(expr): op = ... # type: unaryop operand = ... # type: expr class Lambda(expr): args = ... # type: arguments body = ... # type: expr class IfExp(expr): test = ... # type: expr body = ... # type: expr orelse = ... # type: expr class Dict(expr): keys = ... # type: typing.List[expr] values = ... # type: typing.List[expr] class Set(expr): elts = ... # type: typing.List[expr] class ListComp(expr): elt = ... # type: expr generators = ... # type: typing.List[comprehension] class SetComp(expr): elt = ... # type: expr generators = ... # type: typing.List[comprehension] class DictComp(expr): key = ... # type: expr value = ... # type: expr generators = ... # type: typing.List[comprehension] class GeneratorExp(expr): elt = ... # type: expr generators = ... # type: typing.List[comprehension] class Await(expr): value = ... # type: expr class Yield(expr): value = ... # type: Optional[expr] class YieldFrom(expr): value = ... # type: expr class Compare(expr): left = ... # type: expr ops = ... # type: typing.List[cmpop] comparators = ... # type: typing.List[expr] class Call(expr): func = ... # type: expr args = ... # type: typing.List[expr] keywords = ... # type: typing.List[keyword] class Num(expr): n = ... # type: Union[int, float] class Str(expr): s = ... # type: str class Bytes(expr): s = ... # type: bytes class NameConstant(expr): value = ... # type: Any class Ellipsis(expr): ... class Attribute(expr): value = ... # type: expr attr = ... # type: _identifier ctx = ... # type: expr_context class Subscript(expr): value = ... # type: expr slice = ... # type: _slice ctx = ... # type: expr_context class Starred(expr): value = ... # type: expr ctx = ... # type: expr_context class Name(expr): id = ... # type: _identifier ctx = ... 
# type: expr_context class List(expr): elts = ... # type: typing.List[expr] ctx = ... # type: expr_context class Tuple(expr): elts = ... # type: typing.List[expr] ctx = ... # type: expr_context class expr_context(AST): ... class AugLoad(expr_context): ... class AugStore(expr_context): ... class Del(expr_context): ... class Load(expr_context): ... class Param(expr_context): ... class Store(expr_context): ... class boolop(AST): ... class And(boolop): ... class Or(boolop): ... class operator(AST): ... class Add(operator): ... class BitAnd(operator): ... class BitOr(operator): ... class BitXor(operator): ... class Div(operator): ... class FloorDiv(operator): ... class LShift(operator): ... class Mod(operator): ... class Mult(operator): ... class MatMult(operator): ... class Pow(operator): ... class RShift(operator): ... class Sub(operator): ... class unaryop(AST): ... class Invert(unaryop): ... class Not(unaryop): ... class UAdd(unaryop): ... class USub(unaryop): ... class cmpop(AST): ... class Eq(cmpop): ... class Gt(cmpop): ... class GtE(cmpop): ... class In(cmpop): ... class Is(cmpop): ... class IsNot(cmpop): ... class Lt(cmpop): ... class LtE(cmpop): ... class NotEq(cmpop): ... class NotIn(cmpop): ... class comprehension(AST): target = ... # type: expr iter = ... # type: expr ifs = ... # type: typing.List[expr] class ExceptHandler(AST): type = ... # type: Optional[expr] name = ... # type: Optional[_identifier] body = ... # type: typing.List[stmt] lineno = ... # type: int col_offset = ... # type: int class arguments(AST): args = ... # type: typing.List[arg] vararg = ... # type: Optional[arg] kwonlyargs = ... # type: typing.List[arg] kw_defaults = ... # type: typing.List[expr] kwarg = ... # type: Optional[arg] defaults = ... # type: typing.List[expr] class arg(AST): arg = ... # type: _identifier annotation = ... # type: Optional[expr] lineno = ... # type: int col_offset = ... # type: int class keyword(AST): arg = ... # type: Optional[_identifier] value = ... # type: expr class alias(AST): name = ... # type: _identifier asname = ... # type: Optional[_identifier] class withitem(AST): context_expr = ... # type: expr optional_vars = ... # type: Optional[expr] mypy-0.560/typeshed/stdlib/3/_compression.pyi0000644€tŠÔÚ€2›s®0000000067213215007212025365 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any import io BUFFER_SIZE = ... # type: Any class BaseStream(io.BufferedIOBase): ... class DecompressReader(io.RawIOBase): def readable(self): ... def __init__(self, fp, decomp_factory, trailing_error=..., **decomp_args): ... def close(self): ... def seekable(self): ... def readinto(self, b): ... def read(self, size=-1): ... def seek(self, offset, whence=...): ... def tell(self): ... mypy-0.560/typeshed/stdlib/3/_curses.pyi0000644€tŠÔÚ€2›s®0000003660313215007212024333 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, BinaryIO, IO, Optional, Tuple, Union, overload _chtype = Union[str, bytes, int] ALL_MOUSE_EVENTS = ... # type: int A_ALTCHARSET = ... # type: int A_ATTRIBUTES = ... # type: int A_BLINK = ... # type: int A_BOLD = ... # type: int A_CHARTEXT = ... # type: int A_COLOR = ... # type: int A_DIM = ... # type: int A_HORIZONTAL = ... # type: int A_INVIS = ... # type: int A_LEFT = ... # type: int A_LOW = ... # type: int A_NORMAL = ... # type: int A_PROTECT = ... # type: int A_REVERSE = ... # type: int A_RIGHT = ... # type: int A_STANDOUT = ... # type: int A_TOP = ... # type: int A_UNDERLINE = ... # type: int A_VERTICAL = ... # type: int BUTTON1_CLICKED = ... 
# type: int BUTTON1_DOUBLE_CLICKED = ... # type: int BUTTON1_PRESSED = ... # type: int BUTTON1_RELEASED = ... # type: int BUTTON1_TRIPLE_CLICKED = ... # type: int BUTTON2_CLICKED = ... # type: int BUTTON2_DOUBLE_CLICKED = ... # type: int BUTTON2_PRESSED = ... # type: int BUTTON2_RELEASED = ... # type: int BUTTON2_TRIPLE_CLICKED = ... # type: int BUTTON3_CLICKED = ... # type: int BUTTON3_DOUBLE_CLICKED = ... # type: int BUTTON3_PRESSED = ... # type: int BUTTON3_RELEASED = ... # type: int BUTTON3_TRIPLE_CLICKED = ... # type: int BUTTON4_CLICKED = ... # type: int BUTTON4_DOUBLE_CLICKED = ... # type: int BUTTON4_PRESSED = ... # type: int BUTTON4_RELEASED = ... # type: int BUTTON4_TRIPLE_CLICKED = ... # type: int BUTTON_ALT = ... # type: int BUTTON_CTRL = ... # type: int BUTTON_SHIFT = ... # type: int COLOR_BLACK = ... # type: int COLOR_BLUE = ... # type: int COLOR_CYAN = ... # type: int COLOR_GREEN = ... # type: int COLOR_MAGENTA = ... # type: int COLOR_RED = ... # type: int COLOR_WHITE = ... # type: int COLOR_YELLOW = ... # type: int ERR = ... # type: int KEY_A1 = ... # type: int KEY_A3 = ... # type: int KEY_B2 = ... # type: int KEY_BACKSPACE = ... # type: int KEY_BEG = ... # type: int KEY_BREAK = ... # type: int KEY_BTAB = ... # type: int KEY_C1 = ... # type: int KEY_C3 = ... # type: int KEY_CANCEL = ... # type: int KEY_CATAB = ... # type: int KEY_CLEAR = ... # type: int KEY_CLOSE = ... # type: int KEY_COMMAND = ... # type: int KEY_COPY = ... # type: int KEY_CREATE = ... # type: int KEY_CTAB = ... # type: int KEY_DC = ... # type: int KEY_DL = ... # type: int KEY_DOWN = ... # type: int KEY_EIC = ... # type: int KEY_END = ... # type: int KEY_ENTER = ... # type: int KEY_EOL = ... # type: int KEY_EOS = ... # type: int KEY_EXIT = ... # type: int KEY_F0 = ... # type: int KEY_F1 = ... # type: int KEY_F10 = ... # type: int KEY_F11 = ... # type: int KEY_F12 = ... # type: int KEY_F13 = ... # type: int KEY_F14 = ... # type: int KEY_F15 = ... # type: int KEY_F16 = ... # type: int KEY_F17 = ... # type: int KEY_F18 = ... # type: int KEY_F19 = ... # type: int KEY_F2 = ... # type: int KEY_F20 = ... # type: int KEY_F21 = ... # type: int KEY_F22 = ... # type: int KEY_F23 = ... # type: int KEY_F24 = ... # type: int KEY_F25 = ... # type: int KEY_F26 = ... # type: int KEY_F27 = ... # type: int KEY_F28 = ... # type: int KEY_F29 = ... # type: int KEY_F3 = ... # type: int KEY_F30 = ... # type: int KEY_F31 = ... # type: int KEY_F32 = ... # type: int KEY_F33 = ... # type: int KEY_F34 = ... # type: int KEY_F35 = ... # type: int KEY_F36 = ... # type: int KEY_F37 = ... # type: int KEY_F38 = ... # type: int KEY_F39 = ... # type: int KEY_F4 = ... # type: int KEY_F40 = ... # type: int KEY_F41 = ... # type: int KEY_F42 = ... # type: int KEY_F43 = ... # type: int KEY_F44 = ... # type: int KEY_F45 = ... # type: int KEY_F46 = ... # type: int KEY_F47 = ... # type: int KEY_F48 = ... # type: int KEY_F49 = ... # type: int KEY_F5 = ... # type: int KEY_F50 = ... # type: int KEY_F51 = ... # type: int KEY_F52 = ... # type: int KEY_F53 = ... # type: int KEY_F54 = ... # type: int KEY_F55 = ... # type: int KEY_F56 = ... # type: int KEY_F57 = ... # type: int KEY_F58 = ... # type: int KEY_F59 = ... # type: int KEY_F6 = ... # type: int KEY_F60 = ... # type: int KEY_F61 = ... # type: int KEY_F62 = ... # type: int KEY_F63 = ... # type: int KEY_F7 = ... # type: int KEY_F8 = ... # type: int KEY_F9 = ... # type: int KEY_FIND = ... # type: int KEY_HELP = ... # type: int KEY_HOME = ... # type: int KEY_IC = ... # type: int KEY_IL = ... 
# type: int KEY_LEFT = ... # type: int KEY_LL = ... # type: int KEY_MARK = ... # type: int KEY_MAX = ... # type: int KEY_MESSAGE = ... # type: int KEY_MIN = ... # type: int KEY_MOUSE = ... # type: int KEY_MOVE = ... # type: int KEY_NEXT = ... # type: int KEY_NPAGE = ... # type: int KEY_OPEN = ... # type: int KEY_OPTIONS = ... # type: int KEY_PPAGE = ... # type: int KEY_PREVIOUS = ... # type: int KEY_PRINT = ... # type: int KEY_REDO = ... # type: int KEY_REFERENCE = ... # type: int KEY_REFRESH = ... # type: int KEY_REPLACE = ... # type: int KEY_RESET = ... # type: int KEY_RESIZE = ... # type: int KEY_RESTART = ... # type: int KEY_RESUME = ... # type: int KEY_RIGHT = ... # type: int KEY_SAVE = ... # type: int KEY_SBEG = ... # type: int KEY_SCANCEL = ... # type: int KEY_SCOMMAND = ... # type: int KEY_SCOPY = ... # type: int KEY_SCREATE = ... # type: int KEY_SDC = ... # type: int KEY_SDL = ... # type: int KEY_SELECT = ... # type: int KEY_SEND = ... # type: int KEY_SEOL = ... # type: int KEY_SEXIT = ... # type: int KEY_SF = ... # type: int KEY_SFIND = ... # type: int KEY_SHELP = ... # type: int KEY_SHOME = ... # type: int KEY_SIC = ... # type: int KEY_SLEFT = ... # type: int KEY_SMESSAGE = ... # type: int KEY_SMOVE = ... # type: int KEY_SNEXT = ... # type: int KEY_SOPTIONS = ... # type: int KEY_SPREVIOUS = ... # type: int KEY_SPRINT = ... # type: int KEY_SR = ... # type: int KEY_SREDO = ... # type: int KEY_SREPLACE = ... # type: int KEY_SRESET = ... # type: int KEY_SRIGHT = ... # type: int KEY_SRSUME = ... # type: int KEY_SSAVE = ... # type: int KEY_SSUSPEND = ... # type: int KEY_STAB = ... # type: int KEY_SUNDO = ... # type: int KEY_SUSPEND = ... # type: int KEY_UNDO = ... # type: int KEY_UP = ... # type: int OK = ... # type: int REPORT_MOUSE_POSITION = ... # type: int _C_API = ... # type: Any version = ... # type: bytes def baudrate() -> int: ... def beep() -> None: ... def can_change_color() -> bool: ... def cbreak(flag: bool = ...) -> None: ... def color_content(color_number: int) -> Tuple[int, int, int]: ... def color_pair(color_number: int) -> int: ... def curs_set(visibility: int) -> int: ... def def_prog_mode() -> None: ... def def_shell_mode() -> None: ... def delay_output(ms: int) -> None: ... def doupdate() -> None: ... def echo(flag: bool = ...) -> None: ... def endwin() -> None: ... def erasechar() -> bytes: ... def filter() -> None: ... def flash() -> None: ... def flushinp() -> None: ... def getmouse() -> Tuple[int, int, int, int, int]: ... def getsyx() -> Tuple[int, int]: ... def getwin(f: BinaryIO): ... def halfdelay(tenths: int) -> None: ... def has_colors() -> bool: ... def has_ic() -> bool: ... def has_il() -> bool: ... def has_key(ch: int) -> bool: ... def init_color(color_number: int, r: int, g: int, b: int) -> None: ... def init_pair(pair_number: int, fg: int, bg: int) -> None: ... def initscr() -> _CursesWindow: ... def intrflush(ch: bool) -> None: ... def is_term_resized(nlines: int, ncols: int) -> bool: ... def isendwin() -> bool: ... def keyname(k: int) -> bytes: ... def killchar() -> bytes: ... def longname() -> bytes: ... def meta(yes: bool) -> None: ... def mouseinterval(interval: int) -> None: ... def mousemask(mousemask: int) -> Tuple[int, int]: ... def napms(ms: int) -> int: ... def newpad(nlines: int, ncols: int): ... def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ...) -> _CursesWindow: ... def nl(flag: bool = ...) -> None: ... def nocbreak() -> None: ... def noecho() -> None: ... def nonl() -> None: ... def noqiflush() -> None: ... 
def noraw() -> None: ... def pair_content(pair_number: int) -> Tuple[int, int]: ... def pair_number(attr: int) -> int: ... def putp(string: bytes) -> None: ... def qiflush(flag: bool = ...) -> None: ... def raw(flag: bool = ...) -> None: ... def reset_prog_mode() -> None: ... def reset_shell_mode() -> None: ... def resetty() -> None: ... def resize_term(nlines: int, ncols: int) -> None: ... def resizeterm(nlines: int, ncols: int) -> None: ... def savetty() -> None: ... def setsyx(y: int, x: int) -> None: ... def setupterm(termstr: str = ..., fd: int = ...) -> None: ... def start_color() -> None: ... def termattrs() -> int: ... def termname() -> bytes: ... def tigetflag(capname: str) -> int: ... def tigetnum(capname: str) -> int: ... def tigetstr(capname: str) -> bytes: ... def tparm(fmt: str, i1: int = ..., i2: int = ..., i3: int = ..., i4: int = ..., i5: int = ..., i6: int = ..., i7: int = ..., i8: int = ..., i9: int = ...) -> str: ... def typeahead(fd: int) -> None: ... def unctrl(ch: _chtype) -> bytes: ... def unget_wch(ch: _chtype) -> None: ... def ungetch(ch: _chtype) -> None: ... def ungetmouse(id: int, x: int, y: int, z: int, bstate: int) -> None: ... def update_lines_cols() -> int: ... def use_default_colors() -> None: ... def use_env(flag: bool) -> None: ... class error(Exception): ... class _CursesWindow: encoding = ... # type: str @overload def addch(self, ch: _chtype, attr: Optional[int]) -> None: ... @overload def addch(self, y: int, x: int, ch: _chtype, attr: Optional[int]) -> None: ... @overload def addnstr(self, str: str, n: int, attr: Optional[int]) -> None: ... @overload def addnstr(self, y: int, x: int, str: str, n: int, attr: Optional[int]) -> None: ... @overload def addstr(self, str: str, attr: Optional[int]) -> None: ... @overload def addstr(self, y: int, x: int, str: str, attr: Optional[int]) -> None: ... def attroff(self, attr: int) -> None: ... def attron(self, attr: int) -> None: ... def attrset(self, attr: int) -> None: ... def bkgd(self, ch: _chtype, attr: Optional[int]) -> None: ... def bkgset(self, ch: _chtype, attr: Optional[int]) -> None: ... def border(self, ls: Optional[int], rs: Optional[int], ts: Optional[int], bs: Optional[int], tl: Optional[int], tr: Optional[int], bl: Optional[int], br: Optional[int]) -> None: ... def box(self, vertch: Optional[Tuple[int, int]], horch: Optional[Tuple[int, int]]) -> None: ... @overload def chgat(self, attr: int) -> None: ... @overload def chgat(self, num: int, attr: int) -> None: ... @overload def chgat(self, y: int, x: int, attr: int) -> None: ... @overload def chgat(self, y: int, x: int, num: int, attr: int) -> None: ... def clear(self) -> None: ... def clearok(self, yes: int): ... def clrtobot(self) -> None: ... def clrtoeol(self) -> None: ... def cursyncup(self) -> None: ... @overload def delch(self) -> None: ... @overload def delch(self, y: int, x: int) -> None: ... def deleteln(self) -> None: ... @overload def derwin(self, begin_y: int, begin_x: int) -> '_CursesWindow': ... @overload def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> '_CursesWindow': ... def echochar(self, ch: _chtype, attr: Optional[int]) -> None: ... def enclose(self, y: int, x: int) -> bool: ... def erase(self) -> None: ... def getbegyx(self) -> Tuple[int, int]: ... def getbkgd(self) -> Tuple[int, int]: ... def getch(self, y: Optional[int], x: Optional[int]) -> _chtype: ... def get_wch(self, y: Optional[int], x: Optional[int]) -> _chtype: ... def getkey(self, y: Optional[int], x: Optional[int]) -> str: ... 
def getmaxyx(self) -> Tuple['_CursesWindow', int, int]: ... def getparyx(self) -> Tuple[int, int]: ... def getstr(self, y: Optional[int], x: Optional[int]) -> None: ... def getyx(self) -> Tuple['_CursesWindow', int, int]: ... @overload def hline(self, ch: _chtype, n: int) -> None: ... @overload def hline(self, y: int, x: int, ch: _chtype, n: int) -> None: ... def idcok(self, flag: bool) -> None: ... def idlok(self, yes: bool) -> None: ... def immedok(self, flag: bool) -> None: ... def inch(self, y: Optional[int], x: Optional[int]) -> _chtype: ... @overload def insch(self, ch: _chtype, attr: Optional[int]) -> None: ... @overload def insch(self, y: int, x: int, ch: _chtype, attr: Optional[int]) -> None: ... def insdelln(self, nlines: int) -> None: ... def insertln(self) -> None: ... @overload def insnstr(self, str: str, n: int, attr: Optional[int]) -> None: ... @overload def insnstr(self, y: int, x: int, str: str, n: int, attr: Optional[int]) -> None: ... @overload def insstr(self, str: str, attr: Optional[int]) -> None: ... @overload def insstr(self, y: int, x: int, str: str, attr: Optional[int]) -> None: ... @overload def instr(self, n: Optional[int]) -> str: ... @overload def instr(self, y: int, x: int, n: Optional[int]) -> str: ... def is_linetouched(self, line: int) -> bool: ... def is_wintouched(self) -> bool: ... def keypad(self, yes: bool) -> None: ... def leaveok(self, yes: bool) -> None: ... def move(self, new_y: int, new_x: int) -> None: ... def mvderwin(self, y: int, x: int) -> None: ... def mvwin(self, new_y: int, new_x: int) -> None: ... def nodelay(self, yes: bool) -> None: ... def notimeout(self, yes: bool) -> None: ... def noutrefresh(self) -> None: ... def overlay(self, destwin: '_CursesWindow', sminrow: Optional[int], smincol: Optional[int], dminrow: Optional[int], dmincol: Optional[int], dmaxrow: Optional[int], dmaxcol: Optional[int]) -> None: ... def overwrite(self, destwin: '_CursesWindow', sminrow: Optional[int], smincol: Optional[int], dminrow: Optional[int], dmincol: Optional[int], dmaxrow: Optional[int], dmaxcol: Optional[int]) -> None: ... def putwin(self, file: IO[Any]) -> None: ... def redrawln(self, beg: int, num: int) -> None: ... def redrawwin(self) -> None: ... def refresh(self, pminrow: Optional[int], pmincol: Optional[int], sminrow: Optional[int], smincol: Optional[int], smaxrow: Optional[int], smaxcol: Optional[int]) -> None: ... def resize(self, nlines: int, ncols: int) -> None: ... def scroll(self, lines: int) -> None: ... def scrollok(self, flag: bool) -> None: ... def setscrreg(self, top: int, bottom: int) -> None: ... def standend(self) -> None: ... def standout(self) -> None: ... @overload def subpad(self, begin_y: int, begin_x: int) -> '_CursesWindow': ... @overload def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> '_CursesWindow': ... @overload def subwin(self, begin_y: int, begin_x: int) -> '_CursesWindow': ... @overload def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> '_CursesWindow': ... def syncdown(self) -> None: ... def syncok(self, flag: bool) -> None: ... def syncup(self) -> None: ... def timeout(self, delay: int) -> None: ... def touchline(self, start: int, count: int, changed: Optional[bool]) -> None: ... def touchwin(self) -> None: ... def untouchwin(self) -> None: ... @overload def vline(self, ch: _chtype, n: int) -> None: ... @overload def vline(self, y: int, x: int, ch: _chtype, n: int) -> None: ... 
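# Minimal usage sketch for the _curses API declared above (an illustrative
# example, not part of the stub): applications normally import the `curses`
# package, which wraps this C module and provides curses.wrapper() so that
# endwin() runs even if the callback raises.
import curses

def demo(stdscr):
    curses.curs_set(0)                          # hide the cursor
    stdscr.addstr(0, 0, "hello", curses.A_BOLD)
    stdscr.refresh()
    stdscr.getch()                              # wait for any key

if __name__ == "__main__":
    curses.wrapper(demo)                        # handles initscr()/endwin()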
mypy-0.560/typeshed/stdlib/3/_dummy_thread.pyi0000644€tŠÔÚ€2›s®0000000147313215007212025506 0ustar jukkaDROPBOX\Domain Users00000000000000from mypy_extensions import NoReturn from typing import Any, Callable, Dict, Optional, Tuple TIMEOUT_MAX: int error = RuntimeError def start_new_thread(function: Callable[..., Any], args: Tuple[Any, ...], kwargs: Dict[str, Any] = ...) -> None: ... def exit() -> NoReturn: ... def get_ident() -> int: ... def allocate_lock() -> LockType: ... def stack_size(size: Optional[int] = ...) -> int: ... class LockType(object): locked_status: bool def __init__(self) -> None: ... def acquire(self, waitflag: Optional[bool] = ..., timeout: int = ...) -> bool: ... def __enter__(self, waitflag: Optional[bool] = ..., timeout: int = ...) -> bool: ... def __exit__(self, typ: Any, val: Any, tb: Any) -> None: ... def release(self) -> bool: ... def locked(self) -> bool: ... def interrupt_main() -> None: ... mypy-0.560/typeshed/stdlib/3/_imp.pyi0000644€tŠÔÚ€2›s®0000000140313215007212023602 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for _imp (Python 3.6) import sys import types from typing import Any, List if sys.version_info >= (3, 5): from importlib.machinery import ModuleSpec def create_builtin(spec: ModuleSpec) -> types.ModuleType: ... def create_dynamic(spec: ModuleSpec, file: Any = ...) -> None: ... def acquire_lock() -> None: ... def exec_builtin(mod: types.ModuleType) -> int: ... def exec_dynamic(mod: types.ModuleType) -> int: ... def extension_suffixes() -> List[str]: ... def get_frozen_object(name: str) -> types.CodeType: ... def init_frozen(name: str) -> types.ModuleType: ... def is_builtin(name: str) -> int: ... def is_frozen(name: str) -> bool: ... def is_frozen_package(name: str) -> bool: ... def lock_held() -> bool: ... def release_lock() -> None: ... mypy-0.560/typeshed/stdlib/3/_importlib_modulespec.pyi0000644€tŠÔÚ€2›s®0000000342513215007212027244 0ustar jukkaDROPBOX\Domain Users00000000000000# ModuleSpec, ModuleType, Loader are part of a dependency cycle. # They are officially defined/exported in other places: # # - ModuleType in types # - Loader in importlib.abc # - ModuleSpec in importlib.machinery (3.4 and later only) from abc import ABCMeta import sys from typing import Any, Dict, List, Optional if sys.version_info >= (3, 4): class ModuleSpec: def __init__(self, name: str, loader: Optional['Loader'], *, origin: Optional[str] = ..., loader_state: Any = ..., is_package: Optional[bool] = ...) -> None: ... name = ... # type: str loader = ... # type: Optional[Loader] origin = ... # type: Optional[str] submodule_search_locations = ... # type: Optional[List[str]] loader_state = ... # type: Any cached = ... # type: Optional[str] parent = ... # type: Optional[str] has_location = ... # type: bool class ModuleType: __name__ = ... # type: str __file__ = ... # type: str __dict__ = ... # type: Dict[str, Any] if sys.version_info >= (3, 4): __loader__ = ... # type: Optional[Loader] __package__ = ... # type: Optional[str] __spec__ = ... # type: Optional[ModuleSpec] def __init__(self, name: str, doc: Optional[str] = ...) -> None: ... class Loader(metaclass=ABCMeta): def load_module(self, fullname: str) -> ModuleType: ... if sys.version_info >= (3, 3): def module_repr(self, module: ModuleType) -> str: ... if sys.version_info >= (3, 4): def create_module(self, spec: ModuleSpec) -> Optional[ModuleType]: ... # Not defined on the actual class for backwards-compatibility reasons, # but expected in new code. def exec_module(self, module: ModuleType) -> None: ... 
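# Minimal sketch (not part of the stubs) of the Loader protocol declared above:
# importlib only needs exec_module(); create_module() may return None to get
# the default module object. The module name "demo_mod" and the in-memory
# source string are made up for illustration.
import importlib.util
import sys

class StringLoader:
    """Load a module whose source code lives in a plain string."""
    def __init__(self, source):
        self.source = source
    def create_module(self, spec):
        return None                       # fall back to the default ModuleType
    def exec_module(self, module):
        exec(self.source, module.__dict__)

spec = importlib.util.spec_from_loader("demo_mod", StringLoader("ANSWER = 42"))
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
sys.modules["demo_mod"] = module
assert module.ANSWER == 42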
mypy-0.560/typeshed/stdlib/3/_json.pyi0000644€tŠÔÚ€2›s®0000000215213215007212023770 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_json' module.""" from typing import Any, Tuple class make_encoder: sort_keys = ... # type: Any skipkeys = ... # type: Any key_separator = ... # type: Any indent = ... # type: Any markers = ... # type: Any default = ... # type: Any encoder = ... # type: Any item_separator = ... # type: Any def __init__(self, markers, default, encoder, indent, key_separator, item_separator, sort_keys, skipkeys, allow_nan) -> None: ... def __call__(self, *args, **kwargs) -> Any: ... class make_scanner: object_hook = ... # type: Any object_pairs_hook = ... # type: Any parse_int = ... # type: Any parse_constant = ... # type: Any parse_float = ... # type: Any strict = ... # type: bool # TODO: 'context' needs the attrs above (ducktype), but not __call__. def __init__(self, context: "make_scanner") -> None: ... def __call__(self, string: str, index: int) -> Tuple[Any, int]: ... def encode_basestring_ascii(s: str) -> str: ... def scanstring(string: str, end: int, strict: bool = ...) -> Tuple[str, int]: ... mypy-0.560/typeshed/stdlib/3/_markupbase.pyi0000644€tŠÔÚ€2›s®0000000040013215007212025143 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple class ParserBase: def __init__(self) -> None: ... def error(self, message: str) -> None: ... def reset(self) -> None: ... def getpos(self) -> Tuple[int, int]: ... def unkown_decl(self, data: str) -> None: ... mypy-0.560/typeshed/stdlib/3/_operator.pyi0000644€tŠÔÚ€2›s®0000000265413215007212024661 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for _operator (Python 3.5) import sys from typing import AnyStr # In reality the import is the other way around, but this way we can keep the operator stub in 2and3 from operator import ( truth as truth, contains as contains, indexOf as indexOf, countOf as countOf, is_ as is_, is_not as is_not, index as index, add as add, sub as sub, mul as mul, floordiv as floordiv, truediv as truediv, mod as mod, neg as neg, pos as pos, abs as abs, inv as inv, invert as invert, lshift as lshift, rshift as rshift, not_ as not_, and_ as and_, xor as xor, or_ as or_, iadd as iadd, isub as isub, imul as imul, ifloordiv as ifloordiv, itruediv as itruediv, imod as imod, ilshift as ilshift, irshift as irshift, iand as iand, ixor as ixor, ior as ior, concat as concat, iconcat as iconcat, getitem as getitem, setitem as setitem, delitem as delitem, pow as pow, ipow as ipow, eq as eq, ne as ne, lt as lt, le as le, gt as gt, ge as ge, itemgetter as itemgetter, attrgetter as attrgetter, methodcaller as methodcaller, ) if sys.version_info >= (3, 5): from operator import matmul as matmul, imatmul as imatmul if sys.version_info >= (3, 4): from operator import length_hint as length_hint def _compare_digest(a: AnyStr, b: AnyStr) -> bool: ... mypy-0.560/typeshed/stdlib/3/_posixsubprocess.pyi0000644€tŠÔÚ€2›s®0000000113013215007212026265 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for _posixsubprocess # NOTE: These are incomplete! from typing import Tuple, Sequence, Callable def cloexec_pipe() -> Tuple[int, int]: ... def fork_exec(args: Sequence[str], executable_list: Sequence[bytes], close_fds: bool, fds_to_keep: Sequence[int], cwd: str, env_list: Sequence[bytes], p2cread: int, p2cwrite: int, c2pred: int, c2pwrite: int, errread: int, errwrite: int, errpipe_read: int, errpipe_write: int, restore_signals: int, start_new_session: int, preexec_fn: Callable[[], None]) -> int: ... 
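# Minimal usage sketch (not part of the stubs) for the callables that
# _operator re-exports above; user code imports them from `operator`, and the
# C implementations in _operator are picked up automatically when available.
from operator import attrgetter, itemgetter, methodcaller

pairs = [("b", 2), ("a", 1)]
assert sorted(pairs, key=itemgetter(0)) == [("a", 1), ("b", 2)]
assert itemgetter(1, 0)(("x", "y")) == ("y", "x")
assert attrgetter("imag")(3 + 4j) == 4.0
assert methodcaller("upper")("abc") == "ABC"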
mypy-0.560/typeshed/stdlib/3/_subprocess.pyi0000644€tŠÔÚ€2›s®0000000236413215007212025214 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for _subprocess # NOTE: These are incomplete! from typing import Mapping, Any, Tuple CREATE_NEW_CONSOLE = 0 CREATE_NEW_PROCESS_GROUP = 0 STD_INPUT_HANDLE = 0 STD_OUTPUT_HANDLE = 0 STD_ERROR_HANDLE = 0 SW_HIDE = 0 STARTF_USESTDHANDLES = 0 STARTF_USESHOWWINDOW = 0 INFINITE = 0 DUPLICATE_SAME_ACCESS = 0 WAIT_OBJECT_0 = 0 # TODO not exported by the Python module class Handle: def Close(self) -> None: ... def GetVersion() -> int: ... def GetExitCodeProcess(handle: Handle) -> int: ... def WaitForSingleObject(handle: Handle, timeout: int) -> int: ... def CreateProcess(executable: str, cmd_line: str, proc_attrs, thread_attrs, inherit: int, flags: int, env_mapping: Mapping[str, str], curdir: str, startupinfo: Any) -> Tuple[Any, Handle, int, int]: ... def GetModuleFileName(module: int) -> str: ... def GetCurrentProcess() -> Handle: ... def DuplicateHandle(source_proc: Handle, source: Handle, target_proc: Handle, target: Any, access: int, inherit: int) -> int: ... def CreatePipe(pipe_attrs, size: int) -> Tuple[Handle, Handle]: ... def GetStdHandle(arg: int) -> int: ... def TerminateProcess(handle: Handle, exit_code: int) -> None: ... mypy-0.560/typeshed/stdlib/3/_thread.pyi0000644€tŠÔÚ€2›s®0000000043313215007212024266 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for _thread # NOTE: These are incomplete! from typing import Any def _count() -> int: ... _dangling = ... # type: Any class LockType: def acquire(self) -> None: ... def release(self) -> None: ... def allocate_lock() -> LockType: ... def get_ident() -> int: ... mypy-0.560/typeshed/stdlib/3/_threading_local.pyi0000644€tŠÔÚ€2›s®0000000111613215007212026135 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://github.com/python/cpython/blob/master/Lib/_threading_local.py from typing import Any, Dict, List, Tuple from weakref import ReferenceType __all__: List[str] localdict = Dict[Any, Any] class _localimpl: key: str dicts: Dict[int, Tuple[ReferenceType, localdict]] def __init__(self) -> None: ... def get_dict(self) -> localdict: ... def create_dict(self) -> localdict: ... class local: def __getattribute__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def __delattr__(self, name: str) -> None: ... mypy-0.560/typeshed/stdlib/3/_warnings.pyi0000644€tŠÔÚ€2›s®0000000075113215007212024652 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List, Optional, Type _defaultaction = ... # type: str _onceregistry = ... # type: dict filters = ... # type: List[tuple] def warn(message: Warning, category: Optional[Type[Warning]] = ..., stacklevel: int = ...) -> None: ... def warn_explicit(message: Warning, category: Optional[Type[Warning]], filename: str, lineno: int, module: Any = ..., registry: dict = ..., module_globals: dict = ...) -> None: ... mypy-0.560/typeshed/stdlib/3/abc.pyi0000644€tŠÔÚ€2›s®0000000145113215007212023406 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Type, TypeVar import sys # Stubs for abc. _T = TypeVar('_T') _FuncT = TypeVar('_FuncT', bound=Callable[..., Any]) # Thesee definitions have special processing in mypy class ABCMeta(type): if sys.version_info >= (3, 3): def register(cls: "ABCMeta", subclass: Type[_T]) -> Type[_T]: ... else: def register(cls: "ABCMeta", subclass: Type[Any]) -> None: ... def abstractmethod(callable: _FuncT) -> _FuncT: ... 
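# Minimal sketch (not part of the stubs) of how ABCMeta and abstractmethod,
# declared above, combine at runtime: instantiating Base raises TypeError
# until every abstract method is overridden. Class names are illustrative.
from abc import ABCMeta, abstractmethod

class Base(metaclass=ABCMeta):
    @abstractmethod
    def speak(self) -> str: ...

class Dog(Base):
    def speak(self) -> str:
        return "woof"

assert Dog().speak() == "woof"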
def abstractproperty(callable: _FuncT) -> _FuncT: ... # These two are deprecated and not supported by mypy def abstractstaticmethod(callable: _FuncT) -> _FuncT: ... def abstractclassmethod(callable: _FuncT) -> _FuncT: ... if sys.version_info >= (3, 4): class ABC(metaclass=ABCMeta): pass def get_cache_token() -> object: ... mypy-0.560/typeshed/stdlib/3/ast.pyi0000644€tŠÔÚ€2›s®0000000172713215007212023456 0ustar jukkaDROPBOX\Domain Users00000000000000# Python 3.5 ast import typing from typing import Any, Union, Iterator from _ast import * class NodeVisitor(): def visit(self, node: AST) -> Any: ... def generic_visit(self, node: AST) -> None: ... class NodeTransformer(NodeVisitor): def generic_visit(self, node: AST) -> None: ... def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> Module: ... def copy_location(new_node: AST, old_node: AST) -> AST: ... def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ... def fix_missing_locations(node: AST) -> AST: ... def get_docstring(node: AST, clean: bool = ...) -> str: ... def increment_lineno(node: AST, n: int = ...) -> AST: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... def iter_fields(node: AST) -> Iterator[typing.Tuple[str, Any]]: ... def literal_eval(node_or_string: Union[str, AST]) -> Any: ... def walk(node: AST) -> Iterator[AST]: ... PyCF_ONLY_AST = ... # type: int mypy-0.560/typeshed/stdlib/3/atexit.pyi0000644€tŠÔÚ€2›s®0000000047113215007212024160 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the 'atexit' module.""" from typing import Any, Callable def _clear() -> None: ... def _ncallbacks() -> int: ... def _run_exitfuncs() -> None: ... def register(func: Callable[..., Any], *args: Any, **kwargs: Any) -> Callable[..., Any]: ... def unregister(func: Callable[..., Any]) -> None: ... mypy-0.560/typeshed/stdlib/3/builtins.pyi0000644€tŠÔÚ€2›s®0000012444613215007212024524 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for builtins (Python 3) from typing import ( TypeVar, Iterator, Iterable, overload, Container, Sequence, MutableSequence, Mapping, MutableMapping, Tuple, List, Any, Dict, Callable, Generic, Set, AbstractSet, FrozenSet, MutableSet, Sized, Reversible, SupportsInt, SupportsFloat, SupportsBytes, SupportsAbs, SupportsRound, IO, Union, ItemsView, KeysView, ValuesView, ByteString, Optional, AnyStr, Type, ) from abc import abstractmethod, ABCMeta from types import TracebackType, CodeType import sys from mypy_extensions import NoReturn # Note that names imported above are not automatically made visible via the # implicit builtins import. _T = TypeVar('_T') _T_co = TypeVar('_T_co', covariant=True) _KT = TypeVar('_KT') _VT = TypeVar('_VT') _S = TypeVar('_S') _T1 = TypeVar('_T1') _T2 = TypeVar('_T2') _T3 = TypeVar('_T3') _T4 = TypeVar('_T4') _T5 = TypeVar('_T5') _TT = TypeVar('_TT', bound='type') class object: __doc__ = ... # type: Optional[str] __class__ = ... # type: type __dict__ = ... # type: Dict[str, Any] __slots__ = ... # type: Optional[Union[str, Iterable[str]]] __module__ = ... # type: str if sys.version_info >= (3, 6): __annotations__ = ... # type: Dict[str, Any] def __init__(self) -> None: ... def __new__(cls) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def __eq__(self, o: object) -> bool: ... def __ne__(self, o: object) -> bool: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __hash__(self) -> int: ... def __format__(self, format_spec: str) -> str: ... 
def __getattribute__(self, name: str) -> Any: ... def __delattr__(self, name: str) -> None: ... def __sizeof__(self) -> int: ... def __reduce__(self) -> tuple: ... def __reduce_ex__(self, protocol: int) -> tuple: ... def __dir__(self) -> Iterable[str]: ... if sys.version_info >= (3, 6): def __init_subclass__(cls) -> None: ... class staticmethod: # Special, only valid as a decorator. __func__ = ... # type: function __isabstractmethod__ = ... # type: bool def __init__(self, f: function) -> None: ... def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ... def __get__(self, obj: _T, type: Optional[Type[_T]]=...) -> function: ... class classmethod: # Special, only valid as a decorator. __func__ = ... # type: function __isabstractmethod__ = ... # type: bool def __init__(self, f: function) -> None: ... def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ... def __get__(self, obj: _T, type: Optional[Type[_T]]=...) -> function: ... class type: __bases__ = ... # type: Tuple[type, ...] __name__ = ... # type: str __qualname__ = ... # type: str __module__ = ... # type: str __dict__ = ... # type: Dict[str, Any] __mro__ = ... # type: Tuple[type, ...] @overload def __init__(self, o: object) -> None: ... @overload def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: ... @overload def __new__(cls, o: object) -> type: ... @overload def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]) -> type: ... def __call__(self, *args: Any, **kwds: Any) -> Any: ... def __subclasses__(self: _TT) -> List[_TT]: ... # Note: the documentation doesnt specify what the return type is, the standard # implementation seems to be returning a list. def mro(self) -> List[type]: ... def __instancecheck__(self, instance: Any) -> bool: ... def __subclasscheck__(self, subclass: type) -> bool: ... class super: @overload def __init__(self, t: Any, obj: Any) -> None: ... @overload def __init__(self, t: Any) -> None: ... @overload def __init__(self) -> None: ... class int: @overload def __init__(self, x: Union[str, bytes, SupportsInt] = ...) -> None: ... @overload def __init__(self, x: Union[str, bytes], base: int) -> None: ... def bit_length(self) -> int: ... def to_bytes(self, length: int, byteorder: str, *, signed: bool = ...) -> bytes: ... @classmethod def from_bytes(cls, bytes: Sequence[int], byteorder: str, *, signed: bool = ...) -> int: ... # TODO buffer object argument def __add__(self, x: int) -> int: ... def __sub__(self, x: int) -> int: ... def __mul__(self, x: int) -> int: ... def __floordiv__(self, x: int) -> int: ... def __truediv__(self, x: int) -> float: ... def __mod__(self, x: int) -> int: ... def __radd__(self, x: int) -> int: ... def __rsub__(self, x: int) -> int: ... def __rmul__(self, x: int) -> int: ... def __rfloordiv__(self, x: int) -> int: ... def __rtruediv__(self, x: int) -> float: ... def __rmod__(self, x: int) -> int: ... def __pow__(self, x: int) -> Any: ... # Return type can be int or float, depending on x. def __rpow__(self, x: int) -> Any: ... def __and__(self, n: int) -> int: ... def __or__(self, n: int) -> int: ... def __xor__(self, n: int) -> int: ... def __lshift__(self, n: int) -> int: ... def __rshift__(self, n: int) -> int: ... def __rand__(self, n: int) -> int: ... def __ror__(self, n: int) -> int: ... def __rxor__(self, n: int) -> int: ... def __rlshift__(self, n: int) -> int: ... def __rrshift__(self, n: int) -> int: ... def __neg__(self) -> int: ... def __pos__(self) -> int: ... def __invert__(self) -> int: ... 
def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: int) -> bool: ... def __le__(self, x: int) -> bool: ... def __gt__(self, x: int) -> bool: ... def __ge__(self, x: int) -> bool: ... def __str__(self) -> str: ... def __float__(self) -> float: ... def __int__(self) -> int: ... def __abs__(self) -> int: ... def __hash__(self) -> int: ... def __bool__(self) -> bool: ... class float: def __init__(self, x: Union[SupportsFloat, str, bytes] = ...) -> None: ... def as_integer_ratio(self) -> Tuple[int, int]: ... def hex(self) -> str: ... def is_integer(self) -> bool: ... @classmethod def fromhex(cls, s: str) -> float: ... def __add__(self, x: float) -> float: ... def __sub__(self, x: float) -> float: ... def __mul__(self, x: float) -> float: ... def __floordiv__(self, x: float) -> float: ... def __truediv__(self, x: float) -> float: ... def __mod__(self, x: float) -> float: ... def __pow__(self, x: float) -> float: ... def __radd__(self, x: float) -> float: ... def __rsub__(self, x: float) -> float: ... def __rmul__(self, x: float) -> float: ... def __rfloordiv__(self, x: float) -> float: ... def __rtruediv__(self, x: float) -> float: ... def __rmod__(self, x: float) -> float: ... def __rpow__(self, x: float) -> float: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: float) -> bool: ... def __le__(self, x: float) -> bool: ... def __gt__(self, x: float) -> bool: ... def __ge__(self, x: float) -> bool: ... def __neg__(self) -> float: ... def __pos__(self) -> float: ... def __str__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __abs__(self) -> float: ... def __hash__(self) -> int: ... def __bool__(self) -> bool: ... class complex: @overload def __init__(self, re: float = ..., im: float = ...) -> None: ... @overload def __init__(self, s: str) -> None: ... @property def real(self) -> float: ... @property def imag(self) -> float: ... def conjugate(self) -> complex: ... def __add__(self, x: complex) -> complex: ... def __sub__(self, x: complex) -> complex: ... def __mul__(self, x: complex) -> complex: ... def __pow__(self, x: complex) -> complex: ... def __truediv__(self, x: complex) -> complex: ... def __radd__(self, x: complex) -> complex: ... def __rsub__(self, x: complex) -> complex: ... def __rmul__(self, x: complex) -> complex: ... def __rpow__(self, x: complex) -> complex: ... def __rtruediv__(self, x: complex) -> complex: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __neg__(self) -> complex: ... def __pos__(self) -> complex: ... def __str__(self) -> str: ... def __abs__(self) -> float: ... def __hash__(self) -> int: ... def __bool__(self) -> bool: ... class str(Sequence[str]): @overload def __init__(self, o: object = ...) -> None: ... @overload def __init__(self, o: bytes, encoding: str = ..., errors: str = ...) -> None: ... def capitalize(self) -> str: ... def casefold(self) -> str: ... def center(self, width: int, fillchar: str = ...) -> str: ... def count(self, x: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def encode(self, encoding: str = ..., errors: str = ...) -> bytes: ... def endswith(self, suffix: Union[str, Tuple[str, ...]], start: Optional[int] = ..., end: Optional[int] = ...) -> bool: ... def expandtabs(self, tabsize: int = ...) -> str: ... def find(self, sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... 
def format(self, *args: Any, **kwargs: Any) -> str: ... def format_map(self, map: Mapping[str, Any]) -> str: ... def index(self, sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... def isdecimal(self) -> bool: ... def isdigit(self) -> bool: ... def isidentifier(self) -> bool: ... def islower(self) -> bool: ... def isnumeric(self) -> bool: ... def isprintable(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, iterable: Iterable[str]) -> str: ... def ljust(self, width: int, fillchar: str = ...) -> str: ... def lower(self) -> str: ... def lstrip(self, chars: Optional[str] = ...) -> str: ... def partition(self, sep: str) -> Tuple[str, str, str]: ... def replace(self, old: str, new: str, count: int = ...) -> str: ... def rfind(self, sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def rindex(self, sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ... def rjust(self, width: int, fillchar: str = ...) -> str: ... def rpartition(self, sep: str) -> Tuple[str, str, str]: ... def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... def rstrip(self, chars: Optional[str] = ...) -> str: ... def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... def splitlines(self, keepends: bool = ...) -> List[str]: ... def startswith(self, prefix: Union[str, Tuple[str, ...]], start: Optional[int] = ..., end: Optional[int] = ...) -> bool: ... def strip(self, chars: Optional[str] = ...) -> str: ... def swapcase(self) -> str: ... def title(self) -> str: ... def translate(self, table: Union[Mapping[int, Union[int, str, None]], Sequence[Union[int, str, None]]]) -> str: ... def upper(self) -> str: ... def zfill(self, width: int) -> str: ... @staticmethod @overload def maketrans(x: Union[Dict[int, _T], Dict[str, _T], Dict[Union[str, int], _T]]) -> Dict[int, _T]: ... @staticmethod @overload def maketrans(x: str, y: str, z: str = ...) -> Dict[int, Union[int, None]]: ... def __getitem__(self, i: Union[int, slice]) -> str: ... def __add__(self, s: str) -> str: ... def __mul__(self, n: int) -> str: ... def __rmul__(self, n: int) -> str: ... def __mod__(self, value: Any) -> str: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: str) -> bool: ... def __le__(self, x: str) -> bool: ... def __gt__(self, x: str) -> bool: ... def __ge__(self, x: str) -> bool: ... def __len__(self) -> int: ... def __contains__(self, s: object) -> bool: ... def __iter__(self) -> Iterator[str]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __hash__(self) -> int: ... class bytes(ByteString): @overload def __init__(self, ints: Iterable[int]) -> None: ... @overload def __init__(self, string: str, encoding: str, errors: str = ...) -> None: ... @overload def __init__(self, length: int) -> None: ... @overload def __init__(self) -> None: ... @overload def __init__(self, o: SupportsBytes) -> None: ... def capitalize(self) -> bytes: ... def center(self, width: int, fillchar: bytes = ...) -> bytes: ... if sys.version_info >= (3, 3): def count(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def count(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def decode(self, encoding: str = ..., errors: str = ...) -> str: ... 
def endswith(self, suffix: Union[bytes, Tuple[bytes, ...]]) -> bool: ... def expandtabs(self, tabsize: int = ...) -> bytes: ... if sys.version_info >= (3, 3): def find(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def find(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... if sys.version_info >= (3, 5): def hex(self) -> str: ... if sys.version_info >= (3, 3): def index(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def index(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... def isdigit(self) -> bool: ... def islower(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, iterable: Iterable[bytes]) -> bytes: ... def ljust(self, width: int, fillchar: bytes = ...) -> bytes: ... def lower(self) -> bytes: ... def lstrip(self, chars: Optional[bytes] = ...) -> bytes: ... def partition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ... def replace(self, old: bytes, new: bytes, count: int = ...) -> bytes: ... if sys.version_info >= (3, 3): def rfind(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def rfind(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... if sys.version_info >= (3, 3): def rindex(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def rindex(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def rjust(self, width: int, fillchar: bytes = ...) -> bytes: ... def rpartition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ... def rsplit(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytes]: ... def rstrip(self, chars: Optional[bytes] = ...) -> bytes: ... def split(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytes]: ... def splitlines(self, keepends: bool = ...) -> List[bytes]: ... def startswith(self, prefix: Union[bytes, Tuple[bytes, ...]]) -> bool: ... def strip(self, chars: Optional[bytes] = ...) -> bytes: ... def swapcase(self) -> bytes: ... def title(self) -> bytes: ... def translate(self, table: Optional[bytes], delete: bytes = ...) -> bytes: ... def upper(self) -> bytes: ... def zfill(self, width: int) -> bytes: ... @classmethod def fromhex(cls, s: str) -> bytes: ... @classmethod def maketrans(cls, frm: bytes, to: bytes) -> bytes: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __hash__(self) -> int: ... @overload def __getitem__(self, i: int) -> int: ... @overload def __getitem__(self, s: slice) -> bytes: ... def __add__(self, s: bytes) -> bytes: ... def __mul__(self, n: int) -> bytes: ... def __rmul__(self, n: int) -> bytes: ... if sys.version_info >= (3, 5): def __mod__(self, value: Any) -> bytes: ... def __contains__(self, o: object) -> bool: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: bytes) -> bool: ... def __le__(self, x: bytes) -> bool: ... def __gt__(self, x: bytes) -> bool: ... def __ge__(self, x: bytes) -> bool: ... class bytearray(MutableSequence[int], ByteString): @overload def __init__(self, ints: Iterable[int]) -> None: ... 
@overload def __init__(self, string: str, encoding: str, errors: str = ...) -> None: ... @overload def __init__(self, length: int) -> None: ... @overload def __init__(self) -> None: ... def capitalize(self) -> bytearray: ... def center(self, width: int, fillchar: bytes = ...) -> bytearray: ... if sys.version_info >= (3, 3): def count(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def count(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def decode(self, encoding: str = ..., errors: str = ...) -> str: ... def endswith(self, suffix: bytes) -> bool: ... def expandtabs(self, tabsize: int = ...) -> bytearray: ... if sys.version_info >= (3, 3): def find(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def find(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... if sys.version_info >= (3, 5): def hex(self) -> str: ... if sys.version_info >= (3, 3): def index(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def index(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def insert(self, index: int, object: int) -> None: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... def isdigit(self) -> bool: ... def islower(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, iterable: Iterable[bytes]) -> bytearray: ... def ljust(self, width: int, fillchar: bytes = ...) -> bytearray: ... def lower(self) -> bytearray: ... def lstrip(self, chars: Optional[bytes] = ...) -> bytearray: ... def partition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ... def replace(self, old: bytes, new: bytes, count: int = ...) -> bytearray: ... if sys.version_info >= (3, 3): def rfind(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def rfind(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... if sys.version_info >= (3, 3): def rindex(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... else: def rindex(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ... def rjust(self, width: int, fillchar: bytes = ...) -> bytearray: ... def rpartition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ... def rsplit(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytearray]: ... def rstrip(self, chars: Optional[bytes] = ...) -> bytearray: ... def split(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytearray]: ... def splitlines(self, keepends: bool = ...) -> List[bytearray]: ... def startswith(self, prefix: bytes) -> bool: ... def strip(self, chars: Optional[bytes] = ...) -> bytearray: ... def swapcase(self) -> bytearray: ... def title(self) -> bytearray: ... def translate(self, table: Optional[bytes], delete: bytes = ...) -> bytearray: ... def upper(self) -> bytearray: ... def zfill(self, width: int) -> bytearray: ... @classmethod def fromhex(cls, s: str) -> bytearray: ... @classmethod def maketrans(cls, frm: bytes, to: bytes) -> bytes: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __hash__(self) -> int: ... 
@overload def __getitem__(self, i: int) -> int: ... @overload def __getitem__(self, s: slice) -> bytearray: ... @overload def __setitem__(self, i: int, x: int) -> None: ... @overload def __setitem__(self, s: slice, x: Union[Iterable[int], bytes]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... def __add__(self, s: bytes) -> bytearray: ... def __iadd__(self, s: Iterable[int]) -> bytearray: ... def __mul__(self, n: int) -> bytearray: ... def __rmul__(self, n: int) -> bytearray: ... def __imul__(self, n: int) -> bytearray: ... if sys.version_info >= (3, 5): def __mod__(self, value: Any) -> bytes: ... def __contains__(self, o: object) -> bool: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... def __lt__(self, x: bytes) -> bool: ... def __le__(self, x: bytes) -> bool: ... def __gt__(self, x: bytes) -> bool: ... def __ge__(self, x: bytes) -> bool: ... class memoryview(Sized, Container[bytes]): format = ... # type: str itemsize = ... # type: int shape = ... # type: Optional[Tuple[int, ...]] strides = ... # type: Optional[Tuple[int, ...]] suboffsets = ... # type: Optional[Tuple[int, ...]] readonly = ... # type: bool ndim = ... # type: int def __init__(self, obj: Union[str, bytes, bytearray, memoryview]) -> None: ... @overload def __getitem__(self, i: int) -> int: ... @overload def __getitem__(self, s: slice) -> memoryview: ... def __contains__(self, x: object) -> bool: ... def __iter__(self) -> Iterator[bytes]: ... def __len__(self) -> int: ... @overload def __setitem__(self, i: int, o: bytes) -> None: ... @overload def __setitem__(self, s: slice, o: Sequence[bytes]) -> None: ... @overload def __setitem__(self, s: slice, o: memoryview) -> None: ... def tobytes(self) -> bytes: ... def tolist(self) -> List[int]: ... if sys.version_info >= (3, 5): def hex(self) -> str: ... class bool(int): def __init__(self, o: object = ...) -> None: ... class slice: start = ... # type: Optional[int] step = ... # type: Optional[int] stop = ... # type: Optional[int] @overload def __init__(self, stop: Optional[int]) -> None: ... @overload def __init__(self, start: Optional[int], stop: Optional[int], step: Optional[int] = ...) -> None: ... def indices(self, len: int) -> Tuple[int, int, int]: ... class tuple(Sequence[_T_co], Generic[_T_co]): def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ... def __len__(self) -> int: ... def __contains__(self, x: object) -> bool: ... @overload def __getitem__(self, x: int) -> _T_co: ... @overload def __getitem__(self, x: slice) -> Tuple[_T_co, ...]: ... def __iter__(self) -> Iterator[_T_co]: ... def __lt__(self, x: Tuple[_T_co, ...]) -> bool: ... def __le__(self, x: Tuple[_T_co, ...]) -> bool: ... def __gt__(self, x: Tuple[_T_co, ...]) -> bool: ... def __ge__(self, x: Tuple[_T_co, ...]) -> bool: ... def __add__(self, x: Tuple[_T_co, ...]) -> Tuple[_T_co, ...]: ... def __mul__(self, n: int) -> Tuple[_T_co, ...]: ... def __rmul__(self, n: int) -> Tuple[_T_co, ...]: ... def count(self, x: Any) -> int: ... if sys.version_info >= (3, 5): def index(self, x: Any, start: int = ..., end: int = ...) -> int: ... else: def index(self, x: Any) -> int: ... class function: # TODO not defined in builtins! __name__ = ... # type: str __qualname__ = ... # type: str __module__ = ... # type: str __code__ = ... # type: CodeType __annotations__ = ... # type: Dict[str, Any] class list(MutableSequence[_T], Generic[_T]): @overload def __init__(self) -> None: ... @overload def __init__(self, iterable: Iterable[_T]) -> None: ... 
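# Illustrative sketch (not stub code) of slice.indices() as declared above:
# it normalises a slice against a sequence length into a (start, stop, step)
# triple that range() can consume directly.
s = slice(None, None, 2)                          # the slice written as [::2]
assert s.indices(5) == (0, 5, 2)
assert list(range(*s.indices(5))) == [0, 2, 4]    # positions a length-5 sequence would yield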
def clear(self) -> None: ... def copy(self) -> List[_T]: ... def append(self, object: _T) -> None: ... def extend(self, iterable: Iterable[_T]) -> None: ... def pop(self, index: int = ...) -> _T: ... def index(self, object: _T, start: int = ..., stop: int = ...) -> int: ... def count(self, object: _T) -> int: ... def insert(self, index: int, object: _T) -> None: ... def remove(self, object: _T) -> None: ... def reverse(self) -> None: ... def sort(self, *, key: Optional[Callable[[_T], Any]] = ..., reverse: bool = ...) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __hash__(self) -> int: ... @overload def __getitem__(self, i: int) -> _T: ... @overload def __getitem__(self, s: slice) -> List[_T]: ... @overload def __setitem__(self, i: int, o: _T) -> None: ... @overload def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... def __add__(self, x: List[_T]) -> List[_T]: ... def __iadd__(self, x: Iterable[_T]) -> List[_T]: ... def __mul__(self, n: int) -> List[_T]: ... def __rmul__(self, n: int) -> List[_T]: ... def __imul__(self, n: int) -> List[_T]: ... def __contains__(self, o: object) -> bool: ... def __reversed__(self) -> Iterator[_T]: ... def __gt__(self, x: List[_T]) -> bool: ... def __ge__(self, x: List[_T]) -> bool: ... def __lt__(self, x: List[_T]) -> bool: ... def __le__(self, x: List[_T]) -> bool: ... class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): # NOTE: Keyword arguments are special. If they are used, _KT must include # str, but we have no way of enforcing it here. @overload def __init__(self, **kwargs: _VT) -> None: ... @overload def __init__(self, map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... def __new__(cls: Type[_T1], *args: Any, **kwargs: Any) -> _T1: ... def clear(self) -> None: ... def copy(self) -> Dict[_KT, _VT]: ... def popitem(self) -> Tuple[_KT, _VT]: ... def setdefault(self, k: _KT, default: Optional[_VT] = ...) -> _VT: ... @overload def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def update(self, __m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... @overload def update(self, **kwargs: _VT) -> None: ... def keys(self) -> KeysView[_KT]: ... def values(self) -> ValuesView[_VT]: ... def items(self) -> ItemsView[_KT, _VT]: ... @staticmethod @overload def fromkeys(seq: Sequence[_T]) -> Dict[_T, Any]: ... # TODO: Actually a class method (mypy/issues#328) @staticmethod @overload def fromkeys(seq: Sequence[_T], value: _S) -> Dict[_T, _S]: ... def __len__(self) -> int: ... def __getitem__(self, k: _KT) -> _VT: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... def __delitem__(self, v: _KT) -> None: ... def __iter__(self) -> Iterator[_KT]: ... def __str__(self) -> str: ... class set(MutableSet[_T], Generic[_T]): def __init__(self, iterable: Iterable[_T] = ...) -> None: ... def add(self, element: _T) -> None: ... def clear(self) -> None: ... def copy(self) -> Set[_T]: ... def difference(self, *s: Iterable[object]) -> Set[_T]: ... def difference_update(self, *s: Iterable[object]) -> None: ... def discard(self, element: _T) -> None: ... def intersection(self, *s: Iterable[object]) -> Set[_T]: ... def intersection_update(self, *s: Iterable[Any]) -> None: ... def isdisjoint(self, s: Iterable[Any]) -> bool: ... def issubset(self, s: Iterable[Any]) -> bool: ... 
def issuperset(self, s: Iterable[Any]) -> bool: ... def pop(self) -> _T: ... def remove(self, element: _T) -> None: ... def symmetric_difference(self, s: Iterable[_T]) -> Set[_T]: ... def symmetric_difference_update(self, s: Iterable[_T]) -> None: ... def union(self, *s: Iterable[_T]) -> Set[_T]: ... def update(self, *s: Iterable[_T]) -> None: ... def __len__(self) -> int: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __and__(self, s: AbstractSet[object]) -> Set[_T]: ... def __iand__(self, s: AbstractSet[object]) -> Set[_T]: ... def __or__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __ior__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __sub__(self, s: AbstractSet[object]) -> Set[_T]: ... def __isub__(self, s: AbstractSet[object]) -> Set[_T]: ... def __xor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __ixor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ... def __le__(self, s: AbstractSet[object]) -> bool: ... def __lt__(self, s: AbstractSet[object]) -> bool: ... def __ge__(self, s: AbstractSet[object]) -> bool: ... def __gt__(self, s: AbstractSet[object]) -> bool: ... # TODO more set operations class frozenset(AbstractSet[_T], Generic[_T]): def __init__(self, iterable: Iterable[_T] = ...) -> None: ... def copy(self) -> FrozenSet[_T]: ... def difference(self, *s: Iterable[object]) -> FrozenSet[_T]: ... def intersection(self, *s: Iterable[object]) -> FrozenSet[_T]: ... def isdisjoint(self, s: Iterable[_T]) -> bool: ... def issubset(self, s: Iterable[object]) -> bool: ... def issuperset(self, s: Iterable[object]) -> bool: ... def symmetric_difference(self, s: Iterable[_T]) -> FrozenSet[_T]: ... def union(self, *s: Iterable[_T]) -> FrozenSet[_T]: ... def __len__(self) -> int: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __and__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ... def __or__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ... def __sub__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ... def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ... def __le__(self, s: AbstractSet[object]) -> bool: ... def __lt__(self, s: AbstractSet[object]) -> bool: ... def __ge__(self, s: AbstractSet[object]) -> bool: ... def __gt__(self, s: AbstractSet[object]) -> bool: ... class enumerate(Iterator[Tuple[int, _T]], Generic[_T]): def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ... def __iter__(self) -> Iterator[Tuple[int, _T]]: ... def __next__(self) -> Tuple[int, _T]: ... class range(Sequence[int]): start = ... # type: int stop = ... # type: int step = ... # type: int @overload def __init__(self, stop: int) -> None: ... @overload def __init__(self, start: int, stop: int, step: int = ...) -> None: ... def count(self, value: int) -> int: ... def index(self, value: int, start: int = ..., stop: Optional[int] = ...) -> int: ... def __len__(self) -> int: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[int]: ... @overload def __getitem__(self, i: int) -> int: ... @overload def __getitem__(self, s: slice) -> range: ... def __repr__(self) -> str: ... def __reversed__(self) -> Iterator[int]: ... class property: def __init__(self, fget: Optional[Callable[[Any], Any]] = ..., fset: Optional[Callable[[Any, Any], None]] = ..., fdel: Optional[Callable[[Any], None]] = ..., doc: Optional[str] = ...) -> None: ... 
def getter(self, fget: Callable[[Any], Any]) -> property: ... def setter(self, fset: Callable[[Any, Any], None]) -> property: ... def deleter(self, fdel: Callable[[Any], None]) -> property: ... def __get__(self, obj: Any, type: Optional[type] = ...) -> Any: ... def __set__(self, obj: Any, value: Any) -> None: ... def __delete__(self, obj: Any) -> None: ... def fget(self) -> Any: ... def fset(self, value: Any) -> None: ... def fdel(self) -> None: ... NotImplemented = ... # type: Any def abs(n: SupportsAbs[_T]) -> _T: ... def all(i: Iterable[object]) -> bool: ... def any(i: Iterable[object]) -> bool: ... def ascii(o: object) -> str: ... def bin(number: int) -> str: ... def callable(o: object) -> bool: ... def chr(code: int) -> str: ... def compile(source: Any, filename: Union[str, bytes], mode: str, flags: int = ..., dont_inherit: int = ...) -> CodeType: ... def copyright() -> None: ... def credits() -> None: ... def delattr(o: Any, name: str) -> None: ... def dir(o: object = ...) -> List[str]: ... _N = TypeVar('_N', int, float) def divmod(a: _N, b: _N) -> Tuple[_N, _N]: ... def eval(source: Union[str, bytes, CodeType], globals: Optional[Dict[str, Any]] = ..., locals: Optional[Mapping[str, Any]] = ...) -> Any: ... def exec(object: Union[str, bytes, CodeType], globals: Optional[Dict[str, Any]] = ..., locals: Optional[Mapping[str, Any]] = ...) -> Any: ... def exit(code: Any = ...) -> NoReturn: ... @overload def filter(function: Optional[Callable[[_T], Any]], iterable: Iterable[_T]) -> Iterator[_T]: ... @overload def filter(function: None, iterable: Iterable[Optional[_T]]) -> Iterator[_T]: ... def format(o: object, format_spec: str = ...) -> str: ... def getattr(o: Any, name: str, default: Any = ...) -> Any: ... def globals() -> Dict[str, Any]: ... def hasattr(o: Any, name: str) -> bool: ... def hash(o: object) -> int: ... def help(*args: Any, **kwds: Any) -> None: ... def hex(i: int) -> str: ... # TODO __index__ def id(o: object) -> int: ... def input(prompt: Optional[Any] = ...) -> str: ... @overload def iter(iterable: Iterable[_T]) -> Iterator[_T]: ... @overload def iter(function: Callable[[], _T], sentinel: _T) -> Iterator[_T]: ... def isinstance(o: object, t: Union[type, Tuple[Union[type, Tuple], ...]]) -> bool: ... def issubclass(cls: type, classinfo: Union[type, Tuple[Union[type, Tuple], ...]]) -> bool: ... def len(o: Sized) -> int: ... def license() -> None: ... def locals() -> Dict[str, Any]: ... @overload def map(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> Iterator[_S]: ... @overload def map(func: Callable[[_T1, _T2], _S], iter1: Iterable[_T1], iter2: Iterable[_T2]) -> Iterator[_S]: ... # TODO more than two iterables @overload def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def max(iterable: Iterable[_T], key: Callable[[_T], Any] = ..., default: _T = ...) -> _T: ... @overload def min(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ... @overload def min(iterable: Iterable[_T], key: Callable[[_T], Any] = ..., default: _T = ...) -> _T: ... @overload def next(i: Iterator[_T]) -> _T: ... @overload def next(i: Iterator[_T], default: _VT) -> Union[_T, _VT]: ... def oct(i: int) -> str: ... # TODO __index__ if sys.version_info >= (3, 6): # This class is to be exported as PathLike from os, # but we define it here as _PathLike to avoid import cycle issues. # See https://github.com/python/typeshed/pull/991#issuecomment-288160993 class _PathLike(Generic[AnyStr]): def __fspath__(self) -> AnyStr: ... 
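# Illustrative sketch (not stub code), assuming Python 3.6+: the __fspath__
# protocol declared for _PathLike above is what os.fspath() calls, and such
# objects are accepted by the 3.6 branch of the open() stub that follows.
# The Config class name here is made up purely for the example.
import os

class Config:
    def __fspath__(self) -> str:
        return "settings.ini"

assert os.fspath(Config()) == "settings.ini"   # os.fspath() defers to __fspath__()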
def open(file: Union[str, bytes, int, _PathLike], mode: str = ..., buffering: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ...) -> IO[Any]: ... else: def open(file: Union[str, bytes, int], mode: str = ..., buffering: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ...) -> IO[Any]: ... def ord(c: Union[str, bytes, bytearray]) -> int: ... # TODO: in Python 3.2, print() does not support flush def print(*values: Any, sep: str = ..., end: str = ..., file: Optional[IO[str]] = ..., flush: bool = ...) -> None: ... @overload def pow(x: int, y: int) -> Any: ... # The return type can be int or float, depending on y @overload def pow(x: int, y: int, z: int) -> Any: ... @overload def pow(x: float, y: float) -> float: ... @overload def pow(x: float, y: float, z: float) -> float: ... def quit(code: Optional[int] = ...) -> None: ... @overload def reversed(object: Reversible[_T]) -> Iterator[_T]: ... @overload def reversed(object: Sequence[_T]) -> Iterator[_T]: ... def repr(o: object) -> str: ... @overload def round(number: float) -> int: ... @overload def round(number: float, ndigits: int) -> float: ... # Always return a float if given ndigits. @overload def round(number: SupportsRound[_T]) -> _T: ... @overload def round(number: SupportsRound[_T], ndigits: int) -> _T: ... def setattr(object: Any, name: str, value: Any) -> None: ... def sorted(iterable: Iterable[_T], *, key: Optional[Callable[[_T], Any]] = ..., reverse: bool = ...) -> List[_T]: ... @overload def sum(iterable: Iterable[_T]) -> Union[_T, int]: ... @overload def sum(iterable: Iterable[_T], start: _S) -> Union[_T, _S]: ... def vars(object: Any = ...) -> Dict[str, Any]: ... @overload def zip(iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ... @overload def zip(iter1: Iterable[_T1], iter2: Iterable[_T2]) -> Iterator[Tuple[_T1, _T2]]: ... @overload def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3]) -> Iterator[Tuple[_T1, _T2, _T3]]: ... @overload def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4]) -> Iterator[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5]) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def zip(iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any], iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any], *iterables: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ... def __import__(name: str, globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ..., fromlist: List[str] = ..., level: int = ...) -> Any: ... # Ellipsis # Actually the type of Ellipsis is , but since it's # not exposed anywhere under that name, we make it private here. class ellipsis: ... Ellipsis = ... # type: ellipsis # Exceptions class BaseException: args = ... # type: Tuple[Any, ...] __cause__ = ... # type: BaseException __context__ = ... # type: BaseException __traceback__ = ... # type: TracebackType def __init__(self, *args: object, **kwargs: object) -> None: ... def with_traceback(self, tb: Any) -> BaseException: ... class GeneratorExit(BaseException): ... class KeyboardInterrupt(BaseException): ... class SystemExit(BaseException): code = 0 class Exception(BaseException): ... class ArithmeticError(Exception): ... class EnvironmentError(Exception): errno = 0 strerror = ... # type: str # TODO can this be bytes? 
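# Illustrative sketch (not stub code) of the overloaded round() and pow()
# builtins declared earlier in this file: round() gives int without ndigits
# and float with it; three-argument pow() performs modular exponentiation.
assert round(2.5) == 2             # one-argument form -> int; ties round to the even integer
assert round(2.675, 2) == 2.67     # ndigits form -> float; 2.675 is stored as slightly under 2.675
assert pow(2, 10) == 1024
assert pow(7, 3, 5) == 3           # (7 ** 3) % 5, computed efficiently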
filename = ... # type: str class LookupError(Exception): ... class RuntimeError(Exception): ... class ValueError(Exception): ... class AssertionError(Exception): ... class AttributeError(Exception): ... class BufferError(Exception): ... class EOFError(Exception): ... class FloatingPointError(ArithmeticError): ... class IOError(EnvironmentError): ... class ImportError(Exception): ... if sys.version_info >= (3, 6): class ModuleNotFoundError(ImportError): ... class IndexError(LookupError): ... class KeyError(LookupError): ... class MemoryError(Exception): ... class NameError(Exception): ... class NotImplementedError(RuntimeError): ... class OSError(EnvironmentError): ... class BlockingIOError(OSError): characters_written = 0 class ChildProcessError(OSError): ... class ConnectionError(OSError): ... class BrokenPipeError(ConnectionError): ... class ConnectionAbortedError(ConnectionError): ... class ConnectionRefusedError(ConnectionError): ... class ConnectionResetError(ConnectionError): ... class FileExistsError(OSError): ... class FileNotFoundError(OSError): ... class InterruptedError(OSError): ... class IsADirectoryError(OSError): ... class NotADirectoryError(OSError): ... class PermissionError(OSError): ... class ProcessLookupError(OSError): ... class TimeoutError(OSError): ... class WindowsError(OSError): winerror = ... # type: int class OverflowError(ArithmeticError): ... class ReferenceError(Exception): ... class StopIteration(Exception): value = ... # type: Any if sys.version_info >= (3, 5): class StopAsyncIteration(Exception): value = ... # type: Any class RecursionError(RuntimeError): ... class SyntaxError(Exception): msg = ... # type: str lineno = ... # type: int offset = ... # type: int text = ... # type: str filename = ... # type: str class IndentationError(SyntaxError): ... class TabError(IndentationError): ... class SystemError(Exception): ... class TypeError(Exception): ... class UnboundLocalError(NameError): ... class UnicodeError(ValueError): ... class UnicodeDecodeError(UnicodeError): encoding = ... # type: str object = ... # type: bytes start = ... # type: int end = ... # type: int reason = ... # type: str def __init__(self, __encoding: str, __object: bytes, __start: int, __end: int, __reason: str) -> None: ... class UnicodeEncodeError(UnicodeError): encoding = ... # type: str object = ... # type: str start = ... # type: int end = ... # type: int reason = ... # type: str def __init__(self, __encoding: str, __object: str, __start: int, __end: int, __reason: str) -> None: ... class UnicodeTranslateError(UnicodeError): ... class ZeroDivisionError(ArithmeticError): ... class Warning(Exception): ... class UserWarning(Warning): ... class DeprecationWarning(Warning): ... class SyntaxWarning(Warning): ... class RuntimeWarning(Warning): ... class FutureWarning(Warning): ... class PendingDeprecationWarning(Warning): ... class ImportWarning(Warning): ... class UnicodeWarning(Warning): ... class BytesWarning(Warning): ... class ResourceWarning(Warning): ... mypy-0.560/typeshed/stdlib/3/collections/0000755€tŠÔÚ€2›s®0000000000013215007244024460 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/collections/__init__.pyi0000644€tŠÔÚ€2›s®0000003251013215007212026736 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for collections # Based on http://docs.python.org/3.2/library/collections.html # These are not exported. import sys import typing from typing import ( TypeVar, Generic, Dict, overload, List, Tuple, Any, Type, Optional, Union ) # These are exported. 
from . import abc from typing import ( Callable as Callable, Container as Container, Hashable as Hashable, Iterable as Iterable, Iterator as Iterator, Sized as Sized, Generator as Generator, ByteString as ByteString, Reversible as Reversible, Mapping as Mapping, MappingView as MappingView, ItemsView as ItemsView, KeysView as KeysView, ValuesView as ValuesView, MutableMapping as MutableMapping, Sequence as Sequence, MutableSequence as MutableSequence, MutableSet as MutableSet, AbstractSet as Set, ) if sys.version_info >= (3, 6): from typing import ( Collection as Collection, AsyncGenerator as AsyncGenerator, ) if sys.version_info >= (3, 5): from typing import ( Awaitable as Awaitable, Coroutine as Coroutine, AsyncIterable as AsyncIterable, AsyncIterator as AsyncIterator, ) _T = TypeVar('_T') _KT = TypeVar('_KT') _VT = TypeVar('_VT') # namedtuple is special-cased in the type checker; the initializer is ignored. if sys.version_info >= (3, 6): def namedtuple(typename: str, field_names: Union[str, Iterable[str]], *, verbose: bool = ..., rename: bool = ..., module: Optional[str] = ...) -> Type[tuple]: ... else: def namedtuple(typename: str, field_names: Union[str, Iterable[str]], verbose: bool = ..., rename: bool = ...) -> Type[tuple]: ... _UserDictT = TypeVar('_UserDictT', bound=UserDict) class UserDict(MutableMapping[_KT, _VT]): def __len__(self) -> int: ... def __getitem__(self, key: _KT) -> _VT: ... def __setitem__(self, key: _KT, item: _VT) -> None: ... def __delitem__(self, key: _KT) -> None: ... def __iter__(self) -> Iterator[_KT]: ... def __contains__(self, key: object) -> bool: ... def copy(self: _UserDictT) -> _UserDictT: ... @classmethod def fromkeys(cls: Type[_UserDictT], iterable: Iterable[_KT], value: Optional[_VT] = ...) -> _UserDictT: ... _UserListT = TypeVar('_UserListT', bound=UserList) class UserList(MutableSequence[_T]): def __init__(self, initlist: Optional[Iterable[_T]] = ...) -> None: ... def __lt__(self, other: object) -> bool: ... def __le__(self, other: object) -> bool: ... def __gt__(self, other: object) -> bool: ... def __ge__(self, other: object) -> bool: ... def __contains__(self, item: object) -> bool: ... def __len__(self) -> int: ... @overload def __getitem__(self, i: int) -> _T: ... @overload def __getitem__(self, i: slice) -> Sequence[_T]: ... @overload def __setitem__(self, i: int, o: _T) -> None: ... @overload def __setitem__(self, i: slice, o: Iterable[_T]) -> None: ... def __delitem__(self, i: Union[int, slice]) -> None: ... def __add__(self: _UserListT, other: Iterable[_T]) -> _UserListT: ... def __iadd__(self: _UserListT, other: Iterable[_T]) -> _UserListT: ... def __mul__(self: _UserListT, n: int) -> _UserListT: ... def __imul__(self: _UserListT, n: int) -> _UserListT: ... def append(self, item: _T) -> None: ... def insert(self, i: int, item: _T) -> None: ... def pop(self, i: int = ...) -> _T: ... def remove(self, item: _T) -> None: ... def clear(self) -> None: ... def copy(self: _UserListT) -> _UserListT: ... def count(self, item: _T) -> int: ... def index(self, item: _T, *args: Any) -> int: ... def reverse(self) -> None: ... def sort(self, *args: Any, **kwds: Any) -> None: ... def extend(self, other: Iterable[_T]) -> None: ... _UserStringT = TypeVar('_UserStringT', bound=UserString) class UserString(Sequence[str]): def __init__(self, seq: object) -> None: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __complex__(self) -> complex: ... if sys.version_info >= (3, 5): def __getnewargs__(self) -> Tuple[str]: ... 
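# Illustrative sketch (not stub code): namedtuple() above is only typed as
# returning Type[tuple], but, as the comment notes, mypy special-cases the
# call itself. Plain runtime behaviour for reference:
from collections import namedtuple

Point = namedtuple("Point", ["x", "y"])
p = Point(2, 3)
assert (p.x, p.y) == (2, 3)    # field access by name
assert p == (2, 3)             # still an ordinary tuple underneath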
def __lt__(self, string: Union[str, UserString]) -> bool: ... def __le__(self, string: Union[str, UserString]) -> bool: ... def __gt__(self, string: Union[str, UserString]) -> bool: ... def __ge__(self, string: Union[str, UserString]) -> bool: ... def __contains__(self, char: object) -> bool: ... def __len__(self) -> int: ... # It should return a str to implement Sequence correctly, but it doesn't. def __getitem__(self: _UserStringT, i: Union[int, slice]) -> _UserStringT: ... # type: ignore def __add__(self: _UserStringT, other: object) -> _UserStringT: ... def __mul__(self: _UserStringT, n: int) -> _UserStringT: ... def __mod__(self: _UserStringT, args: Any) -> _UserStringT: ... def capitalize(self: _UserStringT) -> _UserStringT: ... if sys.version_info >= (3, 5): def casefold(self: _UserStringT) -> _UserStringT: ... def center(self: _UserStringT, width: int, *args: Any) -> _UserStringT: ... def count(self, sub: Union[str, UserString], start: int = ..., end: int = ...) -> int: ... def encode(self: _UserStringT, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> _UserStringT: ... def endswith(self, suffix: Union[str, Tuple[str, ...]], start: int = ..., end: int = ...) -> bool: ... def expandtabs(self: _UserStringT, tabsize: int = ...) -> _UserStringT: ... def find(self, sub: Union[str, UserString], start: int = ..., end: int = ...) -> int: ... def format(self, *args: Any, **kwds: Any) -> str: ... if sys.version_info >= (3, 5): def format_map(self, mapping: Mapping[str, Any]) -> str: ... def index(self, sub: str, start: int = ..., end: int = ...) -> int: ... def isalpha(self) -> bool: ... def isalnum(self) -> bool: ... def isdecimal(self) -> bool: ... def isdigit(self) -> bool: ... def isidentifier(self) -> bool: ... def islower(self) -> bool: ... def isnumeric(self) -> bool: ... if sys.version_info >= (3, 5): def isprintable(self) -> bool: ... def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, seq: Iterable[str]) -> str: ... def ljust(self: _UserStringT, width: int, *args: Any) -> _UserStringT: ... def lower(self: _UserStringT) -> _UserStringT: ... def lstrip(self: _UserStringT, chars: Optional[str] = ...) -> _UserStringT: ... if sys.version_info >= (3, 5): @staticmethod @overload def maketrans(x: Union[Dict[int, _T], Dict[str, _T], Dict[Union[str, int], _T]]) -> Dict[int, _T]: ... @staticmethod @overload def maketrans(x: str, y: str, z: str = ...) -> Dict[int, Union[int, None]]: ... def partition(self, sep: str) -> Tuple[str, str, str]: ... def replace(self: _UserStringT, old: Union[str, UserString], new: Union[str, UserString], maxsplit: int = ...) -> _UserStringT: ... def rfind(self, sub: Union[str, UserString], start: int = ..., end: int = ...) -> int: ... def rindex(self, sub: Union[str, UserString], start: int = ..., end: int = ...) -> int: ... def rjust(self: _UserStringT, width: int, *args: Any) -> _UserStringT: ... def rpartition(self, sep: str) -> Tuple[str, str, str]: ... def rstrip(self: _UserStringT, chars: Optional[str] = ...) -> _UserStringT: ... def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ... def splitlines(self, keepends: bool = ...) -> List[str]: ... def startswith(self, prefix: Union[str, Tuple[str, ...]], start: int = ..., end: int = ...) -> bool: ... def strip(self: _UserStringT, chars: Optional[str] = ...) -> _UserStringT: ... def swapcase(self: _UserStringT) -> _UserStringT: ... 
def title(self: _UserStringT) -> _UserStringT: ... def translate(self: _UserStringT, *args: Any) -> _UserStringT: ... def upper(self: _UserStringT) -> _UserStringT: ... def zfill(self: _UserStringT, width: int) -> _UserStringT: ... # Technically, deque only derives from MutableSequence in 3.5. # But in practice it's not worth losing sleep over. class deque(MutableSequence[_T], Generic[_T]): maxlen = ... # type: Optional[int] # TODO readonly def __init__(self, iterable: Iterable[_T] = ..., maxlen: int = ...) -> None: ... def append(self, x: _T) -> None: ... def appendleft(self, x: _T) -> None: ... def clear(self) -> None: ... if sys.version_info >= (3, 5): def copy(self) -> deque[_T]: ... def count(self, x: _T) -> int: ... def extend(self, iterable: Iterable[_T]) -> None: ... def extendleft(self, iterable: Iterable[_T]) -> None: ... if sys.version_info >= (3, 5): def insert(self, i: int, x: _T) -> None: ... def index(self, x: _T, start: int = ..., stop: int = ...) -> int: ... def pop(self, i: int = ...) -> _T: ... def popleft(self) -> _T: ... def remove(self, value: _T) -> None: ... def reverse(self) -> None: ... def rotate(self, n: int) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... def __str__(self) -> str: ... def __hash__(self) -> int: ... # These methods of deque don't really take slices, but we need to # define them as taking a slice to satisfy MutableSequence. @overload def __getitem__(self, index: int) -> _T: ... @overload def __getitem__(self, s: slice) -> Sequence[_T]: raise TypeError @overload def __setitem__(self, i: int, x: _T) -> None: ... @overload def __setitem__(self, s: slice, o: Iterable[_T]) -> None: raise TypeError @overload def __delitem__(self, i: int) -> None: ... @overload def __delitem__(self, s: slice) -> None: raise TypeError def __contains__(self, o: object) -> bool: ... def __reversed__(self) -> Iterator[_T]: ... if sys.version_info >= (3, 5): def __add__(self, other: deque[_T]) -> deque[_T]: ... def __mul__(self, other: int) -> deque[_T]: ... def __imul__(self, other: int) -> None: ... class Counter(Dict[_T, int], Generic[_T]): @overload def __init__(self, **kwargs: int) -> None: ... @overload def __init__(self, mapping: Mapping[_T, int]) -> None: ... @overload def __init__(self, iterable: Iterable[_T]) -> None: ... def elements(self) -> Iterator[_T]: ... def most_common(self, n: Optional[int] = ...) -> List[Tuple[_T, int]]: ... @overload def subtract(self, __mapping: Mapping[_T, int]) -> None: ... @overload def subtract(self, iterable: Iterable[_T]) -> None: ... # The Iterable[Tuple[...]] argument type is not actually desirable # (the tuples will be added as keys, breaking type safety) but # it's included so that the signature is compatible with # Dict.update. Not sure if we should use '# type: ignore' instead # and omit the type from the union. @overload def update(self, __m: Mapping[_T, int], **kwargs: int) -> None: ... @overload def update(self, __m: Union[Iterable[_T], Iterable[Tuple[_T, int]]], **kwargs: int) -> None: ... @overload def update(self, **kwargs: int) -> None: ... def __add__(self, other: Counter[_T]) -> Counter[_T]: ... def __sub__(self, other: Counter[_T]) -> Counter[_T]: ... def __and__(self, other: Counter[_T]) -> Counter[_T]: ... def __or__(self, other: Counter[_T]) -> Counter[_T]: ... def __pos__(self) -> Counter[_T]: ... def __neg__(self) -> Counter[_T]: ... def __iadd__(self, other: Counter[_T]) -> Counter[_T]: ... def __isub__(self, other: Counter[_T]) -> Counter[_T]: ... 
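# Illustrative sketch (not stub code) of the Counter arithmetic declared
# above: '+' sums counts, '-' drops non-positive results, and most_common()
# returns (element, count) pairs per its List[Tuple[_T, int]] return type.
from collections import Counter

c = Counter("abracadabra")
assert c.most_common(2) == [("a", 5), ("b", 2)]
assert c + Counter("ba") == Counter({"a": 6, "b": 3, "r": 2, "c": 1, "d": 1})
assert (c - Counter(a=10))["a"] == 0   # subtraction never yields negative counts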
def __iand__(self, other: Counter[_T]) -> Counter[_T]: ... def __ior__(self, other: Counter[_T]) -> Counter[_T]: ... class OrderedDict(Dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ... def move_to_end(self, key: _KT, last: bool = ...) -> None: ... def __reversed__(self) -> Iterator[_KT]: ... def __copy__(self) -> OrderedDict[_KT, _VT]: ... class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]): default_factory = ... # type: Callable[[], _VT] @overload def __init__(self, **kwargs: _VT) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]]) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], map: Mapping[_KT, _VT]) -> None: ... @overload def __init__(self, default_factory: Optional[Callable[[], _VT]], iterable: Iterable[Tuple[_KT, _VT]]) -> None: ... def __missing__(self, key: _KT) -> _VT: ... # TODO __reversed__ if sys.version_info >= (3, 3): class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __init__(self, *maps: Mapping[_KT, _VT]) -> None: ... @property def maps(self) -> List[Mapping[_KT, _VT]]: ... def new_child(self, m: Mapping[_KT, _VT] = ...) -> typing.ChainMap[_KT, _VT]: ... @property def parents(self) -> typing.ChainMap[_KT, _VT]: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... def __delitem__(self, v: _KT) -> None: ... def __getitem__(self, k: _KT) -> _VT: ... def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... mypy-0.560/typeshed/stdlib/3/collections/abc.pyi0000644€tŠÔÚ€2›s®0000000215213215007212025723 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for collections.abc (introduced from Python 3.3) # # https://docs.python.org/3.3/whatsnew/3.3.html#collections import sys if sys.version_info >= (3, 3): from . import ( Container as Container, Hashable as Hashable, Iterable as Iterable, Iterator as Iterator, Sized as Sized, Callable as Callable, Mapping as Mapping, MutableMapping as MutableMapping, Sequence as Sequence, MutableSequence as MutableSequence, Set as Set, MutableSet as MutableSet, MappingView as MappingView, ItemsView as ItemsView, KeysView as KeysView, ValuesView as ValuesView, ) if sys.version_info >= (3, 5): from . import ( Generator as Generator, ByteString as ByteString, Awaitable as Awaitable, Coroutine as Coroutine, AsyncIterable as AsyncIterable, AsyncIterator as AsyncIterator, ) if sys.version_info >= (3, 6): from . import ( Collection as Collection, Reversible as Reversible, AsyncGenerator as AsyncGenerator, ) mypy-0.560/typeshed/stdlib/3/compileall.pyi0000644€tŠÔÚ€2›s®0000000223713215007212025005 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for compileall (Python 3) import os import sys from typing import Optional, Union, Pattern if sys.version_info < (3, 6): _Path = Union[str, bytes] _SuccessType = bool else: _Path = Union[str, bytes, os.PathLike] _SuccessType = int # rx can be any object with a 'search' method; once we have Protocols we can change the type if sys.version_info < (3, 5): def compile_dir(dir: _Path, maxlevels: int = ..., ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern] = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ...) -> _SuccessType: ... else: def compile_dir(dir: _Path, maxlevels: int = ..., ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern] = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ..., workers: int = ...) -> _SuccessType: ... 
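# Illustrative sketch (not stub code) of the defaultdict and ChainMap stubs in
# collections/__init__.pyi above: __missing__ consults default_factory, and
# ChainMap searches its maps in order while writing only to the first one.
from collections import ChainMap, defaultdict

counts = defaultdict(int)              # default_factory=int, so missing keys start at 0
counts["spam"] += 1
assert counts["spam"] == 1 and counts["eggs"] == 0

overrides, defaults = {"colour": "red"}, {"colour": "blue", "size": 10}
cfg = ChainMap(overrides, defaults)
assert cfg["colour"] == "red" and cfg["size"] == 10
cfg["size"] = 12                       # writes land in the first mapping only
assert overrides["size"] == 12 and defaults["size"] == 10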
def compile_file(fullname: _Path, ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern] = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ...) -> _SuccessType: ... def compile_path(skip_curdir: bool = ..., maxlevels: int = ..., force: bool = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ...) -> _SuccessType: ... mypy-0.560/typeshed/stdlib/3/concurrent/0000755€tŠÔÚ€2›s®0000000000013215007244024324 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/concurrent/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212026567 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/concurrent/futures/0000755€tŠÔÚ€2›s®0000000000013215007244026021 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/concurrent/futures/__init__.pyi0000644€tŠÔÚ€2›s®0000000015413215007212030276 0ustar jukkaDROPBOX\Domain Users00000000000000from ._base import * # noqa: F403 from .thread import * # noqa: F403 from .process import * # noqa: F403 mypy-0.560/typeshed/stdlib/3/concurrent/futures/_base.pyi0000644€tŠÔÚ€2›s®0000000355113215007212027614 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Tuple, Optional, Set, NamedTuple FIRST_COMPLETED = ... # type: str FIRST_EXCEPTION = ... # type: str ALL_COMPLETED = ... # type: str PENDING = ... # type: Any RUNNING = ... # type: Any CANCELLED = ... # type: Any CANCELLED_AND_NOTIFIED = ... # type: Any FINISHED = ... # type: Any LOGGER = ... # type: Any class Error(Exception): ... class CancelledError(Error): ... class TimeoutError(Error): ... DoneAndNotDoneFutures = NamedTuple('DoneAndNotDoneFutures', [('done', Future[Any]), ('not_done', Future[Any])]) _T = TypeVar('_T') class Future(Generic[_T]): def __init__(self) -> None: ... def cancel(self) -> bool: ... def cancelled(self) -> bool: ... def running(self) -> bool: ... def done(self) -> bool: ... def add_done_callback(self, fn: Callable[[Future[_T]], Any]) -> None: ... def result(self, timeout: Optional[float] = ...) -> _T: ... def exception(self, timeout: Optional[float] = ...) -> Optional[BaseException]: ... def set_running_or_notify_cancel(self) -> None: ... def set_result(self, result: _T) -> None: ... def set_exception(self, exception: Optional[BaseException]) -> None: ... class Executor: def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ... def map(self, func: Callable[..., _T], *iterables: Iterable[Any], timeout: Optional[float] = ..., chunksize: int = ...) -> Iterator[_T]: ... def shutdown(self, wait: bool = ...) -> None: ... def __enter__(self: _T) -> _T: ... def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool: ... def as_completed(fs: Iterable[Future[_T]], timeout: Optional[float] = ...) -> Iterator[Future[_T]]: ... def wait(fs: Iterable[Future[_T]], timeout: Optional[float] = ..., return_when: str = ...) -> Tuple[Set[Future[_T]], Set[Future[_T]]]: ... mypy-0.560/typeshed/stdlib/3/concurrent/futures/process.pyi0000644€tŠÔÚ€2›s®0000000040413215007212030213 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional, Any from ._base import Future, Executor EXTRA_QUEUED_CALLS = ... # type: Any class BrokenProcessPool(RuntimeError): ... class ProcessPoolExecutor(Executor): def __init__(self, max_workers: Optional[int] = ...) -> None: ... 
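# Illustrative sketch (not stub code) of how the Executor/Future API stubbed
# in _base.pyi above is typically used, here via ThreadPoolExecutor (whose
# stub follows in thread.pyi). The square() helper is made up for the example.
from concurrent.futures import ThreadPoolExecutor, as_completed

def square(n: int) -> int:
    return n * n

with ThreadPoolExecutor(max_workers=2) as pool:
    futures = [pool.submit(square, n) for n in range(5)]    # each submit() returns a Future[int]
    results = sorted(f.result() for f in as_completed(futures))
assert results == [0, 1, 4, 9, 16]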
mypy-0.560/typeshed/stdlib/3/concurrent/futures/thread.pyi0000644€tŠÔÚ€2›s®0000000054513215007212030012 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional from ._base import Executor, Future import sys class ThreadPoolExecutor(Executor): if sys.version_info >= (3, 6): def __init__(self, max_workers: Optional[int] = ..., thread_name_prefix: str = ...) -> None: ... else: def __init__(self, max_workers: Optional[int] = ...) -> None: ... mypy-0.560/typeshed/stdlib/3/configparser.pyi0000644€tŠÔÚ€2›s®0000001771013215007212025350 0ustar jukkaDROPBOX\Domain Users00000000000000# Based on http://docs.python.org/3.5/library/configparser.html and on # reading configparser.py. import sys from typing import (AbstractSet, MutableMapping, Mapping, Dict, Sequence, List, Union, Iterable, Iterator, Callable, Any, IO, overload, Optional, Pattern, Type, TypeVar) # Types only used in type comments only from typing import Optional, Tuple # noqa if sys.version_info >= (3, 6): from os import PathLike # Internal type aliases _section = Mapping[str, str] _parser = MutableMapping[str, _section] _converter = Callable[[str], Any] _converters = Dict[str, _converter] _T = TypeVar('_T') if sys.version_info >= (3, 6): _Path = Union[str, PathLike[str]] else: _Path = str DEFAULTSECT: str MAX_INTERPOLATION_DEPTH: int class Interpolation: def before_get(self, parser: _parser, section: str, option: str, value: str, defaults: _section) -> str: ... def before_set(self, parser: _parser, section: str, option: str, value: str) -> str: ... def before_read(self, parser: _parser, section: str, option: str, value: str) -> str: ... def before_write(self, parser: _parser, section: str, option: str, value: str) -> str: ... class BasicInterpolation(Interpolation): ... class ExtendedInterpolation(Interpolation): ... class LegacyInterpolation(Interpolation): ... class RawConfigParser(_parser): def __init__(self, defaults: Optional[_section] = ..., dict_type: Type[Mapping[str, str]] = ..., allow_no_value: bool = ..., *, delimiters: Sequence[str] = ..., comment_prefixes: Sequence[str] = ..., inline_comment_prefixes: Optional[Sequence[str]] = ..., strict: bool = ..., empty_lines_in_values: bool = ..., default_section: str = ..., interpolation: Optional[Interpolation] = ...) -> None: ... def __len__(self) -> int: ... def __getitem__(self, section: str) -> SectionProxy: ... def __setitem__(self, section: str, options: _section) -> None: ... def __delitem__(self, section: str) -> None: ... def __iter__(self) -> Iterator[str]: ... def defaults(self) -> _section: ... def sections(self) -> List[str]: ... def add_section(self, section: str) -> None: ... def has_section(self, section: str) -> bool: ... def options(self, section: str) -> List[str]: ... def has_option(self, section: str, option: str) -> bool: ... def read(self, filenames: Union[_Path, Iterable[_Path]], encoding: Optional[str] = ...) -> List[str]: ... def read_file(self, f: Iterable[str], source: Optional[str] = ...) -> None: ... def read_string(self, string: str, source: str = ...) -> None: ... def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = ...) -> None: ... # These get* methods are partially applied (with the same names) in # SectionProxy; the stubs should be kept updated together def getint(self, section: str, option: str, *, raw: bool = ..., vars: _section = ..., fallback: int = ...) -> int: ... def getfloat(self, section: str, option: str, *, raw: bool = ..., vars: _section = ..., fallback: float = ...) -> float: ... 
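# Illustrative sketch (not stub code) of the typed get*() accessors on
# RawConfigParser/ConfigParser, including the keyword-only fallback=
# parameter reflected in the signatures. Section and option names are made up.
from configparser import ConfigParser

parser = ConfigParser()
parser.read_string("[server]\nport = 8080\ntimeout = 2.5\n")
assert parser.getint("server", "port") == 8080
assert parser.getfloat("server", "timeout") == 2.5
assert parser.getboolean("server", "debug", fallback=False) is False   # missing option -> fallback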
def getboolean(self, section: str, option: str, *, raw: bool = ..., vars: _section = ..., fallback: bool = ...) -> bool: ... def _get_conv(self, section: str, option: str, conv: Callable[[str], _T], *, raw: bool = ..., vars: _section = ..., fallback: _T = ...) -> _T: ... # This is incompatible with MutableMapping so we ignore the type def get(self, section: str, option: str, *, raw: bool = ..., vars: _section = ..., fallback: str = ...) -> str: # type: ignore ... @overload def items(self, *, raw: bool = ..., vars: _section = ...) -> AbstractSet[Tuple[str, SectionProxy]]: ... @overload def items(self, section: str, raw: bool = ..., vars: _section = ...) -> List[Tuple[str, str]]: ... def set(self, section: str, option: str, value: str) -> None: ... def write(self, fileobject: IO[str], space_around_delimiters: bool = ...) -> None: ... def remove_option(self, section: str, option: str) -> bool: ... def remove_section(self, section: str) -> bool: ... def optionxform(self, option: str) -> str: ... class ConfigParser(RawConfigParser): def __init__(self, defaults: Optional[_section] = ..., dict_type: Mapping[str, str] = ..., allow_no_value: bool = ..., delimiters: Sequence[str] = ..., comment_prefixes: Sequence[str] = ..., inline_comment_prefixes: Optional[Sequence[str]] = ..., strict: bool = ..., empty_lines_in_values: bool = ..., default_section: str = ..., interpolation: Optional[Interpolation] = ..., converters: _converters = ...) -> None: ... class SafeConfigParser(ConfigParser): ... class SectionProxy(MutableMapping[str, str]): def __init__(self, parser: RawConfigParser, name: str) -> None: ... def __getitem__(self, key: str) -> str: ... def __setitem__(self, key: str, value: str) -> None: ... def __delitem__(self, key: str) -> None: ... def __contains__(self, key: object) -> bool: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[str]: ... @property def parser(self) -> RawConfigParser: ... @property def name(self) -> str: ... def get(self, option: str, fallback: Optional[str] = ..., *, raw: bool = ..., vars: Optional[_section] = ..., **kwargs: Any) -> str: ... # type: ignore # These are partially-applied version of the methods with the same names in # RawConfigParser; the stubs should be kept updated together def getint(self, option: str, *, raw: bool = ..., vars: _section = ..., fallback: int = ...) -> int: ... def getfloat(self, option: str, *, raw: bool = ..., vars: _section = ..., fallback: float = ...) -> float: ... def getboolean(self, option: str, *, raw: bool = ..., vars: _section = ..., fallback: bool = ...) -> bool: ... # SectionProxy can have arbitrary attributes when custon converters are used def __getattr__(self, key: str) -> Callable[..., Any]: ... class ConverterMapping(MutableMapping[str, Optional[_converter]]): GETTERCRE: Pattern def __init__(self, parser: RawConfigParser) -> None: ... def __getitem__(self, key: str) -> _converter: ... def __setitem__(self, key: str, value: Optional[_converter]) -> None: ... def __delitem__(self, key: str) -> None: ... def __iter__(self) -> Iterator[str]: ... def __len__(self) -> int: ... class Error(Exception): pass class NoSectionError(Error): pass class DuplicateSectionError(Error): section = ... # type: str source = ... # type: Optional[str] lineno = ... # type: Optional[int] class DuplicateOptionError(Error): section = ... # type: str option = ... # type: str source = ... # type: Optional[str] lineno = ... # type: Optional[int] class NoOptionError(Error): section = ... # type: str option = ... 
# type: str class InterpolationError(Error): section = ... # type: str option = ... # type: str class InterpolationDepthError(InterpolationError): pass class InterpolationMissingOptionError(InterpolationError): reference = ... # type: str class InterpolationSyntaxError(InterpolationError): pass class ParsingError: source = ... # type: str errors = ... # type: Sequence[Tuple[int, str]] class MissingSectionHeaderError(ParsingError): lineno = ... # type: int line = ... # type: str mypy-0.560/typeshed/stdlib/3/curses/0000755€tŠÔÚ€2›s®0000000000013215007244023446 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/curses/__init__.pyi0000644€tŠÔÚ€2›s®0000000026313215007212025724 0ustar jukkaDROPBOX\Domain Users00000000000000import _curses from _curses import * # noqa: F403 LINES: int COLS: int def initscr() -> _curses._CursesWindow: ... def start_color(): ... def wrapper(func, *args, **kwds): ... mypy-0.560/typeshed/stdlib/3/datetime.pyi0000644€tŠÔÚ€2›s®0000001733113215007212024461 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from time import struct_time from typing import Optional, SupportsAbs, Tuple, overload MINYEAR = 0 MAXYEAR = 0 class tzinfo: def tzname(self, dt: Optional[datetime]) -> str: ... def utcoffset(self, dt: Optional[datetime]) -> Optional[timedelta]: ... def dst(self, dt: Optional[datetime]) -> Optional[timedelta]: ... def fromutc(self, dt: datetime) -> datetime: ... class timezone(tzinfo): utc = ... # type: timezone min = ... # type: timezone max = ... # type: timezone def __init__(self, offset: timedelta, name: str = ...) -> None: ... def __hash__(self) -> int: ... _tzinfo = tzinfo _timezone = timezone class date: min = ... # type: date max = ... # type: date resolution = ... # type: timedelta def __init__(self, year: int, month: int, day: int) -> None: ... @classmethod def fromtimestamp(cls, t: float) -> date: ... @classmethod def today(cls) -> date: ... @classmethod def fromordinal(cls, n: int) -> date: ... @property def year(self) -> int: ... @property def month(self) -> int: ... @property def day(self) -> int: ... def ctime(self) -> str: ... def strftime(self, fmt: str) -> str: ... def __format__(self, fmt: str) -> str: ... def isoformat(self) -> str: ... def timetuple(self) -> tuple: ... # TODO return type def toordinal(self) -> int: ... def replace(self, year: int = ..., month: int = ..., day: int = ...) -> date: ... def __le__(self, other: date) -> bool: ... def __lt__(self, other: date) -> bool: ... def __ge__(self, other: date) -> bool: ... def __gt__(self, other: date) -> bool: ... def __add__(self, other: timedelta) -> date: ... @overload def __sub__(self, other: timedelta) -> date: ... @overload def __sub__(self, other: date) -> timedelta: ... def __hash__(self) -> int: ... def weekday(self) -> int: ... def isoweekday(self) -> int: ... def isocalendar(self) -> Tuple[int, int, int]: ... class time: min = ... # type: time max = ... # type: time resolution = ... # type: timedelta def __init__(self, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[tzinfo] = ...) -> None: ... @property def hour(self) -> int: ... @property def minute(self) -> int: ... @property def second(self) -> int: ... @property def microsecond(self) -> int: ... @property def tzinfo(self) -> Optional[_tzinfo]: ... def __le__(self, other: time) -> bool: ... def __lt__(self, other: time) -> bool: ... def __ge__(self, other: time) -> bool: ... def __gt__(self, other: time) -> bool: ... def __hash__(self) -> int: ... 
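# Illustrative sketch (not stub code) of the date methods declared above:
# toordinal()/fromordinal() are inverses and isoformat() is YYYY-MM-DD.
from datetime import date

d = date(2017, 12, 14)
assert d.isoformat() == "2017-12-14"
assert date.fromordinal(d.toordinal()) == d    # ordinal round-trip
assert d.weekday() == 3                        # Monday == 0, so this Thursday is 3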
def isoformat(self) -> str: ... def strftime(self, fmt: str) -> str: ... def __format__(self, fmt: str) -> str: ... def utcoffset(self) -> Optional[timedelta]: ... def tzname(self) -> Optional[str]: ... def dst(self) -> Optional[int]: ... def replace(self, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[_tzinfo] = ...) -> time: ... _date = date _time = time class timedelta(SupportsAbs[timedelta]): min = ... # type: timedelta max = ... # type: timedelta resolution = ... # type: timedelta def __init__(self, days: float = ..., seconds: float = ..., microseconds: float = ..., milliseconds: float = ..., minutes: float = ..., hours: float = ..., weeks: float = ...) -> None: ... @property def days(self) -> int: ... @property def seconds(self) -> int: ... @property def microseconds(self) -> int: ... def total_seconds(self) -> float: ... def __add__(self, other: timedelta) -> timedelta: ... def __radd__(self, other: timedelta) -> timedelta: ... def __sub__(self, other: timedelta) -> timedelta: ... def __rsub(self, other: timedelta) -> timedelta: ... def __neg__(self) -> timedelta: ... def __pos__(self) -> timedelta: ... def __abs__(self) -> timedelta: ... def __mul__(self, other: float) -> timedelta: ... def __rmul__(self, other: float) -> timedelta: ... @overload def __floordiv__(self, other: timedelta) -> int: ... @overload def __floordiv__(self, other: int) -> timedelta: ... @overload def __truediv__(self, other: timedelta) -> float: ... @overload def __truediv__(self, other: float) -> timedelta: ... def __mod__(self, other: timedelta) -> timedelta: ... def __divmod__(self, other: timedelta) -> Tuple[int, timedelta]: ... def __le__(self, other: timedelta) -> bool: ... def __lt__(self, other: timedelta) -> bool: ... def __ge__(self, other: timedelta) -> bool: ... def __gt__(self, other: timedelta) -> bool: ... def __hash__(self) -> int: ... class datetime: # TODO: Is a subclass of date, but this would make some types incompatible. min = ... # type: datetime max = ... # type: datetime resolution = ... # type: timedelta def __init__(self, year: int, month: int, day: int, hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[tzinfo] = ...) -> None: ... @property def year(self) -> int: ... @property def month(self) -> int: ... @property def day(self) -> int: ... @property def hour(self) -> int: ... @property def minute(self) -> int: ... @property def second(self) -> int: ... @property def microsecond(self) -> int: ... @property def tzinfo(self) -> Optional[_tzinfo]: ... @classmethod def fromtimestamp(cls, t: float, tz: Optional[_tzinfo] = ...) -> datetime: ... @classmethod def utcfromtimestamp(cls, t: float) -> datetime: ... @classmethod def today(cls) -> datetime: ... @classmethod def fromordinal(cls, n: int) -> datetime: ... @classmethod def now(cls, tz: Optional[_tzinfo] = ...) -> datetime: ... @classmethod def utcnow(cls) -> datetime: ... @classmethod def combine(cls, date: date, time: time) -> datetime: ... def strftime(self, fmt: str) -> str: ... def __format__(self, fmt: str) -> str: ... def toordinal(self) -> int: ... def timetuple(self) -> struct_time: ... def timestamp(self) -> float: ... def utctimetuple(self) -> struct_time: ... def date(self) -> _date: ... def time(self) -> _time: ... def timetz(self) -> _time: ... def replace(self, year: int = ..., month: int = ..., day: int = ..., hour: int = ..., minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo: Optional[_tzinfo] = ...) 
-> datetime: ... def astimezone(self, tz: Optional[_tzinfo] = ...) -> datetime: ... def ctime(self) -> str: ... if sys.version_info >= (3, 6): def isoformat(self, sep: str = ..., timespec: str = ...) -> str: ... else: def isoformat(self, sep: str = ...) -> str: ... @classmethod def strptime(cls, date_string: str, format: str) -> datetime: ... def utcoffset(self) -> Optional[timedelta]: ... def tzname(self) -> Optional[str]: ... def dst(self) -> Optional[timedelta]: ... def __le__(self, other: datetime) -> bool: ... def __lt__(self, other: datetime) -> bool: ... def __ge__(self, other: datetime) -> bool: ... def __gt__(self, other: datetime) -> bool: ... def __add__(self, other: timedelta) -> datetime: ... @overload def __sub__(self, other: datetime) -> timedelta: ... @overload def __sub__(self, other: timedelta) -> datetime: ... def __hash__(self) -> int: ... def weekday(self) -> int: ... def isoweekday(self) -> int: ... def isocalendar(self) -> Tuple[int, int, int]: ... mypy-0.560/typeshed/stdlib/3/decimal.pyi0000644€tŠÔÚ€2›s®0000002424013215007212024260 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for decimal (Python 3.4) from typing import ( Any, Union, SupportsInt, SupportsFloat, SupportsAbs, SupportsRound, Sequence, Tuple, NamedTuple, Dict ) _Decimal = Union[Decimal, int] _ComparableNum = Union[Decimal, int, float] BasicContext = ... # type: Context DefaultContext = ... # type: Context ExtendedContext = ... # type: Context HAVE_THREADS = ... # type: bool MAX_EMAX = ... # type: int MAX_PREC = ... # type: int MIN_EMIN = ... # type: int MIN_ETINY = ... # type: int ROUND_05UP = ... # type: str ROUND_CEILING = ... # type: str ROUND_DOWN = ... # type: str ROUND_FLOOR = ... # type: str ROUND_HALF_DOWN = ... # type: str ROUND_HALF_EVEN = ... # type: str ROUND_HALF_UP = ... # type: str ROUND_UP = ... # type: str def getcontext() -> Context: ... def localcontext(ctx: Context = ...) -> _ContextManager: ... def setcontext(c: Context) -> None: ... DecimalTuple = NamedTuple('DecimalTuple', [('sign', int), ('digits', Sequence[int]), # TODO: Use Tuple[int, ...] ('exponent', int)]) class _ContextManager: def __enter__(self) -> Context: ... def __exit__(self, t, v, tb) -> None: ... class Context: Emax = ... # type: int Emin = ... # type: int capitals = ... # type: int clamp = ... # type: int prec = ... # type: int rounding = ... # type: str traps = ... # type: Dict[type, bool] def __init__(self, prec: int = ..., rounding: str = ..., Emin: int = ..., Emax: int = ..., capitals: int = ..., clamp: int = ..., flags=..., traps=..., _ignored_flags=...) -> None: ... def Etiny(self): ... def Etop(self): ... def abs(self, x: _Decimal) -> Decimal: ... def add(self, x: _Decimal, y: _Decimal) -> Decimal: ... def canonical(self, x): ... def clear_flags(self): ... def clear_traps(self): ... def compare(self, x, y): ... def compare_signal(self, x, y): ... def compare_total(self, x, y): ... def compare_total_mag(self, x, y): ... def copy(self): ... def copy_abs(self, x): ... def copy_decimal(self, x): ... def copy_negate(self, x): ... def copy_sign(self, x, y): ... def create_decimal(self, x): ... def create_decimal_from_float(self, f): ... def divide(self, x, y): ... def divide_int(self, x, y): ... def divmod(self, x, y): ... def exp(self, x): ... def fma(self, x, y, z): ... def is_canonical(self, x): ... def is_finite(self, x): ... def is_infinite(self, x): ... def is_nan(self, x): ... def is_normal(self, x): ... def is_qnan(self, x): ... def is_signed(self, x): ... def is_snan(self, x): ...
def is_subnormal(self, x): ... def is_zero(self, x): ... def ln(self, x): ... def log10(self, x): ... def logb(self, x): ... def logical_and(self, x, y): ... def logical_invert(self, x): ... def logical_or(self, x, y): ... def logical_xor(self, x, y): ... def max(self, x, y): ... def max_mag(self, x, y): ... def min(self, x, y): ... def min_mag(self, x, y): ... def minus(self, x): ... def multiply(self, x, y): ... def next_minus(self, x): ... def next_plus(self, x): ... def next_toward(self, x): ... def normalize(self, x): ... def number_class(self, x): ... def plus(self, x): ... def power(self, x, y): ... def quantize(self, x, y): ... def radix(self): ... def remainder(self, x, y): ... def remainder_near(self, x, y): ... def rotate(self, x, y): ... def same_quantum(self, x, y): ... def scaleb(self, x, y): ... def shift(self, x, y): ... def sqrt(self, x): ... def subtract(self, x, y): ... def to_eng_string(self, x): ... def to_integral(self, x): ... def to_integral_exact(self, x): ... def to_integral_value(self, x): ... def to_sci_string(self, x): ... def __copy__(self) -> Context: ... def __delattr__(self, name): ... def __reduce__(self): ... class ConversionSyntax(InvalidOperation): ... class Decimal(SupportsInt, SupportsFloat, SupportsAbs[Decimal], SupportsRound[int]): # TODO: SupportsCeil, SupportsFloor, SupportsTrunc? def __init__(cls, value: Union[_Decimal, float, str, Tuple[int, Sequence[int], int]] = ..., context: Context = ...) -> None: ... @property def imag(self) -> Decimal: ... @property def real(self) -> Decimal: ... def adjusted(self) -> int: ... def as_tuple(self) -> DecimalTuple: ... def canonical(self) -> Decimal: ... def compare(self, other: _Decimal, context: Context = ...) -> Decimal: ... def compare_signal(self, other: _Decimal, context: Context = ...) -> Decimal: ... def compare_total(self, other: _Decimal, context: Context = ...) -> Decimal: ... def compare_total_mag(self, other: _Decimal, context: Context = ...) -> Decimal: ... def conjugate(self) -> Decimal: ... def copy_abs(self) -> Decimal: ... def copy_negate(self) -> Decimal: ... def copy_sign(self, other: _Decimal, context: Context = ...) -> Decimal: ... def exp(self, context: Context = ...) -> Decimal: ... def fma(self, other: _Decimal, third: _Decimal, context: Context = ...) -> Decimal: ... @classmethod def from_float(cls, f: float) -> Decimal: ... def is_canonical(self) -> bool: ... def is_finite(self) -> bool: ... def is_infinite(self) -> bool: ... def is_nan(self) -> bool: ... def is_normal(self, context: Context = ...) -> bool: ... def is_qnan(self) -> bool: ... def is_signed(self) -> bool: ... def is_snan(self) -> bool: ... def is_subnormal(self, context: Context = ...) -> bool: ... def is_zero(self) -> bool: ... def ln(self, context: Context = ...) -> Decimal: ... def log10(self, context: Context = ...) -> Decimal: ... def logb(self, context: Context = ...) -> Decimal: ... def logical_and(self, other: _Decimal, context: Context = ...) -> Decimal: ... def logical_invert(self, context: Context = ...) -> Decimal: ... def logical_or(self, other: _Decimal, context: Context = ...) -> Decimal: ... def logical_xor(self, other: _Decimal, context: Context = ...) -> Decimal: ... def max(self, other: _Decimal, context: Context = ...) -> Decimal: ... def max_mag(self, other: _Decimal, context: Context = ...) -> Decimal: ... def min(self, other: _Decimal, context: Context = ...) -> Decimal: ... def min_mag(self, other: _Decimal, context: Context = ...) -> Decimal: ... def next_minus(self, context: Context = ...) 
-> Decimal: ... def next_plus(self, context: Context = ...) -> Decimal: ... def next_toward(self, other: _Decimal, context: Context = ...) -> Decimal: ... def normalize(self, context: Context = ...) -> Decimal: ... def number_class(self, context: Context = ...) -> str: ... def quantize(self, exp: _Decimal, rounding: str = ..., context: Context = ...) -> Decimal: ... def radix(self) -> Decimal: ... def remainder_near(self, other: _Decimal, context: Context = ...) -> Decimal: ... def rotate(self, other: _Decimal, context: Context = ...) -> Decimal: ... def same_quantum(self, other: _Decimal, context: Context = ...) -> bool: ... def scaleb(self, other: _Decimal, context: Context = ...) -> Decimal: ... def shift(self, other: _Decimal, context: Context = ...) -> Decimal: ... def sqrt(self, context: Context = ...) -> Decimal: ... def to_eng_string(self, context: Context = ...) -> str: ... def to_integral(self, rounding: str = ..., context: Context = ...) -> Decimal: ... def to_integral_exact(self, rounding: str = ..., context: Context = ...) -> Decimal: ... def to_integral_value(self, rounding: str = ..., context: Context = ...) -> Decimal: ... def __abs__(self) -> Decimal: ... def __add__(self, other: _Decimal) -> Decimal: ... def __bool__(self) -> bool: ... def __ceil__(self) -> int: ... def __complex__(self) -> complex: ... def __copy__(self) -> Decimal: ... def __deepcopy__(self) -> Decimal: ... def __divmod__(self, other: _Decimal) -> Tuple[Decimal, Decimal]: ... def __eq__(self, other: object) -> bool: ... def __float__(self) -> float: ... def __floor__(self) -> int: ... def __floordiv__(self, other: _Decimal) -> Decimal: ... def __format__(self, specifier, context=..., _localeconv=...) -> str: ... def __ge__(self, other: _ComparableNum) -> bool: ... def __gt__(self, other: _ComparableNum) -> bool: ... def __hash__(self) -> int: ... def __int__(self) -> int: ... def __le__(self, other: _ComparableNum) -> bool: ... def __lt__(self, other: _ComparableNum) -> bool: ... def __mod__(self, other: _Decimal) -> Decimal: ... def __mul__(self, other: _Decimal) -> Decimal: ... def __ne__(self, other: object) -> bool: ... def __neg__(self) -> Decimal: ... def __pos__(self) -> Decimal: ... def __pow__(self, other: _Decimal) -> Decimal: ... def __radd__(self, other: int) -> Decimal: ... def __rdivmod__(self, other: int) -> Tuple[Decimal, Decimal]: ... def __reduce__(self): ... def __rfloordiv__(self, other: int) -> Decimal: ... def __rmod__(self, other: int) -> Decimal: ... def __rmul__(self, other: int) -> Decimal: ... def __round__(self, n=...) -> int: ... def __rpow__(self, other: int) -> Decimal: ... def __rsub__(self, other: int) -> Decimal: ... def __rtruediv__(self, other: int) -> Decimal: ... def __sub__(self, other: _Decimal) -> Decimal: ... def __truediv__(self, other: _Decimal) -> Decimal: ... def __trunc__(self) -> int: ... class DecimalException(ArithmeticError): ... class Clamped(DecimalException): ... class DivisionByZero(DecimalException, ZeroDivisionError): ... class DivisionImpossible(InvalidOperation): ... class DivisionUndefined(InvalidOperation, ZeroDivisionError): ... class FloatOperation(DecimalException, TypeError): ... class Inexact(DecimalException): ... class InvalidContext(InvalidOperation): ... class InvalidOperation(DecimalException): ... class Overflow(Inexact, Rounded): ... class Rounded(DecimalException): ... class Subnormal(DecimalException): ... class Underflow(Inexact, Rounded, Subnormal): ... 
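# A minimal, hypothetical usage sketch (illustration only, not part of the stubbed API above):
# it shows how the declarations fit together -- localcontext() returns a _ContextManager whose
# __enter__ yields a Context, so attribute writes such as ctx.prec check against the int field
# declared on Context, and Decimal arithmetic stays within Decimal.
def _decimal_usage_sketch() -> Decimal:
    with localcontext() as ctx:          # ctx is typed as Context
        ctx.prec = 6                     # Context.prec is declared as int
        return Decimal(1) / Decimal(3)   # Decimal.__truediv__ -> Decimal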
mypy-0.560/typeshed/stdlib/3/email/0000755€tŠÔÚ€2›s®0000000000013215007244023231 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/email/__init__.pyi0000644€tŠÔÚ€2›s®0000000341513215007212025511 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email (Python 3.4) from typing import Callable, Optional, IO import sys from email.message import Message if sys.version_info >= (3, 3): from email.policy import Policy if sys.version_info >= (3, 3): def message_from_string(s: str, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... def message_from_bytes(s: bytes, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... def message_from_file(fp: IO[str], _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... def message_from_binary_file(fp: IO[bytes], _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... elif sys.version_info >= (3, 2): def message_from_string(s: str, _class: Callable[[], Message] = ..., *, strict: Optional[bool] = ...) -> Message: ... def message_from_bytes(s: bytes, _class: Callable[[], Message] = ..., *, strict: Optional[bool] = ...) -> Message: ... def message_from_file(fp: IO[str], _class: Callable[[], Message] = ..., *, strict: Optional[bool] = ...) -> Message: ... def message_from_binary_file(fp: IO[bytes], _class: Callable[[], Message] = ..., *, strict: Optional[bool] = ...) -> Message: ... # Names in __all__ with no definition: # base64mime # charset # encoders # errors # feedparser # generator # header # iterators # message # mime # parser # quoprimime # utils mypy-0.560/typeshed/stdlib/3/email/charset.pyi0000644€tŠÔÚ€2›s®0000000221113215007212025374 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.charset (Python 3.4) from typing import List, Optional, Iterator, Any class Charset: input_charset = ... # type: str header_encoding = ... # type: int body_encoding = ... # type: int output_charset = ... # type: Optional[str] input_codec = ... # type: Optional[str] output_codec = ... # type: Optional[str] def __init__(self, input_charset: str = ...) -> None: ... def get_body_encoding(self) -> str: ... def get_output_charset(self) -> Optional[str]: ... def header_encode(self, string: str) -> str: ... def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> List[str]: ... def body_encode(self, string: str) -> str: ... def __str__(self) -> str: ... def __eq__(self, other: Any) -> bool: ... def __ne__(self, other: Any) -> bool: ... def add_charset(charset: Charset, header_enc: Optional[int] = ..., body_enc: Optional[int] = ..., output_charset: Optional[str] = ...) -> None: ... def add_alias(alias: str, canonical: str) -> None: ... def add_codec(charset: str, codecname: str) -> None: ... mypy-0.560/typeshed/stdlib/3/email/contentmanager.pyi0000644€tŠÔÚ€2›s®0000000124113215007212026752 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.contentmanager (Python 3.4) from typing import Any, Callable import sys from email.message import Message if sys.version_info >= (3, 4): class ContentManager: def __init__(self) -> None: ... def get_content(self, msg: Message, *args: Any, **kw: Any) -> Any: ... def set_content(self, msg: Message, obj: Any, *args: Any, **kw: Any) -> Any: ... def add_get_handler(self, key: str, handler: Callable[..., Any]) -> None: ... def add_set_handler(self, typekey: type, handler: Callable[..., Any]) -> None: ... raw_data_manager = ... 
# type: ContentManager mypy-0.560/typeshed/stdlib/3/email/encoders.pyi0000644€tŠÔÚ€2›s®0000000037713215007212025560 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.encoders (Python 3.4) from email.message import Message def encode_base64(msg: Message) -> None: ... def encode_quopri(msg: Message) -> None: ... def encode_7or8bit(msg: Message) -> None: ... def encode_noop(msg: Message) -> None: ... mypy-0.560/typeshed/stdlib/3/email/errors.pyi0000644€tŠÔÚ€2›s®0000000162213215007212025264 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.errors (Python 3.4) import sys class MessageError(Exception): ... class MessageParseError(MessageError): ... class HeaderParseError(MessageParseError): ... class BoundaryError(MessageParseError): ... class MultipartConversionError(MessageError, TypeError): ... class MessageDefect(ValueError): ... class NoBoundaryInMultipartDefect(MessageDefect): ... class StartBoundaryNotFoundDefect(MessageDefect): ... class FirstHeaderLineIsContinuationDefect(MessageDefect): ... class MisplacedEnvelopeHeaderDefect(MessageDefect): ... class MalformedHeaderDefect(MessageDefect): ... class MultipartInvariantViolationDefect(MessageDefect): ... class InvalidBase64PaddingDefect(MessageDefect): ... class InvalidBase64CharactersDefect(MessageDefect): ... if sys.version_info >= (3, 3): class CloseBoundaryNotFoundDefect(MessageDefect): ... class MissingHeaderBodySeparatorDefect(MessageDefect): ... mypy-0.560/typeshed/stdlib/3/email/feedparser.pyi0000644€tŠÔÚ€2›s®0000000173713215007212026077 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.feedparser (Python 3.4) from typing import Callable import sys from email.message import Message if sys.version_info >= (3, 3): from email.policy import Policy class FeedParser: if sys.version_info >= (3, 3): def __init__(self, _factory: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... else: def __init__(self, _factory: Callable[[], Message] = ...) -> None: ... def feed(self, data: str) -> None: ... def close(self) -> Message: ... if sys.version_info >= (3, 2): class BytesFeedParser: if sys.version_info >= (3, 3): def __init__(self, _factory: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... else: def __init__(self, _factory: Callable[[], Message] = ...) -> None: ... def feed(self, data: str) -> None: ... def close(self) -> Message: ... mypy-0.560/typeshed/stdlib/3/email/generator.pyi0000644€tŠÔÚ€2›s®0000000343513215007212025742 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.generator (Python 3.4) from typing import TextIO, Optional import sys from email.message import Message if sys.version_info >= (3, 3): from email.policy import Policy class Generator: def clone(self, fp: TextIO) -> 'Generator': ... def write(self, s: str) -> None: ... if sys.version_info >= (3, 3): def __init__(self, outfp: TextIO, mangle_from_: bool = ..., maxheaderlen: int = ..., *, policy: Policy = ...) -> None: ... else: def __init__(self, outfp: TextIO, mangle_from_: bool = ..., maxheaderlen: int = ...) -> None: ... if sys.version_info >= (3, 2): def flatten(self, msg: Message, unixfrom: bool = ..., linesep: Optional[str] =...) -> None: ... else: def flatten(self, msg: Message, unixfrom: bool = ...) -> None: ... if sys.version_info >= (3, 2): class BytesGenerator: def clone(self, fp: TextIO) -> 'Generator': ... def write(self, s: str) -> None: ... 
if sys.version_info >= (3, 3): def __init__(self, outfp: TextIO, mangle_from_: bool = ..., maxheaderlen: int = ..., *, policy: Policy = ...) -> None: ... else: def __init__(self, outfp: TextIO, mangle_from_: bool = ..., maxheaderlen: int = ...) -> None: ... def flatten(self, msg: Message, unixfrom: bool = ..., linesep: Optional[str] =...) -> None: ... class DecodedGenerator(Generator): # TODO `fmt` is positional def __init__(self, outfp: TextIO, mangle_from_: bool = ..., maxheaderlen: int = ..., *, fmt: Optional[str]) -> None: ... mypy-0.560/typeshed/stdlib/3/email/header.pyi0000644€tŠÔÚ€2›s®0000000216613215007212025204 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.header (Python 3.4) from typing import Union, Optional, Any, List, Tuple from email.charset import Charset class Header: def __init__(self, s: Union[bytes, str, None] = ..., charset: Union[Charset, str, None] = ..., maxlinelen: Optional[int] = ..., header_name: Optional[str] = ..., continuation_ws: str = ..., errors: str = ...) -> None: ... def append(self, s: Union[bytes, str], charset: Union[Charset, str, None] = ..., errors: str = ...) -> None: ... def encode(self, splitchars: str = ..., maxlinelen: Optional[int] = ..., linesep: str = ...) -> str: ... def __str__(self) -> str: ... def __eq__(self, other: Any) -> bool: ... def __ne__(self, other: Any) -> bool: ... def decode_header(header: Union[Header, str]) -> List[Tuple[bytes, Optional[str]]]: ... def make_header(decoded_seq: List[Tuple[bytes, Optional[str]]], maxlinelen: Optional[int] =..., header_name: Optional[str] = ..., continuation_ws: str = ...) -> Header: ... mypy-0.560/typeshed/stdlib/3/email/headerregistry.pyi0000644€tŠÔÚ€2›s®0000000700513215007212026772 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.headerregistry (Python 3.4) from datetime import datetime as _datetime import sys from typing import Dict, Tuple, Optional, Any, Union, Mapping from email.errors import MessageDefect if sys.version_info >= (3, 3): from email.policy import Policy if sys.version_info >= (3, 3): class BaseHeader(str): @property def name(self) -> str: ... @property def defects(self) -> Tuple[MessageDefect, ...]: ... @property def max_count(self) -> Optional[int]: ... def __new__(cls, name: str, value: Any) -> 'BaseHeader': ... def init(self, *args: Any, **kw: Any) -> None: ... def fold(self, *, policy: Policy) -> str: ... class UnstructuredHeader: @classmethod def parse(cls, string: str, kwds: Dict[str, Any]) -> None: ... class UniqueUnstructuredHeader(UnstructuredHeader): ... class DateHeader: datetime = ... # type: _datetime @classmethod def parse(cls, string: Union[str, _datetime], kwds: Dict[str, Any]) -> None: ... class UniqueDateHeader(DateHeader): ... class AddressHeader: groups = ... # type: Tuple[Group, ...] addresses = ... # type: Tuple[Address, ...] @classmethod def parse(cls, string: str, kwds: Dict[str, Any]) -> None: ... class UniqueAddressHeader(AddressHeader): ... class SingleAddressHeader(AddressHeader): @property def address(self) -> Address: ... class UniqueSingleAddressHeader(SingleAddressHeader): ... class MIMEVersionHeader: version = ... # type: Optional[str] major = ... # type: Optional[int] minor = ... # type: Optional[int] @classmethod def parse(cls, string: str, kwds: Dict[str, Any]) -> None: ... class ParameterizedMIMEHeader: params = ... # type: Mapping[str, Any] @classmethod def parse(cls, string: str, kwds: Dict[str, Any]) -> None: ... class ContentTypeHeader(ParameterizedMIMEHeader): content_type = ... 
# type: str maintype = ... # type: str subtype = ... # type: str class ContentDispositionHeader(ParameterizedMIMEHeader): content_disposition = ... # type: str class ContentTransferEncodingHeader: cte = ... # type: str @classmethod def parse(cls, string: str, kwds: Dict[str, Any]) -> None: ... class HeaderRegistry: def __init__(self, base_class: BaseHeader = ..., default_class: BaseHeader = ..., use_default_map: bool = ...) -> None: ... def map_to_type(self, name: str, cls: BaseHeader) -> None: ... def __getitem__(self, name: str) -> BaseHeader: ... def __call__(self, name: str, value: Any) -> BaseHeader: ... class Address: display_name = ... # type: str username = ... # type: str domain = ... # type: str @property def addr_spec(self) -> str: ... def __init__(self, display_name: str = ..., username: Optional[str] = ..., domain: Optional[str] = ..., addr_spec: Optional[str]=...) -> None: ... def __str__(self) -> str: ... class Group: display_name = ... # type: Optional[str] addresses = ... # type: Tuple[Address, ...] def __init__(self, display_name: Optional[str] = ..., addresses: Optional[Tuple[Address, ...]] = ...) -> None: ... def __str__(self) -> str: ... mypy-0.560/typeshed/stdlib/3/email/iterators.pyi0000644€tŠÔÚ€2›s®0000000051713215007212025766 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.iterators (Python 3.4) from typing import Iterator, Optional from email.message import Message def body_line_iterator(msg: Message, decode: bool = ...) -> Iterator[str]: ... def typed_subpart_iterator(msg: Message, maintype: str = ..., subtype: Optional[str] = ...) -> Iterator[str]: ... mypy-0.560/typeshed/stdlib/3/email/message.pyi0000644€tŠÔÚ€2›s®0000001410113215007212025370 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.message (Python 3.4) from typing import ( List, Optional, Union, Tuple, TypeVar, Generator, Sequence, Iterator, Any ) import sys from email.charset import Charset from email.errors import MessageDefect from email.header import Header if sys.version_info >= (3, 3): from email.policy import Policy if sys.version_info >= (3, 4): from email.contentmanager import ContentManager else: ContentManager = object # Hack so we can reference it in argument types. _T = TypeVar('_T') _PayloadType = Union[List[Message], str, bytes] _CharsetType = Union[Charset, str, None] _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] _ParamType = Union[str, Tuple[Optional[str], Optional[str], str]] _HeaderType = Union[str, Header] class Message: preamble = ... # type: Optional[str] epilogue = ... # type: Optional[str] defects = ... # type: List[MessageDefect] def __str__(self) -> str: ... def is_multipart(self) -> bool: ... def set_unixfrom(self, unixfrom: str) -> None: ... def get_unixfrom(self) -> Optional[str]: ... def attach(self, payload: 'Message') -> None: ... def get_payload(self, i: int = ..., decode: bool = ...) -> Optional[_PayloadType]: ... def set_payload(self, payload: _PayloadType, charset: _CharsetType = ...) -> None: ... def set_charset(self, charset: _CharsetType) -> None: ... def get_charset(self) -> _CharsetType: ... def __len__(self) -> int: ... def __contains__(self, name: str) -> bool: ... def __getitem__(self, name: str) -> Optional[_HeaderType]: ... def __setitem__(self, name: str, val: _HeaderType) -> None: ... def __delitem__(self, name: str) -> None: ... def keys(self) -> List[str]: ... def values(self) -> List[_HeaderType]: ... def items(self) -> List[Tuple[str, _HeaderType]]: ... def get(self, name: str, failobj: _T = ...) 
-> Union[_HeaderType, _T]: ... def get_all(self, name: str, failobj: _T = ...) -> Union[List[_HeaderType], _T]: ... def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: ... def replace_header(self, _name: str, _value: _HeaderType) -> None: ... def get_content_type(self) -> str: ... def get_content_maintype(self) -> str: ... def get_content_subtype(self) -> str: ... def get_default_type(self) -> str: ... def set_default_type(self, ctype: str) -> None: ... def get_params(self, failobj: _T = ..., header: str = ..., unquote: bool = ...) -> Union[List[Tuple[str, str]], _T]: ... def get_param(self, param: str, failobj: _T = ..., header: str = ..., unquote: bool = ...) -> Union[_T, _ParamType]: ... def del_param(self, param: str, header: str = ..., requote: bool = ...) -> None: ... def set_type(self, type: str, header: str = ..., requote: bool = ...) -> None: ... def get_filename(self, failobj: _T = ...) -> Union[_T, str]: ... def get_boundary(self, failobj: _T = ...) -> Union[_T, str]: ... def set_boundary(self, boundary: str) -> None: ... def get_content_charset(self, failobj: _T = ...) -> Union[_T, str]: ... def get_charsets(self, failobj: _T = ...) -> Union[_T, List[str]]: ... def walk(self) -> Generator['Message', None, None]: ... if sys.version_info >= (3, 5): def get_content_disposition(self) -> Optional[str]: ... if sys.version_info >= (3, 4): def as_string(self, unixfrom: bool = ..., maxheaderlen: int = ..., policy: Optional[Policy] = ...) -> str: ... def as_bytes(self, unixfrom: bool = ..., policy: Optional[Policy] = ...) -> bytes: ... def __bytes__(self) -> bytes: ... def set_param(self, param: str, value: str, header: str = ..., requote: bool = ..., charset: str = ..., language: str = ..., replace: bool = ...) -> None: ... else: def as_string(self, unixfrom: bool = ..., maxheaderlen: int = ...) -> str: ... def set_param(self, param: str, value: str, header: str = ..., requote: bool = ..., charset: str = ..., language: str = ...) -> None: ... if sys.version_info >= (3, 3): def __init__(self, policy: Policy = ...) -> None: ... else: def __init__(self) -> None: ... class MIMEPart: if sys.version_info >= (3, 3): def __init__(self, policy: Policy = ...) -> None: ... else: def __init__(self) -> None: ... def get_body(self, preferencelist: Sequence[str] = ...) -> Optional[Message]: ... def iter_attachments(self) -> Iterator[Message]: ... def iter_parts(self) -> Iterator[Message]: ... def get_content(self, *args: Any, content_manager: Optional[ContentManager] = ..., **kw: Any) -> Any: ... def set_content(self, *args: Any, content_manager: Optional[ContentManager] = ..., **kw: Any) -> None: ... def make_related(self, boundary: Optional[str] = ...) -> None: ... def make_alternative(self, boundary: Optional[str] = ...) -> None: ... def make_mixed(self, boundary: Optional[str] = ...) -> None: ... def add_related(self, *args: Any, content_manager: Optional[ContentManager] = ..., **kw: Any) -> None: ... def add_alternative(self, *args: Any, content_manager: Optional[ContentManager] = ..., **kw: Any) -> None: ... def add_attachment(self, *args: Any, content_manager: Optional[ContentManager] = ..., **kw: Any) -> None: ... def clear(self) -> None: ... def clear_content(self) -> None: ... if sys.version_info >= (3, 4): def is_attachment(self) -> bool: ... else: @property def is_attachment(self) -> bool: ... class EmailMessage(MIMEPart): def set_content(self, *args: Any, content_manager: Optional[ContentManager] = ..., **kw: Any) -> None: ...
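# A minimal, hypothetical usage sketch (illustration only, not part of the stub above):
# the failobj parameters are typed with the _T TypeVar, so the fallback value you pass
# determines the Union member you get back.
def _message_usage_sketch() -> None:
    msg = Message()
    msg['Subject'] = 'hello'                          # __setitem__(str, _HeaderType)
    subject = msg.get('Subject', '')                  # Union[_HeaderType, str]
    charset = msg.get_content_charset('us-ascii')     # falls back to the given str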
mypy-0.560/typeshed/stdlib/3/email/mime/0000755€tŠÔÚ€2›s®0000000000013215007244024160 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/email/mime/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212026423 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/email/mime/application.pyi0000644€tŠÔÚ€2›s®0000000067013215007212027204 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.application (Python 3.4) from typing import Callable, Optional, Tuple, Union from email.mime.nonmultipart import MIMENonMultipart _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] class MIMEApplication(MIMENonMultipart): def __init__(self, _data: bytes, _subtype: str = ..., _encoder: Callable[[MIMEApplication], None] = ..., **_params: _ParamsType) -> None: ... mypy-0.560/typeshed/stdlib/3/email/mime/audio.pyi0000644€tŠÔÚ€2›s®0000000066513215007212026006 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.audio (Python 3.4) from typing import Callable, Optional, Tuple, Union from email.mime.nonmultipart import MIMENonMultipart _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] class MIMEAudio(MIMENonMultipart): def __init__(self, _audiodata: bytes, _subtype: Optional[str] = ..., _encoder: Callable[[MIMEAudio], None] = ..., **_params: _ParamsType) -> None: ... mypy-0.560/typeshed/stdlib/3/email/mime/base.pyi0000644€tŠÔÚ€2›s®0000000047513215007212025616 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.base (Python 3.4) from typing import Optional, Tuple, Union import email.message _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] class MIMEBase(email.message.Message): def __init__(self, _maintype: str, _subtype: str, **_params: _ParamsType) -> None: ... mypy-0.560/typeshed/stdlib/3/email/mime/image.pyi0000644€tŠÔÚ€2›s®0000000066513215007212025767 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.image (Python 3.4) from typing import Callable, Optional, Tuple, Union from email.mime.nonmultipart import MIMENonMultipart _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] class MIMEImage(MIMENonMultipart): def __init__(self, _imagedata: bytes, _subtype: Optional[str] = ..., _encoder: Callable[[MIMEImage], None] = ..., **_params: _ParamsType) -> None: ... mypy-0.560/typeshed/stdlib/3/email/mime/message.pyi0000644€tŠÔÚ€2›s®0000000036213215007212026323 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.message (Python 3.4) from email.message import Message from email.mime.nonmultipart import MIMENonMultipart class MIMEMessage(MIMENonMultipart): def __init__(self, _msg: Message, _subtype: str = ...) -> None: ... mypy-0.560/typeshed/stdlib/3/email/mime/multipart.pyi0000644€tŠÔÚ€2›s®0000000071213215007212026717 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.multipart (Python 3.4) from typing import Optional, Sequence, Tuple, Union from email.message import Message from email.mime.base import MIMEBase _ParamsType = Union[str, None, Tuple[str, Optional[str], str]] class MIMEMultipart(MIMEBase): def __init__(self, _subtype: str = ..., boundary: Optional[str] = ..., _subparts: Optional[Sequence[Message]] = ..., **_params: _ParamsType) -> None: ... mypy-0.560/typeshed/stdlib/3/email/mime/nonmultipart.pyi0000644€tŠÔÚ€2›s®0000000017613215007212027436 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.nonmultipart (Python 3.4) from email.mime.base import MIMEBase class MIMENonMultipart(MIMEBase): ... 
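# A minimal, hypothetical sketch of how the MIME classes above compose (illustration only,
# not part of the stubs): MIMEMultipart takes the subtype first, and parts such as
# MIMEApplication are built from bytes plus a subtype before being attach()ed.
def _mime_usage_sketch() -> None:
    from email.mime.application import MIMEApplication
    from email.mime.multipart import MIMEMultipart
    outer = MIMEMultipart('mixed')
    outer['To'] = 'recipient@example.invalid'             # placeholder address
    outer.attach(MIMEApplication(b'%PDF-1.4', 'pdf'))     # _data: bytes, _subtype: str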
mypy-0.560/typeshed/stdlib/3/email/mime/text.pyi0000644€tŠÔÚ€2›s®0000000042313215007212025661 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.mime.text (Python 3.4) from typing import Optional from email.mime.nonmultipart import MIMENonMultipart class MIMEText(MIMENonMultipart): def __init__(self, _text: str, _subtype: str = ..., _charset: Optional[str] = ...) -> None: ... mypy-0.560/typeshed/stdlib/3/email/parser.pyi0000644€tŠÔÚ€2›s®0000000502513215007212025245 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.parser (Python 3.4) import email.feedparser from email.message import Message import sys from typing import Callable, Optional, TextIO, BinaryIO if sys.version_info >= (3, 3): from email.policy import Policy FeedParser = email.feedparser.FeedParser BytesFeedParser = email.feedparser.BytesFeedParser class Parser: if sys.version_info >= (3, 3): def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... else: # TODO `strict` is positional def __init__(self, _class: Callable[[], Message] = ..., *, strict: Optional[bool]) -> None: ... def parse(self, fp: TextIO, headersonly: bool = ...) -> Message: ... def parsestr(self, text: str, headersonly: bool = ...) -> Message: ... class HeaderParser(Parser): if sys.version_info >= (3, 3): def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... else: # TODO `strict` is positional def __init__(self, _class: Callable[[], Message] = ..., *, strict: Optional[bool]) -> None: ... def parse(self, fp: TextIO, headersonly: bool = ...) -> Message: ... def parsestr(self, text: str, headersonly: bool = ...) -> Message: ... if sys.version_info >= (3, 3): class BytesHeaderParser(BytesParser): if sys.version_info >= (3, 3): def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... else: # TODO `strict` is positional def __init__(self, _class: Callable[[], Message] = ..., *, strict: Optional[bool]) -> None: ... def parse(self, fp: BinaryIO, headersonly: bool = ...) -> Message: ... def parsestr(self, text: str, headersonly: bool = ...) -> Message: ... if sys.version_info >= (3, 2): class BytesParser: if sys.version_info >= (3, 3): def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... else: # TODO `strict` is positional def __init__(self, _class: Callable[[], Message] = ..., *, strict: Optional[bool]) -> None: ... def parse(self, fp: BinaryIO, headersonly: bool = ...) -> Message: ... def parsestr(self, text: str, headersonly: bool = ...) -> Message: ... mypy-0.560/typeshed/stdlib/3/email/policy.pyi0000644€tŠÔÚ€2›s®0000000547413215007212025260 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.policy (Python 3.4) from abc import abstractmethod from typing import Any, List, Optional, Tuple, Union, Callable import sys from email.message import Message from email.errors import MessageDefect from email.header import Header if sys.version_info >= (3, 4): from email.contentmanager import ContentManager if sys.version_info >= (3, 3): class Policy: max_line_length = ... # type: Optional[int] linesep = ... # type: str cte_type = ... # type: str raise_on_defect = ... # type: bool if sys.version_info >= (3, 5): mangle_from_ = ... # type: bool def __init__(self, **kw: Any) -> None: ... def clone(self, **kw: Any) -> 'Policy': ... def handle_defect(self, obj: Message, defect: MessageDefect) -> None: ... def register_defect(self, obj: Message, defect: MessageDefect) -> None: ...
def header_max_count(self, name: str) -> Optional[int]: ... @abstractmethod def header_source_parse(self, sourcelines: List[str]) -> str: ... @abstractmethod def header_store_parse(self, name: str, value: str) -> Tuple[str, str]: ... @abstractmethod def header_fetch_parse(self, name: str, value: str) -> str: ... @abstractmethod def fold(self, name: str, value: str) -> str: ... @abstractmethod def fold_binary(self, name: str, value: str) -> bytes: ... class Compat32(Policy): def header_source_parse(self, sourcelines: List[str]) -> str: ... def header_store_parse(self, name: str, value: str) -> Tuple[str, str]: ... def header_fetch_parse(self, name: str, value: str) -> Union[str, Header]: ... # type: ignore def fold(self, name: str, value: str) -> str: ... def fold_binary(self, name: str, value: str) -> bytes: ... compat32 = ... # type: Compat32 class EmailPolicy(Policy): utf8 = ... # type: bool refold_source = ... # type: str header_factory = ... # type: Callable[[str, str], str] if sys.version_info >= (3, 4): content_manager = ... # type: ContentManager def header_source_parse(self, sourcelines: List[str]) -> str: ... def header_store_parse(self, name: str, value: str) -> Tuple[str, str]: ... def header_fetch_parse(self, name: str, value: str) -> str: ... def fold(self, name: str, value: str) -> str: ... def fold_binary(self, name: str, value: str) -> bytes: ... default = ... # type: EmailPolicy SMTP = ... # type: EmailPolicy SMTPUTF8 = ... # type: EmailPolicy HTTP = ... # type: EmailPolicy strict = ... # type: EmailPolicy mypy-0.560/typeshed/stdlib/3/email/utils.pyi0000644€tŠÔÚ€2›s®0000000313613215007212025112 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for email.utils (Python 3.4) from typing import List, Optional, Tuple, Union from email.charset import Charset import datetime _ParamType = Union[str, Tuple[Optional[str], Optional[str], str]] _PDTZ = Tuple[int, int, int, int, int, int, int, int, int, Optional[int]] def quote(str: str) -> str: ... def unquote(str: str) -> str: ... def parseaddr(address: Optional[str]) -> Tuple[str, str]: ... def formataddr(pair: Tuple[Optional[str], str], charset: Union[str, Charset] = ...) -> str: ... def getaddresses(fieldvalues: List[str]) -> List[Tuple[str, str]]: ... def parsedate(date: str) -> Optional[Tuple[int, int, int, int, int, int, int, int, int]]: ... def parsedate_tz(date: str) -> Optional[_PDTZ]: ... def parsedate_to_datetime(date: str) -> datetime.datetime: ... def mktime_tz(tuple: _PDTZ) -> int: ... def formatdate(timeval: Optional[float] = ..., localtime: bool = ..., usegmt: bool = ...) -> str: ... def format_datetime(dt: datetime.datetime, usegmt: bool = ...) -> str: ... def localtime(dt: Optional[datetime.datetime] = ...) -> datetime.datetime: ... def make_msgid(idstring: Optional[str] = ..., domain: Optional[str] = ...) -> str: ... def decode_rfc2231(s: str) -> Tuple[Optional[str], Optional[str], str]: ... def encode_rfc2231(s: str, charset: Optional[str] = ..., language: Optional[str] = ...) -> str: ... def collapse_rfc2231_value(value: _ParamType, errors: str = ..., fallback_charset: str = ...) -> str: ... def decode_params( params: List[Tuple[str, str]] ) -> List[Tuple[str, _ParamType]]: ... 
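# A minimal, hypothetical usage sketch (illustration only, not part of the stub above):
# parsedate_tz() returns Optional[_PDTZ], so the None case has to be narrowed away
# before the 10-tuple can be handed to mktime_tz().
def _utils_usage_sketch() -> None:
    parsed = parsedate_tz('Mon, 20 Nov 1995 19:12:08 -0500')
    if parsed is not None:
        timestamp = mktime_tz(parsed)    # _PDTZ -> int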
mypy-0.560/typeshed/stdlib/3/encodings/0000755€tŠÔÚ€2›s®0000000000013215007244024113 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/encodings/__init__.pyi0000644€tŠÔÚ€2›s®0000000013613215007212026370 0ustar jukkaDROPBOX\Domain Users00000000000000import codecs import typing def search_function(encoding: str) -> codecs.CodecInfo: ... mypy-0.560/typeshed/stdlib/3/encodings/utf_8.pyi0000644€tŠÔÚ€2›s®0000000107513215007212025661 0ustar jukkaDROPBOX\Domain Users00000000000000import codecs from typing import Text, Tuple class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input: Text, final: bool = ...) -> bytes: ... class IncrementalDecoder(codecs.BufferedIncrementalDecoder): def _buffer_decode(self, input: bytes, errors: str, final: bool) -> Tuple[Text, int]: ... class StreamWriter(codecs.StreamWriter): ... class StreamReader(codecs.StreamReader): ... def getregentry() -> codecs.CodecInfo: ... def encode(input: Text, errors: Text = ...) -> bytes: ... def decode(input: bytes, errors: Text = ...) -> Text: ... mypy-0.560/typeshed/stdlib/3/fcntl.pyi0000644€tŠÔÚ€2›s®0000000535413215007212023775 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for fcntl from io import IOBase from typing import Any, IO, Union FASYNC = ... # type: int FD_CLOEXEC = ... # type: int DN_ACCESS = ... # type: int DN_ATTRIB = ... # type: int DN_CREATE = ... # type: int DN_DELETE = ... # type: int DN_MODIFY = ... # type: int DN_MULTISHOT = ... # type: int DN_RENAME = ... # type: int F_DUPFD = ... # type: int F_DUPFD_CLOEXEC = ... # type: int F_FULLFSYNC = ... # type: int F_EXLCK = ... # type: int F_GETFD = ... # type: int F_GETFL = ... # type: int F_GETLEASE = ... # type: int F_GETLK = ... # type: int F_GETLK64 = ... # type: int F_GETOWN = ... # type: int F_NOCACHE = ... # type: int F_GETSIG = ... # type: int F_NOTIFY = ... # type: int F_RDLCK = ... # type: int F_SETFD = ... # type: int F_SETFL = ... # type: int F_SETLEASE = ... # type: int F_SETLK = ... # type: int F_SETLK64 = ... # type: int F_SETLKW = ... # type: int F_SETLKW64 = ... # type: int F_SETOWN = ... # type: int F_SETSIG = ... # type: int F_SHLCK = ... # type: int F_UNLCK = ... # type: int F_WRLCK = ... # type: int I_ATMARK = ... # type: int I_CANPUT = ... # type: int I_CKBAND = ... # type: int I_FDINSERT = ... # type: int I_FIND = ... # type: int I_FLUSH = ... # type: int I_FLUSHBAND = ... # type: int I_GETBAND = ... # type: int I_GETCLTIME = ... # type: int I_GETSIG = ... # type: int I_GRDOPT = ... # type: int I_GWROPT = ... # type: int I_LINK = ... # type: int I_LIST = ... # type: int I_LOOK = ... # type: int I_NREAD = ... # type: int I_PEEK = ... # type: int I_PLINK = ... # type: int I_POP = ... # type: int I_PUNLINK = ... # type: int I_PUSH = ... # type: int I_RECVFD = ... # type: int I_SENDFD = ... # type: int I_SETCLTIME = ... # type: int I_SETSIG = ... # type: int I_SRDOPT = ... # type: int I_STR = ... # type: int I_SWROPT = ... # type: int I_UNLINK = ... # type: int LOCK_EX = ... # type: int LOCK_MAND = ... # type: int LOCK_NB = ... # type: int LOCK_READ = ... # type: int LOCK_RW = ... # type: int LOCK_SH = ... # type: int LOCK_UN = ... # type: int LOCK_WRITE = ... # type: int _AnyFile = Union[int, IO[Any], IOBase] # TODO All these return either int or bytes depending on the value of # cmd (not on the type of arg). def fcntl(fd: _AnyFile, cmd: int, arg: Union[int, bytes] = ...) -> Any: ... 
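# A minimal, hypothetical sketch of why fcntl() is annotated as returning Any
# (illustration only, not part of the stub): the result type follows the value of
# cmd/arg at runtime -- an int (or omitted) arg yields an int, while a bytes arg
# yields the modified buffer back as bytes.
def _fcntl_usage_sketch(fd: int) -> None:
    import struct
    flags = fcntl(fd, F_GETFL)                                  # returns int
    lockdata = struct.pack('hhllhh', F_WRLCK, 0, 0, 0, 0, 0)
    result = fcntl(fd, F_SETLKW, lockdata)                      # returns bytes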
# TODO This function accepts any object supporting a buffer interface, # as arg, is there a better way to express this than bytes? def ioctl(fd: _AnyFile, request: int, arg: Union[int, bytes] = ..., mutate_flag: bool = ...) -> Any: ... def flock(fd: _AnyFile, operation: int) -> None: ... def lockf(fd: _AnyFile, cmd: int, len: int = ..., start: int = ..., whence: int = ...) -> Any: ... mypy-0.560/typeshed/stdlib/3/fnmatch.pyi0000644€tŠÔÚ€2›s®0000000055613215007212024306 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for fnmatch # Based on http://docs.python.org/3.2/library/fnmatch.html and # python-lib/fnmatch.py from typing import Iterable, List, AnyStr def fnmatch(name: AnyStr, pat: AnyStr) -> bool: ... def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: ... def filter(names: Iterable[AnyStr], pat: AnyStr) -> List[AnyStr]: ... def translate(pat: str) -> str: ... mypy-0.560/typeshed/stdlib/3/functools.pyi0000644€tŠÔÚ€2›s®0000000573713215007212024710 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Callable, Generic, Dict, Iterable, Mapping, Optional, Sequence, Tuple, Type, TypeVar, NamedTuple, Union, overload _AnyCallable = Callable[..., Any] _T = TypeVar("_T") _S = TypeVar("_S") @overload def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ... @overload def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T]) -> _T: ... class _CacheInfo(NamedTuple('CacheInfo', [ ('hits', int), ('misses', int), ('maxsize', int), ('currsize', int) ])): ... class _lru_cache_wrapper(Generic[_T]): __wrapped__ = ... # type: Callable[..., _T] def __call__(self, *args: Any, **kwargs: Any) -> _T: ... def cache_info(self) -> _CacheInfo: ... class lru_cache(): def __init__(self, maxsize: Optional[int] = ..., typed: bool = ...) -> None: ... def __call__(self, f: Callable[..., _T]) -> _lru_cache_wrapper[_T]: ... WRAPPER_ASSIGNMENTS = ... # type: Sequence[str] WRAPPER_UPDATES = ... # type: Sequence[str] def update_wrapper(wrapper: _AnyCallable, wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> _AnyCallable: ... def wraps(wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> Callable[[_AnyCallable], _AnyCallable]: ... def total_ordering(cls: type) -> type: ... def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], Any]: ... class partial(Generic[_T]): func = ... # type: Callable[..., _T] args = ... # type: Tuple[Any, ...] keywords = ... # type: Dict[str, Any] def __init__(self, func: Callable[..., _T], *args: Any, **kwargs: Any) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> _T: ... if sys.version_info >= (3, 4): # With protocols, this could change into a generic protocol that defines __get__ and returns _T _Descriptor = Any class partialmethod(Generic[_T]): func: Union[Callable[..., _T], _Descriptor] args: Tuple[Any, ...] keywords: Dict[str, Any] @overload def __init__(self, func: Callable[..., _T], *args: Any, **keywords: Any) -> None: ... @overload def __init__(self, func: _Descriptor, *args: Any, **keywords: Any) -> None: ... def __get__(self, obj: Any, cls: Type[Any]) -> Callable[..., _T]: ... @property def __isabstractmethod__(self) -> bool: ... class _SingleDispatchCallable(Generic[_T]): registry = ... # type: Mapping[Any, Callable[..., _T]] def dispatch(self, cls: Any) -> Callable[..., _T]: ... @overload def register(self, cls: Any) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... 
@overload def register(self, cls: Any, func: Callable[..., _T]) -> Callable[..., _T]: ... def _clear_cache(self) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> _T: ... def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... mypy-0.560/typeshed/stdlib/3/gc.pyi0000644€tŠÔÚ€2›s®0000000162513215007212023255 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for gc from typing import Any, Dict, List, Tuple DEBUG_COLLECTABLE = ... # type: int DEBUG_LEAK = ... # type: int DEBUG_SAVEALL = ... # type: int DEBUG_STATS = ... # type: int DEBUG_UNCOLLECTABLE = ... # type: int callbacks = ... # type: List[Any] garbage = ... # type: List[Any] def collect(generations: int = ...) -> int: ... def disable() -> None: ... def enable() -> None: ... def get_count() -> Tuple[int, int, int]: ... def get_debug() -> int: ... def get_objects() -> List[Any]: ... def get_referents(*objs: Any) -> List[Any]: ... def get_referrers(*objs: Any) -> List[Any]: ... def get_stats() -> List[Dict[str, Any]]: ... def get_threshold() -> Tuple[int, int, int]: ... def is_tracked(obj: Any) -> bool: ... def isenabled() -> bool: ... def set_debug(flags: int) -> None: ... def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ...) -> None: ... mypy-0.560/typeshed/stdlib/3/getopt.pyi0000644€tŠÔÚ€2›s®0000000110013215007212024152 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for getopt # Based on http://docs.python.org/3.2/library/getopt.html from typing import List, Tuple def getopt(args: List[str], shortopts: str, longopts: List[str]=...) -> Tuple[List[Tuple[str, str]], List[str]]: ... def gnu_getopt(args: List[str], shortopts: str, longopts: List[str]=...) -> Tuple[List[Tuple[str, str]], List[str]]: ... class GetoptError(Exception): msg = ... # type: str opt = ... # type: str error = GetoptError mypy-0.560/typeshed/stdlib/3/getpass.pyi0000644€tŠÔÚ€2›s®0000000032013215007212024321 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for getpass from typing import Optional, TextIO def getpass(prompt: str = ..., stream: Optional[TextIO] = ...) -> str: ... def getuser() -> str: ... class GetPassWarning(UserWarning): pass mypy-0.560/typeshed/stdlib/3/gettext.pyi0000644€tŠÔÚ€2›s®0000000360213215007212024345 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for gettext (Python 3.4) from typing import Any, IO, List, Optional, Union, Callable class NullTranslations: def __init__(self, fp: IO[str] = ...) -> None: ... def add_fallback(self, fallback: NullTranslations) -> None: ... def gettext(self, message: str) -> str: ... def lgettext(self, message: str) -> str: ... def ngettext(self, singular: str, plural: str, n: int) -> str: ... def lngettext(self, singular: str, plural: str, n: int) -> str: ... def info(self) -> Any: ... def charset(self) -> Any: ... def output_charset(self) -> Any: ... def set_output_charset(self, charset: Any) -> None: ... def install(self, names: List[str] = ...) -> None: ... class GNUTranslations(NullTranslations): LE_MAGIC = ... # type: int BE_MAGIC = ... # type: int def find(domain: str, localedir: str = ..., languages: List[str] = ..., all: bool = ...): ... def translation(domain: str, localedir: str = ..., languages: List[str] = ..., class_: Callable[[IO[str]], NullTranslations] = ..., fallback: bool =..., codeset: Any = ...) -> NullTranslations: ... def install(domain: str, localedir: str = ..., codeset: Any = ..., names: List[str] = ...): ... def textdomain(domain: str = ...) -> str: ... def bindtextdomain(domain: str, localedir: str = ...) 
-> str: ... def bind_textdomain_codeset(domain: str, codeset: str = ...) -> str: ... def dgettext(domain: str, message: str) -> str: ... def ldgettext(domain: str, message: str) -> str: ... def dngettext(domain: str, singular: str, plural: str, n: int) -> str: ... def ldngettext(domain: str, singular: str, plural: str, n: int) -> str: ... def gettext(message: str) -> str: ... def lgettext(message: str) -> str: ... def ngettext(singular: str, plural: str, n: int) -> str: ... def lngettext(singular: str, plural: str, n: int) -> str: ... Catalog = translation mypy-0.560/typeshed/stdlib/3/glob.pyi0000644€tŠÔÚ€2›s®0000000136413215007212023607 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for glob # Based on http://docs.python.org/3/library/glob.html from typing import List, Iterator, AnyStr import sys if sys.version_info >= (3, 6): def glob0(dirname: AnyStr, pattern: AnyStr) -> List[AnyStr]: ... else: def glob0(dirname: AnyStr, basename: AnyStr) -> List[AnyStr]: ... def glob1(dirname: AnyStr, pattern: AnyStr) -> List[AnyStr]: ... if sys.version_info >= (3, 5): def glob(pathname: AnyStr, *, recursive: bool = ...) -> List[AnyStr]: ... def iglob(pathname: AnyStr, *, recursive: bool = ...) -> Iterator[AnyStr]: ... else: def glob(pathname: AnyStr) -> List[AnyStr]: ... def iglob(pathname: AnyStr) -> Iterator[AnyStr]: ... if sys.version_info >= (3, 4): def escape(pathname: AnyStr) -> AnyStr: ... mypy-0.560/typeshed/stdlib/3/gzip.pyi0000644€tŠÔÚ€2›s®0000000353713215007212023641 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO, Optional from os.path import _PathType import _compression import zlib def open(filename, mode: str = ..., compresslevel: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ...) -> IO[Any]: ... class _PaddedFile: file: IO[bytes] def __init__(self, f: IO[bytes], prepend: bytes = ...) -> None: ... def read(self, size: int) -> bytes: ... def prepend(self, prepend: bytes = ...) -> None: ... def seek(self, off: int) -> int: ... def seekable(self) -> bool: ... class GzipFile(_compression.BaseStream): myfileobj: Optional[IO[bytes]] mode: str name: str compress: zlib._Compress fileobj: IO[bytes] def __init__(self, filename: Optional[_PathType] = ..., mode: Optional[str] = ..., compresslevel: int = ..., fileobj: Optional[IO[bytes]] = ..., mtime: Optional[float] = ...) -> None: ... @property def filename(self) -> str: ... @property def mtime(self): ... crc: int def write(self, data: bytes) -> int: ... def read(self, size: Optional[int] = ...) -> bytes: ... def read1(self, size: int = ...) -> bytes: ... def peek(self, n: int) -> bytes: ... @property def closed(self) -> bool: ... def close(self) -> None: ... def flush(self, zlib_mode: int = ...) -> None: ... def fileno(self) -> int: ... def rewind(self) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... def seekable(self) -> bool: ... def seek(self, offset: int, whence: int = ...) -> int: ... def readline(self, size: int = ...) -> bytes: ... class _GzipReader(_compression.DecompressReader): def __init__(self, fp: IO[bytes]) -> None: ... def read(self, size: int = ...) -> bytes: ... def compress(data, compresslevel: int = ...) -> bytes: ... def decompress(data: bytes) -> bytes: ... 
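# A minimal, hypothetical usage sketch (illustration only, not part of the stub above):
# the module-level compress()/decompress() helpers round-trip bytes.
def _gzip_usage_sketch() -> None:
    blob = compress(b'payload')              # -> bytes
    assert decompress(blob) == b'payload'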
mypy-0.560/typeshed/stdlib/3/hashlib.pyi0000644€tŠÔÚ€2›s®0000000504613215007212024277 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for hashlib import sys from typing import AbstractSet, Optional, Union _DataType = Union[bytes, bytearray, memoryview] class _Hash(object): digest_size = ... # type: int block_size = ... # type: int # [Python documentation note] Changed in version 3.4: The name attribute has # been present in CPython since its inception, but until Python 3.4 was not # formally specified, so may not exist on some platforms name = ... # type: str def __init__(self, data: _DataType = ...) -> None: ... def copy(self) -> _Hash: ... def digest(self) -> bytes: ... def hexdigest(self) -> str: ... def update(self, arg: _DataType) -> None: ... def md5(arg: _DataType = ...) -> _Hash: ... def sha1(arg: _DataType = ...) -> _Hash: ... def sha224(arg: _DataType = ...) -> _Hash: ... def sha256(arg: _DataType = ...) -> _Hash: ... def sha384(arg: _DataType = ...) -> _Hash: ... def sha512(arg: _DataType = ...) -> _Hash: ... def new(name: str, data: _DataType = ...) -> _Hash: ... # New in version 3.2 algorithms_guaranteed = ... # type: AbstractSet[str] algorithms_available = ... # type: AbstractSet[str] # New in version 3.4 if sys.version_info >= (3, 4): def pbkdf2_hmac(hash_name: str, password: _DataType, salt: _DataType, iterations: int, dklen: Optional[int] = ...) -> bytes: ... if sys.version_info >= (3, 6): class _VarLenHash(object): digest_size = ... # type: int block_size = ... # type: int name = ... # type: str def __init__(self, data: _DataType = ...) -> None: ... def copy(self) -> _VarLenHash: ... def digest(self, length: int) -> bytes: ... def hexdigest(self, length: int) -> str: ... def update(self, arg: _DataType) -> None: ... sha3_224 = _Hash sha3_256 = _Hash sha3_384 = _Hash sha3_512 = _Hash shake_128 = _VarLenHash shake_256 = _VarLenHash def scrypt(password: _DataType, *, salt: _DataType, n: int, r: int, p: int, maxmem: int = ..., dklen: int = ...) -> bytes: ... class _BlakeHash(_Hash): MAX_DIGEST_SIZE = ... # type: int MAX_KEY_SIZE = ... # type: int PERSON_SIZE = ... # type: int SALT_SIZE = ... # type: int def __init__(self, data: _DataType = ..., digest_size: int = ..., key: _DataType = ..., salt: _DataType = ..., person: _DataType = ..., fanout: int = ..., depth: int = ..., leaf_size: int = ..., node_offset: int = ..., node_depth: int = ..., inner_size: int = ..., last_node: bool = ...) -> None: ... blake2b = _BlakeHash blake2s = _BlakeHash mypy-0.560/typeshed/stdlib/3/heapq.pyi0000644€tŠÔÚ€2›s®0000000154013215007212023756 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for heapq # Based on http://docs.python.org/3.2/library/heapq.html import sys from typing import TypeVar, List, Iterable, Any, Callable, Optional _T = TypeVar('_T') def heappush(heap: List[_T], item: _T) -> None: ... def heappop(heap: List[_T]) -> _T: ... def heappushpop(heap: List[_T], item: _T) -> _T: ... def heapify(x: List[_T]) -> None: ... def heapreplace(heap: List[_T], item: _T) -> _T: ... if sys.version_info >= (3, 5): def merge(*iterables: Iterable[_T], key: Callable[[_T], Any] = ..., reverse: bool = ...) -> Iterable[_T]: ... else: def merge(*iterables: Iterable[_T]) -> Iterable[_T]: ... def nlargest(n: int, iterable: Iterable[_T], key: Optional[Callable[[_T], Any]] = ...) -> List[_T]: ... def nsmallest(n: int, iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> List[_T]: ... 
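# A minimal, hypothetical usage sketch (illustration only, not part of the stub above):
# the _T type variable ties the element type of the heap list to what heappush()
# accepts and heappop() returns.
def _heapq_usage_sketch() -> None:
    heap: List[int] = []
    for value in (5, 1, 3):
        heappush(heap, value)        # _T solved as int
    smallest = heappop(heap)         # -> int
    top_two = nlargest(2, [5, 1, 3])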
mypy-0.560/typeshed/stdlib/3/html/0000755€tŠÔÚ€2›s®0000000000013215007244023106 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/html/__init__.pyi0000644€tŠÔÚ€2›s®0000000017213215007212025363 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import AnyStr def escape(s: AnyStr, quote: bool = ...) -> AnyStr: ... def unescape(s: AnyStr) -> AnyStr: ... mypy-0.560/typeshed/stdlib/3/html/entities.pyi0000644€tŠÔÚ€2›s®0000000022313215007212025445 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any name2codepoint = ... # type: Any html5 = ... # type: Any codepoint2name = ... # type: Any entitydefs = ... # type: Any mypy-0.560/typeshed/stdlib/3/html/parser.pyi0000644€tŠÔÚ€2›s®0000000247313215007212025126 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Tuple from _markupbase import ParserBase import sys class HTMLParser(ParserBase): if sys.version_info >= (3, 5): def __init__(self, *, convert_charrefs: bool = ...) -> None: ... elif sys.version_info >= (3, 4): def __init__(self, strict: bool = ..., *, convert_charrefs: bool = ...) -> None: ... else: def __init__(self, strict: bool = ...) -> None: ... def feed(self, feed: str) -> None: ... def close(self) -> None: ... def reset(self) -> None: ... def getpos(self) -> Tuple[int, int]: ... def get_starttag_text(self) -> str: ... def handle_starttag(self, tag: str, attrs: List[Tuple[str, str]]) -> None: ... def handle_endtag(self, tag: str) -> None: ... def handle_startendtag(self, tag: str, attrs: List[Tuple[str, str]]) -> None: ... def handle_data(self, data: str) -> None: ... def handle_entityref(self, name: str) -> None: ... def handle_charref(self, name: str) -> None: ... def handle_comment(self, data: str) -> None: ... def handle_decl(self, decl: str) -> None: ... def handle_pi(self, data: str) -> None: ... def unknown_decl(self, data: str) -> None: ... if sys.version_info < (3, 5): class HTMLParseError(Exception): ... mypy-0.560/typeshed/stdlib/3/http/0000755€tŠÔÚ€2›s®0000000000013215007244023121 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/http/__init__.pyi0000644€tŠÔÚ€2›s®0000000603713215007212025404 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from enum import IntEnum if sys.version_info >= (3, 5): class HTTPStatus(IntEnum): def __init__(self, *a) -> None: ... phrase = ... # type: str description = ... # type: str CONTINUE = ... # type: HTTPStatus SWITCHING_PROTOCOLS = ... # type: HTTPStatus PROCESSING = ... # type: HTTPStatus OK = ... # type: HTTPStatus CREATED = ... # type: HTTPStatus ACCEPTED = ... # type: HTTPStatus NON_AUTHORITATIVE_INFORMATION = ... # type: HTTPStatus NO_CONTENT = ... # type: HTTPStatus RESET_CONTENT = ... # type: HTTPStatus PARTIAL_CONTENT = ... # type: HTTPStatus MULTI_STATUS = ... # type: HTTPStatus ALREADY_REPORTED = ... # type: HTTPStatus IM_USED = ... # type: HTTPStatus MULTIPLE_CHOICES = ... # type: HTTPStatus MOVED_PERMANENTLY = ... # type: HTTPStatus FOUND = ... # type: HTTPStatus SEE_OTHER = ... # type: HTTPStatus NOT_MODIFIED = ... # type: HTTPStatus USE_PROXY = ... # type: HTTPStatus TEMPORARY_REDIRECT = ... # type: HTTPStatus PERMANENT_REDIRECT = ... # type: HTTPStatus BAD_REQUEST = ... # type: HTTPStatus UNAUTHORIZED = ... # type: HTTPStatus PAYMENT_REQUIRED = ... # type: HTTPStatus FORBIDDEN = ... # type: HTTPStatus NOT_FOUND = ... # type: HTTPStatus METHOD_NOT_ALLOWED = ... # type: HTTPStatus NOT_ACCEPTABLE = ... # type: HTTPStatus PROXY_AUTHENTICATION_REQUIRED = ... 
# type: HTTPStatus REQUEST_TIMEOUT = ... # type: HTTPStatus CONFLICT = ... # type: HTTPStatus GONE = ... # type: HTTPStatus LENGTH_REQUIRED = ... # type: HTTPStatus PRECONDITION_FAILED = ... # type: HTTPStatus REQUEST_ENTITY_TOO_LARGE = ... # type: HTTPStatus REQUEST_URI_TOO_LONG = ... # type: HTTPStatus UNSUPPORTED_MEDIA_TYPE = ... # type: HTTPStatus REQUESTED_RANGE_NOT_SATISFIABLE = ... # type: HTTPStatus EXPECTATION_FAILED = ... # type: HTTPStatus UNPROCESSABLE_ENTITY = ... # type: HTTPStatus LOCKED = ... # type: HTTPStatus FAILED_DEPENDENCY = ... # type: HTTPStatus UPGRADE_REQUIRED = ... # type: HTTPStatus PRECONDITION_REQUIRED = ... # type: HTTPStatus TOO_MANY_REQUESTS = ... # type: HTTPStatus REQUEST_HEADER_FIELDS_TOO_LARGE = ... # type: HTTPStatus INTERNAL_SERVER_ERROR = ... # type: HTTPStatus NOT_IMPLEMENTED = ... # type: HTTPStatus BAD_GATEWAY = ... # type: HTTPStatus SERVICE_UNAVAILABLE = ... # type: HTTPStatus GATEWAY_TIMEOUT = ... # type: HTTPStatus HTTP_VERSION_NOT_SUPPORTED = ... # type: HTTPStatus VARIANT_ALSO_NEGOTIATES = ... # type: HTTPStatus INSUFFICIENT_STORAGE = ... # type: HTTPStatus LOOP_DETECTED = ... # type: HTTPStatus NOT_EXTENDED = ... # type: HTTPStatus NETWORK_AUTHENTICATION_REQUIRED = ... # type: HTTPStatus mypy-0.560/typeshed/stdlib/3/http/client.pyi0000644€tŠÔÚ€2›s®0000001637013215007212025124 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( Any, Dict, IO, Iterable, List, Iterator, Mapping, Optional, Tuple, TypeVar, Union, overload, ) import email.message import io from socket import socket import sys import ssl import types _DataType = Union[bytes, IO[Any], Iterable[bytes], str] _T = TypeVar('_T') HTTP_PORT = ... # type: int HTTPS_PORT = ... # type: int CONTINUE = ... # type: int SWITCHING_PROTOCOLS = ... # type: int PROCESSING = ... # type: int OK = ... # type: int CREATED = ... # type: int ACCEPTED = ... # type: int NON_AUTHORITATIVE_INFORMATION = ... # type: int NO_CONTENT = ... # type: int RESET_CONTENT = ... # type: int PARTIAL_CONTENT = ... # type: int MULTI_STATUS = ... # type: int IM_USED = ... # type: int MULTIPLE_CHOICES = ... # type: int MOVED_PERMANENTLY = ... # type: int FOUND = ... # type: int SEE_OTHER = ... # type: int NOT_MODIFIED = ... # type: int USE_PROXY = ... # type: int TEMPORARY_REDIRECT = ... # type: int BAD_REQUEST = ... # type: int UNAUTHORIZED = ... # type: int PAYMENT_REQUIRED = ... # type: int FORBIDDEN = ... # type: int NOT_FOUND = ... # type: int METHOD_NOT_ALLOWED = ... # type: int NOT_ACCEPTABLE = ... # type: int PROXY_AUTHENTICATION_REQUIRED = ... # type: int REQUEST_TIMEOUT = ... # type: int CONFLICT = ... # type: int GONE = ... # type: int LENGTH_REQUIRED = ... # type: int PRECONDITION_FAILED = ... # type: int REQUEST_ENTITY_TOO_LARGE = ... # type: int REQUEST_URI_TOO_LONG = ... # type: int UNSUPPORTED_MEDIA_TYPE = ... # type: int REQUESTED_RANGE_NOT_SATISFIABLE = ... # type: int EXPECTATION_FAILED = ... # type: int UNPROCESSABLE_ENTITY = ... # type: int LOCKED = ... # type: int FAILED_DEPENDENCY = ... # type: int UPGRADE_REQUIRED = ... # type: int PRECONDITION_REQUIRED = ... # type: int TOO_MANY_REQUESTS = ... # type: int REQUEST_HEADER_FIELDS_TOO_LARGE = ... # type: int INTERNAL_SERVER_ERROR = ... # type: int NOT_IMPLEMENTED = ... # type: int BAD_GATEWAY = ... # type: int SERVICE_UNAVAILABLE = ... # type: int GATEWAY_TIMEOUT = ... # type: int HTTP_VERSION_NOT_SUPPORTED = ... # type: int INSUFFICIENT_STORAGE = ... # type: int NOT_EXTENDED = ... # type: int NETWORK_AUTHENTICATION_REQUIRED = ... 
# type: int responses = ... # type: Dict[int, str] class HTTPMessage(email.message.Message): ... if sys.version_info >= (3, 5): class HTTPResponse(io.BufferedIOBase): msg = ... # type: HTTPMessage version = ... # type: int debuglevel = ... # type: int closed = ... # type: bool status = ... # type: int reason = ... # type: str def __init__(self, sock: socket, debuglevel: int = ..., method: Optional[str] = ..., url: Optional[str] = ...) -> None: ... def read(self, amt: Optional[int] = ...) -> bytes: ... def readinto(self, b: bytearray) -> int: ... @overload def getheader(self, name: str) -> Optional[str]: ... @overload def getheader(self, name: str, default: _T) -> Union[str, _T]: ... def getheaders(self) -> List[Tuple[str, str]]: ... def fileno(self) -> int: ... def isclosed(self) -> bool: ... def __iter__(self) -> Iterator[bytes]: ... def __enter__(self) -> 'HTTPResponse': ... def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception], exc_tb: Optional[types.TracebackType]) -> bool: ... else: class HTTPResponse: msg = ... # type: HTTPMessage version = ... # type: int debuglevel = ... # type: int closed = ... # type: bool status = ... # type: int reason = ... # type: str def read(self, amt: Optional[int] = ...) -> bytes: ... if sys.version_info >= (3, 3): def readinto(self, b: bytearray) -> int: ... @overload def getheader(self, name: str) -> Optional[str]: ... @overload def getheader(self, name: str, default: _T) -> Union[str, _T]: ... def getheaders(self) -> List[Tuple[str, str]]: ... def fileno(self) -> int: ... def __iter__(self) -> Iterator[bytes]: ... def __enter__(self) -> 'HTTPResponse': ... def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception], exc_tb: Optional[types.TracebackType]) -> bool: ... class HTTPConnection: if sys.version_info >= (3, 4): def __init__( self, host: str, port: Optional[int] = ..., timeout: int = ..., source_address: Optional[Tuple[str, int]] = ... ) -> None: ... else: def __init__( self, host: str, port: Optional[int] = ..., strict: bool = ..., timeout: int = ..., source_address: Optional[Tuple[str, int]] = ... )-> None: ... def request(self, method: str, url: str, body: Optional[_DataType] = ..., headers: Mapping[str, str] = ...) -> None: ... def getresponse(self) -> HTTPResponse: ... def set_debuglevel(self, level: int) -> None: ... def set_tunnel(self, host: str, port: Optional[int] = ..., headers: Optional[Mapping[str, str]] = ...) -> None: ... def connect(self) -> None: ... def close(self) -> None: ... def putrequest(self, request: str, selector: str, skip_host: bool = ..., skip_accept_encoding: bool = ...) -> None: ... def putheader(self, header: str, *argument: str) -> None: ... def endheaders(self, message_body: Optional[_DataType] = ...) -> None: ... def send(self, data: _DataType) -> None: ... class HTTPSConnection(HTTPConnection): if sys.version_info >= (3, 4): def __init__(self, host: str, port: Optional[int] = ..., key_file: Optional[str] = ..., cert_file: Optional[str] = ..., timeout: int = ..., source_address: Optional[Tuple[str, int]] = ..., *, context: Optional[ssl.SSLContext] = ..., check_hostname: Optional[bool] = ...) -> None: ... else: def __init__(self, host: str, port: Optional[int] = ..., key_file: Optional[str] = ..., cert_file: Optional[str] = ..., strict: bool = ..., timeout: int = ..., source_address: Optional[Tuple[str, int]] = ..., *, context: Optional[ssl.SSLContext] = ..., check_hostname: Optional[bool] = ...) -> None: ... class HTTPException(Exception): ... 
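# --- Illustrative usage sketch (editor's addition, not part of the stub above) ---
# Shows the intended call pattern for the HTTPConnection/HTTPResponse signatures
# above: request() sends, getresponse() returns an HTTPResponse, and getheader()
# has an overload taking a default. The host name is a placeholder; nothing is
# fetched unless this sketch is actually run with network access.

import http.client

def fetch_headers_sketch(host: str = "example.org") -> None:
    conn = http.client.HTTPSConnection(host, timeout=10)
    try:
        conn.request("HEAD", "/")                          # request(method, url, ...)
        response = conn.getresponse()                      # -> HTTPResponse
        status: int = response.status
        server = response.getheader("Server", "unknown")   # default -> Union[str, _T]
        print(status, server)
    finally:
        conn.close()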
error = HTTPException class NotConnected(HTTPException): ... class InvalidURL(HTTPException): ... class UnknownProtocol(HTTPException): ... class UnknownTransferEncoding(HTTPException): ... class UnimplementedFileMode(HTTPException): ... class IncompleteRead(HTTPException): ... class ImproperConnectionState(HTTPException): ... class CannotSendRequest(ImproperConnectionState): ... class CannotSendHeader(ImproperConnectionState): ... class ResponseNotReady(ImproperConnectionState): ... class BadStatusLine(HTTPException): ... class LineTooLong(HTTPException): ... if sys.version_info >= (3, 5): class RemoteDisconnected(ConnectionResetError, BadStatusLine): ... mypy-0.560/typeshed/stdlib/3/http/cookiejar.pyi0000644€tŠÔÚ€2›s®0000001124113215007212025604 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterable, Iterator, Optional, Sequence, Tuple, TypeVar, Union, overload from http.client import HTTPResponse import sys from urllib.request import Request _T = TypeVar('_T') if sys.version_info >= (3, 3): class LoadError(OSError): ... else: class LoadError(IOError): ... class CookieJar(Iterable['Cookie']): def __init__(self, policy: Optional['CookiePolicy'] = ...) -> None: ... def add_cookie_header(self, request: Request) -> None: ... def extract_cookies(self, response: HTTPResponse, request: Request) -> None: ... def set_policy(self, policy: 'CookiePolicy') -> None: ... def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence['Cookie']: ... def set_cookie(self, cookie: 'Cookie') -> None: ... def set_cookie_if_ok(self, cookie: 'Cookie', request: Request) -> None: ... def clear(self, domain: str = ..., path: str = ..., name: str = ...) -> None: ... def clear_session_cookies(self) -> None: ... def __iter__(self) -> Iterator['Cookie']: ... def __len__(self) -> int: ... class FileCookieJar(CookieJar): filename = ... # type: str delayload = ... # type: bool def __init__(self, filename: str = ..., delayload: bool = ..., policy: Optional['CookiePolicy'] = ...) -> None: ... def save(self, filename: Optional[str] = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... def load(self, filename: Optional[str] = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... def revert(self, filename: Optional[str] = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... class MozillaCookieJar(FileCookieJar): ... class LWPCookieJar(FileCookieJar): ... class CookiePolicy: netscape = ... # type: bool rfc2965 = ... # type: bool hide_cookie2 = ... # type: bool def set_ok(self, cookie: 'Cookie', request: Request) -> bool: ... def return_ok(self, cookie: 'Cookie', request: Request) -> bool: ... def domain_return_ok(self, domain: str, request: Request) -> bool: ... def path_return_ok(self, path: str, request: Request) -> bool: ... class DefaultCookiePolicy(CookiePolicy): rfc2109_as_netscape = ... # type: bool strict_domain = ... # type: bool strict_rfc2965_unverifiable = ... # type: bool strict_ns_unverifiable = ... # type: bool strict_ns_domain = ... # type: int strict_ns_set_initial_dollar = ... # type: bool strict_ns_set_path = ... # type: bool DomainStrictNoDots = ... # type: int DomainStrictNonDomain = ... # type: int DomainRFC2965Match = ... # type: int DomainLiberal = ... # type: int DomainStrict = ... 
# type: int def __init__(self, blocked_domains: Optional[Sequence[str]] = ..., allowed_domains: Optional[Sequence[str]] = ..., netscape: bool = ..., rfc2965: bool = ..., rfc2109_as_netscape: Optional[bool] = ..., hide_cookie2: bool = ..., strict_domain: bool = ..., strict_rfc2965_unverifiable: bool =..., strict_ns_unverifiable: bool = ..., strict_ns_domain: int = ..., strict_ns_set_initial_dollar: bool = ..., strict_ns_set_path: bool = ...) -> None: ... def blocked_domains(self) -> Tuple[str, ...]: ... def set_blocked_domains(self, blocked_domains: Sequence[str]) -> None: ... def is_blocked(self, domain: str) -> bool: ... def allowed_domains(self) -> Optional[Tuple[str, ...]]: ... def set_allowed_domains(self, allowed_domains: Optional[Sequence[str]]) -> None: ... def is_not_allowed(self, domain: str) -> bool: ... class Cookie: version = ... # type: Optional[int] name = ... # type: str value = ... # type: Optional[str] port = ... # type: Optional[str] path = ... # type: str secure = ... # type: bool expires = ... # type: Optional[int] discard = ... # type: bool comment = ... # type: Optional[str] comment_url = ... # type: Optional[str] rfc2109 = ... # type: bool port_specified = ... # type: bool domain_specified = ... # type: bool domain_initial_dot = ... # type: bool def has_nonstandard_attr(self, name: str) -> bool: ... @overload def get_nonstandard_attr(self, name: str) -> Optional[str]: ... @overload def get_nonstandard_attr(self, name: str, default: _T = ...) -> Union[str, _T]: ... def set_nonstandard_attr(self, name: str, value: str) -> None: ... def is_expired(self, now: int = ...) -> bool: ... mypy-0.560/typeshed/stdlib/3/http/cookies.pyi0000644€tŠÔÚ€2›s®0000000244313215007212025276 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for http.cookies (Python 3.5) from typing import Generic, Dict, List, Mapping, MutableMapping, Optional, TypeVar, Union _DataType = Union[str, Mapping[str, Union[str, 'Morsel']]] _T = TypeVar('_T') class CookieError(Exception): ... class Morsel(Dict[str, str], Generic[_T]): value = ... # type: str coded_value = ... # type: _T key = ... # type: str def set(self, key: str, val: str, coded_val: _T) -> None: ... def isReservedKey(self, K: str) -> bool: ... def output(self, attrs: Optional[List[str]] = ..., header: str = ...) -> str: ... def js_output(self, attrs: Optional[List[str]] = ...) -> str: ... def OutputString(self, attrs: Optional[List[str]] = ...) -> str: ... class BaseCookie(MutableMapping[str, Morsel], Generic[_T]): def __init__(self, input: Optional[_DataType] = ...) -> None: ... def value_decode(self, val: str) -> _T: ... def value_encode(self, val: _T) -> str: ... def output(self, attrs: Optional[List[str]] = ..., header: str = ..., sep: str = ...) -> str: ... def js_output(self, attrs: Optional[List[str]] = ...) -> str: ... def load(self, rawdata: _DataType) -> None: ... def __setitem__(self, key: str, value: Union[str, Morsel]) -> None: ... class SimpleCookie(BaseCookie): ... mypy-0.560/typeshed/stdlib/3/http/server.pyi0000644€tŠÔÚ€2›s®0000000527013215007212025151 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for http.server (Python 3.4) from typing import Any, BinaryIO, Dict, List, Mapping, Optional, Tuple, Union import socketserver import email.message class HTTPServer(socketserver.TCPServer): server_name = ... # type: str server_port = ... # type: int def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: type) -> None: ... class BaseHTTPRequestHandler: client_address = ... 
# type: Tuple[str, int] server = ... # type: socketserver.BaseServer close_connection = ... # type: bool requestline = ... # type: str command = ... # type: str path = ... # type: str request_version = ... # type: str headers = ... # type: email.message.Message rfile = ... # type: BinaryIO wfile = ... # type: BinaryIO server_version = ... # type: str sys_version = ... # type: str error_message_format = ... # type: str error_content_type = ... # type: str protocol_version = ... # type: str MessageClass = ... # type: type responses = ... # type: Mapping[int, Tuple[str, str]] def __init__(self, request: bytes, client_address: Tuple[str, int], server: socketserver.BaseServer) -> None: ... def handle(self) -> None: ... def handle_one_request(self) -> None: ... def handle_expect_100(self) -> bool: ... def send_error(self, code: int, message: Optional[str] = ..., explain: Optional[str] = ...) -> None: ... def send_response(self, code: int, message: Optional[str] = ...) -> None: ... def send_header(self, keyword: str, value: str) -> None: ... def send_response_only(self, code: int, message: Optional[str] = ...) -> None: ... def end_headers(self) -> None: ... def flush_headers(self) -> None: ... def log_request(self, code: Union[int, str] = ..., size: Union[int, str] = ...) -> None: ... def log_error(self, format: str, *args: Any) -> None: ... def log_message(self, format: str, *args: Any) -> None: ... def version_string(self) -> str: ... def date_time_string(self, timestamp: Optional[int] = ...) -> str: ... def log_date_time_string(self) -> str: ... def address_string(self) -> str: ... class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): extensions_map = ... # type: Dict[str, str] def __init__(self, request: bytes, client_address: Tuple[str, int], server: socketserver.BaseServer) -> None: ... def do_GET(self) -> None: ... def do_HEAD(self) -> None: ... class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): cgi_directories = ... # type: List[str] def do_POST(self) -> None: ... mypy-0.560/typeshed/stdlib/3/imp.pyi0000644€tŠÔÚ€2›s®0000000417513215007212023454 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for imp (Python 3.6) import os import sys import types from typing import Any, IO, List, Optional, Tuple, TypeVar, Union from _imp import (lock_held as lock_held, acquire_lock as acquire_lock, release_lock as release_lock, get_frozen_object as get_frozen_object, is_frozen_package as is_frozen_package, init_frozen as init_frozen, is_builtin as is_builtin, is_frozen as is_frozen) if sys.version_info >= (3, 5): from _imp import create_dynamic as create_dynamic _T = TypeVar('_T') if sys.version_info >= (3, 6): _Path = Union[str, os.PathLike[str]] else: _Path = str SEARCH_ERROR: int PY_SOURCE: int PY_COMPILED: int C_EXTENSION: int PY_RESOURCE: int PKG_DIRECTORY: int C_BUILTIN: int PY_FROZEN: int PY_CODERESOURCE: int IMP_HOOK: int def new_module(name: str) -> types.ModuleType: ... def get_magic() -> bytes: ... def get_tag() -> str: ... def cache_from_source(path: _Path, debug_override: Optional[bool] = ...) -> str: ... def source_from_cache(path: _Path) -> str: ... def get_suffixes() -> List[Tuple[str, str, int]]: ... class NullImporter: def __init__(self, path: _Path) -> None: ... def find_module(self, fullname: Any) -> None: ... # PathLike doesn't work for the pathname argument here def load_source(name: str, pathname: str, file: Optional[IO[Any]] = ...) -> types.ModuleType: ... def load_compiled(name: str, pathname: str, file: Optional[IO[Any]] = ...) -> types.ModuleType: ... 
def load_package(name: str, path: _Path) -> types.ModuleType: ... def load_module(name: str, file: IO[Any], filename: str, details: Tuple[str, str, int]) -> types.ModuleType: ... if sys.version_info >= (3, 6): def find_module(name: str, path: Union[None, List[str], List[os.PathLike[str]], List[_Path]] = ...) -> Tuple[str, str, Tuple[IO[Any], str, int]]: ... else: def find_module(name: str, path: Optional[List[str]] = ...) -> Tuple[str, str, Tuple[IO[Any], str, int]]: ... def reload(module: types.ModuleType) -> types.ModuleType: ... def init_builtin(name: str) -> Optional[types.ModuleType]: ... def load_dynamic(name: str, path: str, file: Optional[IO[Any]] = ...) -> types.ModuleType: ... mypy-0.560/typeshed/stdlib/3/importlib/0000755€tŠÔÚ€2›s®0000000000013215007244024143 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/importlib/__init__.pyi0000644€tŠÔÚ€2›s®0000000130513215007212026417 0ustar jukkaDROPBOX\Domain Users00000000000000from importlib import util from importlib.abc import Loader import sys import types from typing import Any, Mapping, Optional, Sequence def __import__(name: str, globals: Optional[Mapping[str, Any]] = ..., locals: Optional[Mapping[str, Any]] = ..., fromlist: Sequence[str] = ..., level: int = ...) -> types.ModuleType: ... def import_module(name: str, package: Optional[str] = ...) -> types.ModuleType: ... if sys.version_info >= (3, 3): def find_loader(name: str, path: Optional[str] = ...) -> Optional[Loader]: ... def invalidate_caches() -> None: ... if sys.version_info >= (3, 4): def reload(module: types.ModuleType) -> types.ModuleType: ... mypy-0.560/typeshed/stdlib/3/importlib/abc.pyi0000644€tŠÔÚ€2›s®0000000710113215007212025405 0ustar jukkaDROPBOX\Domain Users00000000000000from abc import ABCMeta, abstractmethod import sys import types from typing import Any, Mapping, Optional, Sequence, Tuple, Union # Loader is exported from this module, but for circular import reasons # exists in its own stub file (with ModuleSpec and ModuleType). from _importlib_modulespec import Loader as Loader # Exported if sys.version_info >= (3, 4): from _importlib_modulespec import ModuleSpec _Path = Union[bytes, str] class Finder(metaclass=ABCMeta): ... # Technically this class defines the following method, but its subclasses # in this module violate its signature. Since this class is deprecated, it's # easier to simply ignore that this method exists. # @abstractmethod # def find_module(self, fullname: str, # path: Optional[Sequence[_Path]] = ...) -> Optional[Loader]: ... class ResourceLoader(Loader): @abstractmethod def get_data(self, path: _Path) -> bytes: ... class InspectLoader(Loader): def is_package(self, fullname: str) -> bool: ... def get_code(self, fullname: str) -> Optional[types.CodeType]: ... def load_module(self, fullname: str) -> types.ModuleType: ... @abstractmethod def get_source(self, fullname: str) -> Optional[str]: ... if sys.version_info >= (3, 4): def exec_module(self, module: types.ModuleType) -> None: ... if sys.version_info[:2] == (3, 4): def source_to_code(self, data: Union[bytes, str], path: str = ...) -> types.CodeType: ... elif sys.version_info >= (3, 5): @staticmethod def source_to_code(data: Union[bytes, str], path: str = ...) -> types.CodeType: ... class ExecutionLoader(InspectLoader): @abstractmethod def get_filename(self, fullname: str) -> _Path: ... def get_code(self, fullname: str) -> Optional[types.CodeType]: ... class SourceLoader(ResourceLoader, ExecutionLoader): def path_mtime(self, path: _Path) -> Union[int, float]: ... 
def set_data(self, path: _Path, data: bytes) -> None: ... def get_source(self, fullname: str) -> Optional[str]: ... if sys.version_info >= (3, 3): def path_stats(self, path: _Path) -> Mapping[str, Any]: ... if sys.version_info >= (3, 3): class MetaPathFinder(Finder): def find_module(self, fullname: str, path: Optional[Sequence[_Path]]) -> Optional[Loader]: ... def invalidate_caches(self) -> None: ... if sys.version_info >= (3, 4): # Not defined on the actual class, but expected to exist. def find_spec( self, fullname: str, path: Optional[Sequence[_Path]], target: Optional[types.ModuleType] = ... ) -> Optional[ModuleSpec]: ... class PathEntryFinder(Finder): def find_module(self, fullname: str) -> Optional[Loader]: ... def find_loader( self, fullname: str ) -> Tuple[Optional[Loader], Sequence[_Path]]: ... def invalidate_caches(self) -> None: ... if sys.version_info >= (3, 4): # Not defined on the actual class, but expected to exist. def find_spec( self, fullname: str, target: Optional[types.ModuleType] = ... ) -> Optional[ModuleSpec]: ... class FileLoader(ResourceLoader, ExecutionLoader): name = ... # type: str path = ... # type: _Path def __init__(self, fullname: str, path: _Path) -> None: ... def get_data(self, path: _Path) -> bytes: ... def get_filename(self, fullname: str) -> _Path: ... mypy-0.560/typeshed/stdlib/3/importlib/machinery.pyi0000644€tŠÔÚ€2›s®0000001554413215007212026651 0ustar jukkaDROPBOX\Domain Users00000000000000import importlib.abc import sys import types from typing import Any, Callable, List, Optional, Sequence, Tuple, Union # ModuleSpec is exported from this module, but for circular import # reasons exists in its own stub file (with Loader and ModuleType). if sys.version_info >= (3, 4): from _importlib_modulespec import ModuleSpec as ModuleSpec # Exported if sys.version_info >= (3, 3): class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @classmethod def find_module( cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]] ) -> Optional[importlib.abc.Loader]: ... if sys.version_info >= (3, 4): @classmethod def find_spec(cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]], target: Optional[types.ModuleType] = ...) -> Optional[ModuleSpec]: ... # InspectLoader @classmethod def is_package(cls, fullname: str) -> bool: ... @classmethod def load_module(cls, fullname: str) -> types.ModuleType: ... @classmethod def get_code(cls, fullname: str) -> None: ... @classmethod def get_source(cls, fullname: str) -> None: ... # Loader @classmethod def load_module(cls, fullname: str) -> types.ModuleType: ... if sys.version_info >= (3, 3): @staticmethod def module_repr(module: types.ModuleType) -> str: ... # type: ignore if sys.version_info >= (3, 4): @classmethod def create_module(cls, spec: ModuleSpec) -> Optional[types.ModuleType]: ... @classmethod def exec_module(cls, module: types.ModuleType) -> None: ... else: class BuiltinImporter(importlib.abc.InspectLoader): # MetaPathFinder @classmethod def find_module( cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]] ) -> Optional[importlib.abc.Loader]: ... # InspectLoader @classmethod def is_package(cls, fullname: str) -> bool: ... @classmethod def load_module(cls, fullname: str) -> types.ModuleType: ... @classmethod def get_code(cls, fullname: str) -> None: ... @classmethod def get_source(cls, fullname: str) -> None: ... # Loader @classmethod def load_module(cls, fullname: str) -> types.ModuleType: ... 
if sys.version_info >= (3, 3): class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @classmethod def find_module( cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]] ) -> Optional[importlib.abc.Loader]: ... if sys.version_info >= (3, 4): @classmethod def find_spec(cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]], target: Optional[types.ModuleType] = ...) -> Optional[ModuleSpec]: ... # InspectLoader @classmethod def is_package(cls, fullname: str) -> bool: ... @classmethod def load_module(cls, fullname: str) -> types.ModuleType: ... @classmethod def get_code(cls, fullname: str) -> None: ... @classmethod def get_source(cls, fullname: str) -> None: ... # Loader @classmethod def load_module(cls, fullname: str) -> types.ModuleType: ... if sys.version_info >= (3, 3): @staticmethod def module_repr(module: types.ModuleType) -> str: ... # type: ignore if sys.version_info >= (3, 4): @classmethod def create_module(cls, spec: ModuleSpec) -> Optional[types.ModuleType]: ... @staticmethod def exec_module(module: types.ModuleType) -> None: ... # type: ignore else: class FrozenImporter(importlib.abc.InspectLoader): # MetaPathFinder @classmethod def find_module( cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]] ) -> Optional[importlib.abc.Loader]: ... # InspectLoader @classmethod def is_package(cls, fullname: str) -> bool: ... @classmethod def load_module(cls, fullname: str) -> types.ModuleType: ... @classmethod def get_code(cls, fullname: str) -> None: ... # type: ignore @classmethod def get_source(cls, fullname: str) -> None: ... # type: ignore # Loader @classmethod def load_module(cls, fullname: str) -> types.ModuleType: ... if sys.version_info >= (3, 3): class WindowsRegistryFinder(importlib.abc.MetaPathFinder): @classmethod def find_module( cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]] ) -> Optional[importlib.abc.Loader]: ... if sys.version_info >= (3, 4): @classmethod def find_spec(cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]], target: Optional[types.ModuleType] = ...) -> Optional[ModuleSpec]: ... else: class WindowsRegisteryFinder: @classmethod def find_module( cls, fullname: str, path: Optional[Sequence[importlib.abc._Path]] ) -> Optional[importlib.abc.Loader]: ... if sys.version_info >= (3, 3): class PathFinder(importlib.abc.MetaPathFinder): ... else: class PathFinder: ... if sys.version_info >= (3, 3): SOURCE_SUFFIXES = ... # type: List[str] DEBUG_BYTECODE_SUFFIXES = ... # type: List[str] OPTIMIZED_BYTECODE_SUFFIXES = ... # type: List[str] BYTECODE_SUFFIXES = ... # type: List[str] EXTENSION_SUFFIXES = ... # type: List[str] def all_suffixes() -> List[str]: ... class FileFinder(importlib.abc.PathEntryFinder): path = ... # type: str def __init__( self, path: str, *loader_details: Tuple[importlib.abc.Loader, List[str]] ) -> None: ... @classmethod def path_hook( *loader_details: Tuple[importlib.abc.Loader, List[str]] ) -> Callable[[str], importlib.abc.PathEntryFinder]: ... class SourceFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): ... class SourcelessFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): ... class ExtensionFileLoader(importlib.abc.ExecutionLoader): def get_filename(self, fullname: str) -> importlib.abc._Path: ... def get_source(self, fullname: str) -> None: ... 
# type: ignore mypy-0.560/typeshed/stdlib/3/importlib/util.pyi0000644€tŠÔÚ€2›s®0000000373313215007212025644 0ustar jukkaDROPBOX\Domain Users00000000000000import importlib.abc import importlib.machinery import sys import types from typing import Any, Callable, List, Optional def module_for_loader( fxn: Callable[..., types.ModuleType] ) -> Callable[..., types.ModuleType]: ... def set_loader( fxn: Callable[..., types.ModuleType] ) -> Callable[..., types.ModuleType]: ... def set_package( fxn: Callable[..., types.ModuleType] ) -> Callable[..., types.ModuleType]: ... if sys.version_info >= (3, 3): def resolve_name(name: str, package: str) -> str: ... if sys.version_info >= (3, 4): MAGIC_NUMBER = ... # type: bytes def cache_from_source(path: str, debug_override: Optional[bool] = ..., *, optimization: Optional[Any] = ...) -> str: ... def source_from_cache(path: str) -> str: ... def decode_source(source_bytes: bytes) -> str: ... def find_spec( name: str, package: Optional[str] = ... ) -> importlib.machinery.ModuleSpec: ... def spec_from_loader( name: str, loader: Optional[importlib.abc.Loader], *, origin: Optional[str] = ..., loader_state: Optional[Any] = ..., is_package: Optional[bool] = ... ) -> importlib.machinery.ModuleSpec: ... def spec_from_file_location( name: str, location: str, *, loader: Optional[importlib.abc.Loader] = ..., submodule_search_locations: Optional[List[str]] = ... ) -> importlib.machinery.ModuleSpec: ... if sys.version_info >= (3, 5): def module_from_spec( spec: importlib.machinery.ModuleSpec ) -> types.ModuleType: ... class LazyLoader(importlib.abc.Loader): def __init__(self, loader: importlib.abc.Loader) -> None: ... @classmethod def factory( cls, loader: importlib.abc.Loader ) -> Callable[..., 'LazyLoader']: ... def create_module( self, spec: importlib.machinery.ModuleSpec ) -> Optional[types.ModuleType]: ... def exec_module(self, module: types.ModuleType) -> None: ... mypy-0.560/typeshed/stdlib/3/inspect.pyi0000644€tŠÔÚ€2›s®0000002434313215007212024333 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import (AbstractSet, Any, Tuple, List, Dict, Callable, Generator, Mapping, MutableMapping, NamedTuple, Optional, Sequence, Union, ) from types import FrameType, ModuleType, TracebackType # # Types and members # if sys.version_info < (3, 6): ModuleInfo = NamedTuple('ModuleInfo', [('name', str), ('suffix', str), ('mode', str), ('module_type', int), ]) def getmoduleinfo(path: str) -> Optional[ModuleInfo]: ... def getmembers(object: object, predicate: Callable[[Any], bool] = ..., ) -> List[Tuple[str, Any]]: ... def getmodulename(path: str) -> Optional[str]: ... def ismodule(object: object) -> bool: ... def isclass(object: object) -> bool: ... def ismethod(object: object) -> bool: ... def isfunction(object: object) -> bool: ... def isgeneratorfunction(object: object) -> bool: ... def isgenerator(object: object) -> bool: ... # Python 3.5+ def iscoroutinefunction(object: object) -> bool: ... def iscoroutine(object: object) -> bool: ... def isawaitable(object: object) -> bool: ... def istraceback(object: object) -> bool: ... def isframe(object: object) -> bool: ... def iscode(object: object) -> bool: ... def isbuiltin(object: object) -> bool: ... def isroutine(object: object) -> bool: ... def isabstract(object: object) -> bool: ... def ismethoddescriptor(object: object) -> bool: ... def isdatadescriptor(object: object) -> bool: ... def isgetsetdescriptor(object: object) -> bool: ... def ismemberdescriptor(object: object) -> bool: ... 
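# --- Illustrative usage sketch (editor's addition, not part of the stub above) ---
# Demonstrates the predicate functions and getmembers() declared above: the
# optional predicate narrows which (name, value) pairs are returned. The choice
# of the json module as a target is arbitrary.

import inspect
import json

def list_public_functions_sketch() -> None:
    functions = inspect.getmembers(json, inspect.isfunction)
    for name, func in functions:
        if not name.startswith("_"):
            print(name, inspect.isfunction(func))  # e.g. "dumps True"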
# # Retrieving source code # def getdoc(object: object) -> str: ... def getcomments(object: object) -> str: ... def getfile(object: object) -> str: ... def getmodule(object: object) -> ModuleType: ... def getsourcefile(object: object) -> str: ... # TODO restrict to "module, class, method, function, traceback, frame, # or code object" def getsourcelines(object: object) -> Tuple[List[str], int]: ... # TODO restrict to "a module, class, method, function, traceback, frame, # or code object" def getsource(object: object) -> str: ... def cleandoc(doc: str) -> str: ... # # Introspecting callables with the Signature object (Python 3.3+) # def signature(callable: Callable[..., Any], *, follow_wrapped: bool = ...) -> 'Signature': ... class Signature: def __init__(self, parameters: Optional[Sequence['Parameter']] = ..., *, return_annotation: Any = ...) -> None: ... # TODO: can we be more specific here? empty = ... # type: object parameters = ... # type: Mapping[str, 'Parameter'] # TODO: can we be more specific here? return_annotation = ... # type: Any def bind(self, *args: Any, **kwargs: Any) -> 'BoundArguments': ... def bind_partial(self, *args: Any, **kwargs: Any) -> 'BoundArguments': ... def replace(self, *, parameters: Optional[Sequence['Parameter']] = ..., return_annotation: Any = ...) -> 'Signature': ... # Python 3.5+ @classmethod def from_callable(cls, obj: Callable[..., Any], *, follow_wrapped: bool = ...) -> 'Signature': ... # The name is the same as the enum's name in CPython class _ParameterKind: ... class Parameter: def __init__(self, name: str, kind: _ParameterKind, *, default: Any = ..., annotation: Any = ...) -> None: ... empty = ... # type: Any name = ... # type: str default = ... # type: Any annotation = ... # type: Any kind = ... # type: _ParameterKind POSITIONAL_ONLY = ... # type: _ParameterKind POSITIONAL_OR_KEYWORD = ... # type: _ParameterKind VAR_POSITIONAL = ... # type: _ParameterKind KEYWORD_ONLY = ... # type: _ParameterKind VAR_KEYWORD = ... # type: _ParameterKind def replace(self, *, name: Optional[str] = ..., kind: Optional[_ParameterKind] = ..., default: Any = ..., annotation: Any = ...) -> 'Parameter': ... class BoundArguments: arguments = ... # type: MutableMapping[str, Any] args = ... # Tuple[Any, ...] kwargs = ... # Dict[str, Any] signature = ... # type: Signature # Python 3.5+ def apply_defaults(self) -> None: ... # # Classes and functions # # TODO: The actual return type should be List[_ClassTreeItem] but mypy doesn't # seem to be supporting this at the moment: # _ClassTreeItem = Union[List['_ClassTreeItem'], Tuple[type, Tuple[type, ...]]] def getclasstree(classes: List[type], unique: bool = ...) -> Any: ... ArgSpec = NamedTuple('ArgSpec', [('args', List[str]), ('varargs', str), ('keywords', str), ('defaults', tuple), ]) def getargspec(func: object) -> ArgSpec: ... FullArgSpec = NamedTuple('FullArgSpec', [('args', List[str]), ('varargs', str), ('varkw', str), ('defaults', tuple), ('kwonlyargs', List[str]), ('kwonlydefaults', Dict[str, Any]), ('annotations', Dict[str, Any]), ]) def getfullargspec(func: object) -> FullArgSpec: ... # TODO make the field types more specific here ArgInfo = NamedTuple('ArgInfo', [('args', List[str]), ('varargs', Optional[str]), ('keywords', Optional[str]), ('locals', Dict[str, Any]), ]) def getargvalues(frame: FrameType) -> ArgInfo: ... 
def formatargspec(args: List[str], varargs: Optional[str] = ..., varkw: Optional[str] = ..., defaults: Optional[Tuple[Any, ...]] = ..., kwonlyargs: Optional[List[str]] = ..., kwonlydefaults: Optional[Dict[str, Any]] = ..., annotations: Optional[Dict[str, Any]] = ..., formatarg: Optional[Callable[[str], str]] = ..., formatvarargs: Optional[Callable[[str], str]] = ..., formatvarkw: Optional[Callable[[str], str]] = ..., formatvalue: Optional[Callable[[Any], str]] = ..., formatreturns: Optional[Callable[[Any], str]] = ..., formatannotations: Optional[Callable[[Any], str]] = ..., ) -> str: ... def formatargvalues(args: List[str], varargs: Optional[str] = ..., varkw: Optional[str] = ..., locals: Optional[Dict[str, Any]] = ..., formatarg: Optional[Callable[[str], str]] = ..., formatvarargs: Optional[Callable[[str], str]] = ..., formatvarkw: Optional[Callable[[str], str]] = ..., formatvalue: Optional[Callable[[Any], str]] = ..., ) -> str: ... def getmro(cls: type) -> Tuple[type, ...]: ... # Python 3.2+ def getcallargs(func: Callable[..., Any], *args: Any, **kwds: Any) -> Dict[str, Any]: ... # Python 3.3+ ClosureVars = NamedTuple('ClosureVars', [('nonlocals', Mapping[str, Any]), ('globals', Mapping[str, Any]), ('builtins', Mapping[str, Any]), ('unbound', AbstractSet[str]), ]) def getclosurevars(func: Callable[..., Any]) -> ClosureVars: ... # Python 3.4+ def unwrap(func: Callable[..., Any], *, stop: Callable[[Any], Any]) -> Any: ... # # The interpreter stack # # Python 3.5+ (functions returning it used to return regular tuples) FrameInfo = NamedTuple('FrameInfo', [('frame', FrameType), ('filename', str), ('lineno', int), ('function', str), ('code_context', List[str]), ('index', int), ]) # TODO make the frame type more specific def getframeinfo(frame: Any, context: int = ...) -> FrameInfo: ... def getouterframes(frame: Any, context: int = ...) -> List[FrameInfo]: ... def getinnerframes(traceback: TracebackType, context: int = ...) -> List[FrameInfo]: ... def currentframe() -> Optional[FrameType]: ... def stack(context: int = ...) -> List[FrameInfo]: ... def trace(context: int = ...) -> List[FrameInfo]: ... # # Fetching attributes statically # # Python 3.2+ def getattr_static(obj: object, attr: str, default: Optional[Any] = ...) -> Any: ... # # Current State of Generators and Coroutines # # TODO In the next two blocks of code, can we be more specific regarding the # type of the "enums"? # Python 3.2+ GEN_CREATED = ... # type: str GEN_RUNNING = ... # type: str GEN_SUSPENDED = ... # type: str GEN_CLOSED = ... # type: str def getgeneratorstate(generator: Generator[Any, Any, Any]) -> str: ... # Python 3.5+ CORO_CREATED = ... # type: str CORO_RUNNING = ... # type: str CORO_SUSPENDED = ... # type: str CORO_CLOSED = ... # type: str # TODO can we be more specific than "object"? def getcoroutinestate(coroutine: object) -> str: ... # Python 3.3+ def getgeneratorlocals(generator: Generator[Any, Any, Any]) -> Dict[str, Any]: ... # Python 3.5+ # TODO can we be more specific than "object"? def getcoroutinelocals(coroutine: object) -> Dict[str, Any]: ... # # The following seems undocumented but it was already present in this file # _object = object # namedtuple('Attribute', 'name kind defining_class object') class Attribute(tuple): name = ... # type: str kind = ... # type: str defining_class = ... # type: type object = ... # type: _object def classify_class_attrs(cls: type) -> List[Attribute]: ... 
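# --- Illustrative usage sketch (editor's addition, not part of the stub above) ---
# Shows the Signature/Parameter/BoundArguments API declared above: signature()
# builds a Signature, bind() produces BoundArguments, and apply_defaults()
# (Python 3.5+ per the stub) fills in unbound defaults. The greet() function and
# its arguments are made up for the example.

import inspect

def greet(name: str, punctuation: str = "!") -> str:
    return "Hello, " + name + punctuation

def signature_sketch() -> None:
    sig = inspect.signature(greet)                      # -> Signature
    kinds = {p.name: p.kind for p in sig.parameters.values()}
    bound = sig.bind("world")                           # -> BoundArguments
    bound.apply_defaults()
    assert bound.arguments == {"name": "world", "punctuation": "!"}
    assert sig.return_annotation is str
    print(kinds)                                        # both POSITIONAL_OR_KEYWORD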
mypy-0.560/typeshed/stdlib/3/io.pyi0000644€tŠÔÚ€2›s®0000002337113215007212023275 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( List, BinaryIO, TextIO, Iterator, Union, Optional, Callable, Tuple, Any, IO, Iterable ) import builtins import codecs import sys from types import TracebackType from typing import TypeVar DEFAULT_BUFFER_SIZE = ... # type: int SEEK_SET = ... # type: int SEEK_CUR = ... # type: int SEEK_END = ... # type: int _T = TypeVar('_T', bound='IOBase') open = builtins.open if sys.version_info >= (3, 3): BlockingIOError = builtins.BlockingIOError class UnsupportedOperation(OSError, ValueError): ... else: class BlockingIOError(IOError): characters_written: int class UnsupportedOperation(IOError, ValueError): ... class IOBase: def __iter__(self) -> Iterator[bytes]: ... def __next__(self) -> bytes: ... def __enter__(self: _T) -> _T: ... def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception], exc_tb: Optional[TracebackType]) -> bool: ... def close(self) -> None: ... def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def readable(self) -> bool: ... def readlines(self, hint: int = ...) -> List[bytes]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def writable(self) -> bool: ... def writelines(self, lines: Iterable[Union[bytes, bytearray]]) -> None: ... if sys.version_info >= (3, 4): def readline(self, size: int = ...) -> bytes: ... def __del__(self) -> None: ... else: def readline(self, limit: int = ...) -> bytes: ... if sys.version_info >= (3, 2): @property def closed(self) -> bool: ... else: def closed(self) -> bool: ... class RawIOBase(IOBase): def readall(self) -> bytes: ... def readinto(self, b: bytearray) -> Optional[int]: ... def write(self, b: Union[bytes, bytearray]) -> Optional[int]: ... if sys.version_info >= (3, 4): def read(self, size: int = ...) -> Optional[bytes]: ... else: def read(self, n: int = ...) -> Optional[bytes]: ... class BufferedIOBase(IOBase): def detach(self) -> RawIOBase: ... def readinto(self, b: bytearray) -> int: ... def write(self, b: Union[bytes, bytearray]) -> int: ... if sys.version_info >= (3, 5): def readinto1(self, b: bytearray) -> int: ... if sys.version_info >= (3, 4): def read(self, size: Optional[int] = ...) -> bytes: ... def read1(self, size: int = ...) -> bytes: ... else: def read(self, n: Optional[int] = ...) -> bytes: ... def read1(self, n: int = ...) -> bytes: ... class FileIO(RawIOBase): mode = ... # type: str name = ... # type: Union[int, str] if sys.version_info >= (3, 3): def __init__( self, name: Union[str, bytes, int], mode: str = ..., closefd: bool = ..., opener: Optional[Callable[[Union[int, str], str], int]] = ... ) -> None: ... else: def __init__(self, name: Union[str, bytes, int], mode: str = ..., closefd: bool = ...) -> None: ... # TODO should extend from BufferedIOBase class BytesIO(BinaryIO): def __init__(self, initial_bytes: bytes = ...) -> None: ... def getvalue(self) -> bytes: ... if sys.version_info >= (3, 2): def getbuffer(self) -> memoryview: ... # copied from IOBase def __iter__(self) -> Iterator[bytes]: ... def __next__(self) -> bytes: ... def __enter__(self) -> 'BytesIO': ... def __exit__(self, t: Optional[type] = ..., value: Optional[BaseException] = ..., traceback: Optional[Any] = ...) -> bool: ... def close(self) -> None: ... def fileno(self) -> int: ... def flush(self) -> None: ... 
def isatty(self) -> bool: ... def readable(self) -> bool: ... def readlines(self, hint: int = ...) -> List[bytes]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... def truncate(self, size: Optional[int] = ...) -> int: ... def writable(self) -> bool: ... # TODO should be the next line instead # def writelines(self, lines: List[Union[bytes, bytearray]]) -> None: ... def writelines(self, lines: Any) -> None: ... if sys.version_info >= (3, 4): def readline(self, size: int = ...) -> bytes: ... def __del__(self) -> None: ... else: def readline(self, limit: int = ...): ... if sys.version_info >= (3, 2): closed = ... # type: bool else: def closed(self) -> bool: ... # copied from BufferedIOBase def detach(self) -> RawIOBase: ... def readinto(self, b: bytearray) -> int: ... def write(self, b: Union[bytes, bytearray]) -> int: ... if sys.version_info >= (3, 5): def readinto1(self, b: bytearray) -> int: ... if sys.version_info >= (3, 4): def read(self, size: Optional[int] = ...) -> bytes: ... def read1(self, size: int = ...) -> bytes: ... else: def read(self, n: Optional[int] = ...) -> bytes: ... def read1(self, n: int = ...) -> bytes: ... class BufferedReader(BufferedIOBase): def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... if sys.version_info >= (3, 4): def peek(self, size: int = ...) -> bytes: ... else: def peek(self, n: int = ...) -> bytes: ... class BufferedWriter(BufferedIOBase): def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def flush(self) -> None: ... def write(self, b: Union[bytes, bytearray]) -> int: ... class BufferedRandom(BufferedReader, BufferedWriter): def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... class BufferedRWPair(BufferedIOBase): def __init__(self, reader: RawIOBase, writer: RawIOBase, buffer_size: int = ...) -> None: ... class TextIOBase(IOBase): encoding = ... # type: str errors = ... # type: Optional[str] newlines = ... # type: Union[str, Tuple[str, ...], None] def __iter__(self) -> Iterator[str]: ... # type: ignore def __next__(self) -> str: ... # type: ignore def detach(self) -> IOBase: ... def write(self, s: str) -> int: ... if sys.version_info >= (3, 4): def readline(self, size: int = ...) -> str: ... # type: ignore def read(self, size: Optional[int] = ...) -> str: ... elif sys.version_info >= (3, 2): def readline(self, limit: int = ...) -> str: ... # type: ignore else: def readline(self) -> str: ... if sys.version_info >= (3, 2): def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... # TODO should extend from TextIOBase class TextIOWrapper(TextIO): line_buffering = ... # type: bool # TODO uncomment after fixing mypy about using write_through # if sys.version_info >= (3, 3): # def __init__(self, buffer: IO[bytes], encoding: str = ..., # errors: Optional[str] = ..., newline: Optional[str] = ..., # line_buffering: bool = ..., write_through: bool = ...) \ # -> None: ... # else: # def __init__(self, buffer: IO[bytes], # encoding: str = ..., errors: Optional[str] = ..., # newline: Optional[str] = ..., line_buffering: bool = ...) \ # -> None: ... def __init__( self, buffer: IO[bytes], encoding: str = ..., errors: Optional[str] = ..., newline: Optional[str] = ..., line_buffering: bool = ..., write_through: bool = ... ) -> None: ... 
# copied from IOBase def __exit__(self, t: Optional[type] = ..., value: Optional[BaseException] = ..., traceback: Optional[Any] = ...) -> bool: ... def close(self) -> None: ... def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... def readable(self) -> bool: ... def readlines(self, hint: int = ...) -> List[str]: ... def seekable(self) -> bool: ... def truncate(self, size: Optional[int] = ...) -> int: ... def writable(self) -> bool: ... # TODO should be the next line instead # def writelines(self, lines: List[str]) -> None: ... def writelines(self, lines: Any) -> None: ... if sys.version_info >= (3, 4): def __del__(self) -> None: ... if sys.version_info >= (3, 2): closed = ... # type: bool else: def closed(self) -> bool: ... # copied from TextIOBase encoding = ... # type: str errors = ... # type: Optional[str] newlines = ... # type: Union[str, Tuple[str, ...], None] def __iter__(self) -> Iterator[str]: ... def __next__(self) -> str: ... def __enter__(self) -> 'TextIO': ... def detach(self) -> IOBase: ... def write(self, s: str) -> int: ... if sys.version_info >= (3, 4): def readline(self, size: int = ...) -> str: ... def read(self, size: Optional[int] = ...) -> str: ... elif sys.version_info >= (3, 2): def readline(self, limit: int = ...) -> str: ... else: def readline(self) -> str: ... if sys.version_info >= (3, 2): def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... class StringIO(TextIOWrapper): def __init__(self, initial_value: str = ..., newline: Optional[str] = ...) -> None: ... name = ... # type: str def getvalue(self) -> str: ... def __enter__(self) -> 'StringIO': ... class IncrementalNewlineDecoder(codecs.IncrementalDecoder): def decode(self, input: codecs._encoded, final: bool = ...) -> codecs._decoded: ... mypy-0.560/typeshed/stdlib/3/itertools.pyi0000644€tŠÔÚ€2›s®0000000771713215007212024720 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for itertools # Based on http://docs.python.org/3.2/library/itertools.html from typing import (Iterator, TypeVar, Iterable, overload, Any, Callable, Tuple, Generic, Optional) _T = TypeVar('_T') _S = TypeVar('_S') _N = TypeVar('_N', int, float) def count(start: _N = ..., step: _N = ...) -> Iterator[_N]: ... # more general types? def cycle(iterable: Iterable[_T]) -> Iterator[_T]: ... @overload def repeat(object: _T) -> Iterator[_T]: ... @overload def repeat(object: _T, times: int) -> Iterator[_T]: ... def accumulate(iterable: Iterable[_T], func: Callable[[_T, _T], _T] = ...) -> Iterator[_T]: ... class chain(Iterator[_T], Generic[_T]): def __init__(self, *iterables: Iterable[_T]) -> None: ... def __next__(self) -> _T: ... def __iter__(self) -> Iterator[_T]: ... @staticmethod def from_iterable(iterable: Iterable[Iterable[_S]]) -> Iterator[_S]: ... def compress(data: Iterable[_T], selectors: Iterable[Any]) -> Iterator[_T]: ... def dropwhile(predicate: Callable[[_T], Any], iterable: Iterable[_T]) -> Iterator[_T]: ... def filterfalse(predicate: Optional[Callable[[_T], Any]], iterable: Iterable[_T]) -> Iterator[_T]: ... @overload def groupby(iterable: Iterable[_T]) -> Iterator[Tuple[_T, Iterator[_T]]]: ... @overload def groupby(iterable: Iterable[_T], key: Callable[[_T], _S]) -> Iterator[Tuple[_S, Iterator[_T]]]: ... @overload def islice(iterable: Iterable[_T], stop: int) -> Iterator[_T]: ... @overload def islice(iterable: Iterable[_T], start: int, stop: Optional[int], step: int = ...) -> Iterator[_T]: ... 
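# --- Illustrative usage sketch (editor's addition, not part of the stub above) ---
# Exercises a few of the itertools signatures above: chain.from_iterable flattens
# nested iterables, islice bounds an otherwise infinite count(), and groupby
# expects its input pre-sorted by the same key. Values are arbitrary.

import itertools

def itertools_sketch() -> None:
    flat = list(itertools.chain.from_iterable([[1, 2], [3], [4, 5]]))
    first_squares = [n * n for n in itertools.islice(itertools.count(1), 4)]
    words = sorted(["apple", "ant", "bee"], key=lambda w: w[0])
    grouped = {key: list(group)
               for key, group in itertools.groupby(words, key=lambda w: w[0])}
    assert flat == [1, 2, 3, 4, 5]
    assert first_squares == [1, 4, 9, 16]
    assert grouped == {"a": ["apple", "ant"], "b": ["bee"]}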
def starmap(func: Callable[..., _S], iterable: Iterable[Iterable[Any]]) -> Iterator[_S]: ... def takewhile(predicate: Callable[[_T], Any], iterable: Iterable[_T]) -> Iterator[_T]: ... def tee(iterable: Iterable[_T], n: int = ...) -> Tuple[Iterator[_T], ...]: ... def zip_longest(*p: Iterable[Any], fillvalue: Any = ...) -> Iterator[Any]: ... _T1 = TypeVar('_T1') _T2 = TypeVar('_T2') _T3 = TypeVar('_T3') _T4 = TypeVar('_T4') _T5 = TypeVar('_T5') _T6 = TypeVar('_T6') @overload def product(iter1: Iterable[_T1], *, repeat: int = ...) -> Iterator[Tuple[_T1]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], *, repeat: int = ...) -> Iterator[Tuple[_T1, _T2]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], *, repeat: int = ...) -> Iterator[Tuple[_T1, _T2, _T3]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], *, repeat: int = ...) -> Iterator[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5], *, repeat: int = ...) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def product(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5], iter6: Iterable[_T6], *, repeat: int = ...) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload def product(iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any], iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any], iter7: Iterable[Any], *iterables: Iterable, repeat: int = ...) -> Iterator[Tuple]: ... def permutations(iterable: Iterable[_T], r: Optional[int] = ...) -> Iterator[Tuple[_T, ...]]: ... def combinations(iterable: Iterable[_T], r: int) -> Iterable[Tuple[_T, ...]]: ... def combinations_with_replacement(iterable: Iterable[_T], r: int) -> Iterable[Tuple[_T, ...]]: ... mypy-0.560/typeshed/stdlib/3/json/0000755€tŠÔÚ€2›s®0000000000013215007244023113 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/json/__init__.pyi0000644€tŠÔÚ€2›s®0000000342513215007212025374 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, IO, Optional, Tuple, Callable, Dict, List, Union from .decoder import JSONDecoder as JSONDecoder from .encoder import JSONEncoder as JSONEncoder if sys.version_info >= (3, 5): from .decoder import JSONDecodeError as JSONDecodeError def dumps(obj: Any, skipkeys: bool = ..., ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., cls: Any = ..., indent: Union[None, int, str] = ..., separators: Optional[Tuple[str, str]] = ..., default: Optional[Callable[[Any], Any]] = ..., sort_keys: bool = ..., **kwds: Any) -> str: ... def dump(obj: Any, fp: IO[str], skipkeys: bool = ..., ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., cls: Any = ..., indent: Union[None, int, str] = ..., separators: Optional[Tuple[str, str]] = ..., default: Optional[Callable[[Any], Any]] = ..., sort_keys: bool = ..., **kwds: Any) -> None: ... def loads(s: Union[str, bytes, bytearray], encoding: Any = ..., # ignored and deprecated cls: Any = ..., object_hook: Optional[Callable[[Dict], Any]] = ..., parse_float: Optional[Callable[[str], Any]] = ..., parse_int: Optional[Callable[[str], Any]] = ..., parse_constant: Optional[Callable[[str], Any]] = ..., object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ..., **kwds: Any) -> Any: ... 
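# --- Illustrative usage sketch (editor's addition, not part of the stub above) ---
# Round-trips a value through the dumps()/loads() signatures above; object_hook
# is called with every decoded JSON object (a Dict), as the stub's
# Callable[[Dict], Any] parameter suggests. The payload is arbitrary.

import json

def json_roundtrip_sketch() -> None:
    payload = {"name": "mypy", "tags": ["types", "static"]}
    text = json.dumps(payload, sort_keys=True, indent=2)           # -> str
    decoded = json.loads(text, object_hook=lambda obj: dict(obj))  # hook sees each dict
    assert decoded == payload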
def load(fp: IO[str], cls: Any = ..., object_hook: Optional[Callable[[Dict], Any]] = ..., parse_float: Optional[Callable[[str], Any]] = ..., parse_int: Optional[Callable[[str], Any]] = ..., parse_constant: Optional[Callable[[str], Any]] = ..., object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ..., **kwds: Any) -> Any: ... mypy-0.560/typeshed/stdlib/3/json/decoder.pyi0000644€tŠÔÚ€2›s®0000000221713215007212025240 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Callable, Dict, List, Optional, Tuple if sys.version_info >= (3, 5): class JSONDecodeError(ValueError): msg: str doc: str pos: int lineno: int colno: int def __init__(self, msg: str, doc: str, pos: int) -> None: ... class JSONDecoder: object_hook = ... # type: Callable[[Dict[str, Any]], Any] parse_float = ... # type: Callable[[str], Any] parse_int = ... # type: Callable[[str], Any] parse_constant = ... # Callable[[str], Any] strict = ... # type: bool object_pairs_hook = ... # type: Callable[[List[Tuple[str, Any]]], Any] def __init__(self, object_hook: Optional[Callable[[Dict[str, Any]], Any]] = ..., parse_float: Optional[Callable[[str], Any]] = ..., parse_int: Optional[Callable[[str], Any]] = ..., parse_constant: Optional[Callable[[str], Any]] = ..., strict: bool = ..., object_pairs_hook: Optional[Callable[[List[Tuple[str, Any]]], Any]] = ...) -> None: ... def decode(self, s: str) -> Any: ... def raw_decode(self, s: str, idx: int = ...) -> Tuple[Any, int]: ... mypy-0.560/typeshed/stdlib/3/json/encoder.pyi0000644€tŠÔÚ€2›s®0000000146113215007212025252 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Iterator, Optional, Tuple class JSONEncoder: item_separator = ... # type: str key_separator = ... # type: str skipkeys = ... # type: bool ensure_ascii = ... # type: bool check_circular = ... # type: bool allow_nan = ... # type: bool sort_keys = ... # type: bool indent = ... # type: int def __init__(self, skipkeys: bool = ..., ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., sort_keys: bool = ..., indent: Optional[int] = ..., separators: Optional[Tuple[str, str]] = ..., default: Optional[Callable] = ...) -> None: ... def default(self, o: Any) -> Any: ... def encode(self, o: Any) -> str: ... def iterencode(self, o: Any, _one_shot: bool = ...) -> Iterator[str]: ... mypy-0.560/typeshed/stdlib/3/macpath.pyi0000644€tŠÔÚ€2›s®0000000321213215007212024273 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for os.path # Ron Murawski # based on http://docs.python.org/3.2/library/os.path.html from typing import Any, List, Tuple, IO # ----- os.path variables ----- supports_unicode_filenames = False # ----- os.path function stubs ----- def abspath(path: str) -> str: ... def basename(path) -> str: ... def commonprefix(list: List[str]) -> str: ... def dirname(path: str) -> str: ... def exists(path: str) -> bool: ... def lexists(path: str) -> bool: ... def expanduser(path: str) -> str: ... def expandvars(path: str) -> str: ... def getatime(path: str) -> int: ... # return float if os.stat_float_times() returns True def getmtime(path: str) -> int: ... # return float if os.stat_float_times() returns True def getctime(path: str) -> int: ... # return float if os.stat_float_times() returns True def getsize(path: str) -> int: ... def isabs(path: str) -> bool: ... def isfile(path: str) -> bool: ... def isdir(path: str) -> bool: ... def islink(path: str) -> bool: ... def ismount(path: str) -> bool: ... def join(path: str, *paths: str) -> str: ... 
def normcase(path: str) -> str: ... def normpath(path: str) -> str: ... def realpath(path: str) -> str: ... def relpath(path: str, start: str = ...) -> str: ... def samefile(path1: str, path2: str) -> bool: ... def sameopenfile(fp1: IO[Any], fp2: IO[Any]) -> bool: ... # def samestat(stat1: stat_result, stat2: stat_result) -> bool: # ... # Unix only def split(path: str) -> Tuple[str, str]: ... def splitdrive(path: str) -> Tuple[str, str]: ... def splitext(path: str) -> Tuple[str, str]: ... # def splitunc(path: str) -> Tuple[str, str] : ... # Windows only, deprecated mypy-0.560/typeshed/stdlib/3/msvcrt.pyi0000644€tŠÔÚ€2›s®0000000030313215007212024172 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for msvcrt # NOTE: These are incomplete! from typing import overload, BinaryIO, TextIO def get_osfhandle(file: int) -> int: ... def open_osfhandle(handle: int, flags: int) -> int: ... mypy-0.560/typeshed/stdlib/3/multiprocessing/0000755€tŠÔÚ€2›s®0000000000013215007244025371 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/multiprocessing/__init__.pyi0000644€tŠÔÚ€2›s®0000000733713215007212027660 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for multiprocessing from typing import ( Any, Callable, ContextManager, Iterable, Mapping, Optional, Dict, List, Union, TypeVar, Sequence, Tuple ) from logging import Logger from multiprocessing import connection, pool, synchronize from multiprocessing.context import ( BaseContext, ProcessError, BufferTooShort, TimeoutError, AuthenticationError) from multiprocessing.managers import SyncManager from multiprocessing.process import current_process as current_process import queue import sys _T = TypeVar('_T') # N.B. The functions below are generated at runtime by partially applying # multiprocessing.context.BaseContext's methods, so the two signatures should # be identical (modulo self). # Sychronization primitives _LockLike = Union[synchronize.Lock, synchronize.RLock] def Barrier(parties: int, action: Optional[Callable] = ..., timeout: Optional[float] = ...) -> synchronize.Barrier: ... def BoundedSemaphore(value: int = ...) -> synchronize.BoundedSemaphore: ... def Condition(lock: Optional[_LockLike] = ...) -> synchronize.Condition: ... def Event(lock: Optional[_LockLike] = ...) -> synchronize.Event: ... def Lock() -> synchronize.Lock: ... def RLock() -> synchronize.RLock: ... def Semaphore(value: int = ...) -> synchronize.Semaphore: ... def Pipe(duplex: bool = ...) -> Tuple[connection.Connection, connection.Connection]: ... def Pool(processes: Optional[int] = ..., initializer: Optional[Callable[..., Any]] = ..., initargs: Iterable[Any] = ..., maxtasksperchild: Optional[int] = ...) -> pool.Pool: ... class Process(): name: str daemon: bool pid: Optional[int] exitcode: Optional[int] authkey: bytes sentinel: int # TODO: set type of group to None def __init__(self, group: Any = ..., target: Optional[Callable] = ..., name: Optional[str] = ..., args: Iterable[Any] = ..., kwargs: Mapping[Any, Any] = ..., *, daemon: Optional[bool] = ...) -> None: ... def start(self) -> None: ... def run(self) -> None: ... def terminate(self) -> None: ... def is_alive(self) -> bool: ... def join(self, timeout: Optional[float] = ...) -> None: ... class Queue(queue.Queue[_T]): def __init__(self, maxsize: int = ...) -> None: ... def get(self, block: bool = ..., timeout: Optional[float] = ...) -> _T: ... def put(self, item: _T, block: bool = ..., timeout: Optional[float] = ...) -> None: ... def qsize(self) -> int: ... def empty(self) -> bool: ... 
def full(self) -> bool: ... def put_nowait(self, item: _T) -> None: ... def get_nowait(self) -> _T: ... def close(self) -> None: ... def join_thread(self) -> None: ... def cancel_join_thread(self) -> None: ... class Value(): value: Any = ... def __init__(self, typecode_or_type: str, *args: Any, lock: bool = ...) -> None: ... # ----- multiprocessing function stubs ----- def active_children() -> List[Process]: ... def allow_connection_pickling() -> None: ... def cpu_count() -> int: ... def freeze_support() -> None: ... def get_logger() -> Logger: ... def log_to_stderr(level: Optional[Union[str, int]] = ...) -> Logger: ... def Manager() -> SyncManager: ... def set_forkserver_preload(module_names: List[str]) -> None: ... if sys.platform == 'win32' or sys.version_info >= (3, 4): def set_executable(executable: str) -> None: ... if sys.version_info >= (3, 4): def get_all_start_methods() -> List[str]: ... def get_context(method: Optional[str] = ...) -> BaseContext: ... def get_start_method(allow_none: Optional[bool]) -> Optional[str]: ... def set_start_method(method: str, force: Optional[bool] = ...) -> None: ... mypy-0.560/typeshed/stdlib/3/multiprocessing/connection.pyi0000644€tŠÔÚ€2›s®0000000346413215007212030255 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Iterable, List, Optional, Tuple, Type, Union import socket import sys import types # https://docs.python.org/3/library/multiprocessing.html#address-formats _Address = Union[str, Tuple[str, int]] def deliver_challenge(connection: Connection, authkey: bytes) -> None: ... def answer_challenge(connection: Connection, authkey: bytes) -> None: ... if sys.version_info >= (3, 3): def wait(object_list: Iterable[Union[Connection, socket.socket, int]], timeout: Optional[float] = ...) -> List[Union[Connection, socket.socket, int]]: ... def Client(address: _Address, family: Optional[str] = ..., authkey: Optional[bytes] = ...) -> Connection: ... def Pipe(duplex: bool = ...) -> Tuple[Connection, Connection]: ... class Listener: def __init__(self, address: Optional[_Address] = ..., family: Optional[str] = ..., backlog: int = ..., authkey: Optional[bytes] = ...) -> None: ... def accept(self) -> Connection: ... def close(self) -> None: ... @property def address(self) -> _Address: ... @property def last_accepted(self) -> Optional[_Address]: ... if sys.version_info >= (3, 3): def __enter__(self) -> Listener: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], exc_tb: Optional[types.TracebackType]) -> None: ... class Connection: def close(self) -> None: ... def fileno(self) -> int: ... def poll(self, timeout: float = ...) -> bool: ... def recv(self) -> Any: ... def recv_bytes(self, maxlength: Optional[int] = ...) -> bytes: ... def recv_bytes_into(self, buf: Any, offset: int = ...) -> int: ... def send(self, obj: Any) -> None: ... def send_bytes(self, buf: bytes, offset: int = ..., size: Optional[int] = ...) -> None: ... mypy-0.560/typeshed/stdlib/3/multiprocessing/context.pyi0000644€tŠÔÚ€2›s®0000001645113215007212027602 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for multiprocessing.context from logging import Logger import multiprocessing from multiprocessing import synchronize import sys from typing import ( Any, Callable, Iterable, Optional, List, Mapping, Sequence, Tuple, Type, Union, ) _LockLike = Union[synchronize.Lock, synchronize.RLock] class ProcessError(Exception): ... class BufferTooShort(ProcessError): ... class TimeoutError(ProcessError): ... 
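# A minimal sketch of the multiprocessing API typed above (Process plus the
# generic Queue); the worker function echo and its payload are hypothetical
# names used only for illustration.
import multiprocessing

def echo(q: "multiprocessing.Queue[str]") -> None:
    # Queue is generic in the stub, so put()/get() are checked against str here.
    q.put("hello from the child process")

if __name__ == "__main__":
    q: "multiprocessing.Queue[str]" = multiprocessing.Queue()
    p = multiprocessing.Process(target=echo, args=(q,))
    p.start()
    print(q.get())   # inferred as str from the Queue[str] annotation
    p.join()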
class AuthenticationError(ProcessError): ... class BaseContext(object): ProcessError = ... # type: Type[Exception] BufferTooShort = ... # type: Type[Exception] TimeoutError = ... # type: Type[Exception] AuthenticationError = ... # type: Type[Exception] # N.B. The methods below are applied at runtime to generate # multiprocessing.*, so the signatures should be identical (modulo self). @staticmethod def current_process() -> multiprocessing.Process: ... @staticmethod def active_children() -> List[multiprocessing.Process]: ... def cpu_count(self) -> int: ... # TODO: change return to SyncManager once a stub exists in multiprocessing.managers def Manager(self) -> Any: ... # TODO: change return to Pipe once a stub exists in multiprocessing.connection def Pipe(self, duplex: bool) -> Any: ... def Barrier(self, parties: int, action: Optional[Callable] = ..., timeout: Optional[float] = ...) -> synchronize.Barrier: ... def BoundedSemaphore(self, value: int = ...) -> synchronize.BoundedSemaphore: ... def Condition(self, lock: Optional[_LockLike] = ...) -> synchronize.Condition: ... def Event(self, lock: Optional[_LockLike] = ...) -> synchronize.Event: ... def Lock(self) -> synchronize.Lock: ... def RLock(self) -> synchronize.RLock: ... def Semaphore(self, value: int = ...) -> synchronize.Semaphore: ... # TODO: change return to Queue once a stub exists in multiprocessing.queues def Queue(self, maxsize: int = ...) -> Any: ... # TODO: change return to Queue once a stub exists in multiprocessing.queues def JoinableQueue(self, maxsize: int = ...) -> Any: ... # TODO: change return to SimpleQueue once a stub exists in multiprocessing.queues def SimpleQueue(self) -> Any: ... def Pool( self, processes: Optional[int] = ..., initializer: Optional[Callable[..., Any]] = ..., initargs: Iterable[Any] = ..., maxtasksperchild: Optional[int] = ... ) -> multiprocessing.pool.Pool: ... def Process( self, group: Any = ..., target: Optional[Callable] = ..., name: Optional[str] = ..., args: Iterable[Any] = ..., kwargs: Mapping[Any, Any] = ..., *, daemon: Optional[bool] = ... ) -> multiprocessing.Process: ... # TODO: typecode_or_type param is a ctype with a base class of _SimpleCData or array.typecode Need to figure out # how to handle the ctype # TODO: change return to RawValue once a stub exists in multiprocessing.sharedctypes def RawValue(self, typecode_or_type: Any, *args: Any) -> Any: ... # TODO: typecode_or_type param is a ctype with a base class of _SimpleCData or array.typecode Need to figure out # how to handle the ctype # TODO: change return to RawArray once a stub exists in multiprocessing.sharedctypes def RawArray(self, typecode_or_type: Any, size_or_initializer: Union[int, Sequence[Any]]) -> Any: ... # TODO: typecode_or_type param is a ctype with a base class of _SimpleCData or array.typecode Need to figure out # how to handle the ctype # TODO: change return to Value once a stub exists in multiprocessing.sharedctypes def Value( self, typecode_or_type: Any, *args: Any, lock: bool = ... ) -> Any: ... # TODO: typecode_or_type param is a ctype with a base class of _SimpleCData or array.typecode Need to figure out # how to handle the ctype # TODO: change return to Array once a stub exists in multiprocessing.sharedctypes def Array( self, typecode_or_type: Any, size_or_initializer: Union[int, Sequence[Any]], *, lock: bool = ... ) -> Any: ... def freeze_support(self) -> None: ... def get_logger(self) -> Logger: ... def log_to_stderr(self, level: Optional[str] = ...) -> Logger: ... 
def allow_connection_pickling(self) -> None: ... def set_executable(self, executable: str) -> None: ... def set_forkserver_preload(self, module_names: List[str]) -> None: ... def get_context(self, method: Optional[str] = ...) -> BaseContext: ... def get_start_method(self, allow_none: bool = ...) -> str: ... def set_start_method(self, method: Optional[str] = ...) -> None: ... @property def reducer(self) -> str: ... @reducer.setter def reducer(self, reduction: str) -> None: ... def _check_available(self) -> None: ... class Process(object): _start_method: Optional[str] @staticmethod # TODO: type should be BaseProcess once a stub in multiprocessing.process exists def _Popen(process_obj: Any) -> DefaultContext: ... class DefaultContext(object): Process = ... # type: Type[multiprocessing.Process] def __init__(self, context: BaseContext) -> None: ... def get_context(self, method: Optional[str] = ...) -> BaseContext: ... def set_start_method(self, method: str, force: bool = ...) -> None: ... def get_start_method(self, allow_none: bool = ...) -> str: ... def get_all_start_methods(self) -> List[str]: ... if sys.platform != 'win32': # TODO: type should be BaseProcess once a stub in multiprocessing.process exists class ForkProcess(Any): # type: ignore _start_method: str @staticmethod def _Popen(process_obj: Any) -> Any: ... # TODO: type should be BaseProcess once a stub in multiprocessing.process exists class SpawnProcess(Any): # type: ignore _start_method: str @staticmethod def _Popen(process_obj: Any) -> SpawnProcess: ... # TODO: type should be BaseProcess once a stub in multiprocessing.process exists class ForkServerProcess(Any): # type: ignore _start_method: str @staticmethod def _Popen(process_obj: Any) -> Any: ... class ForkContext(BaseContext): _name: str Process = ... # type: Type[ForkProcess] class SpawnContext(BaseContext): _name: str Process = ... # type: Type[SpawnProcess] class ForkServerContext(BaseContext): _name: str Process = ... # type: Type[ForkServerProcess] else: # TODO: type should be BaseProcess once a stub in multiprocessing.process exists class SpawnProcess(Any): # type: ignore _start_method: str @staticmethod # TODO: type should be BaseProcess once a stub in multiprocessing.process exists def _Popen(process_obj: Process) -> Any: ... class SpawnContext(BaseContext): _name: str Process = ... # type: Type[SpawnProcess] def _force_start_method(method: str) -> None: ... # TODO: type should be BaseProcess once a stub in multiprocessing.process exists def get_spawning_popen() -> Optional[Any]: ... # TODO: type should be BaseProcess once a stub in multiprocessing.process exists def set_spawning_popen(popen: Any) -> None: ... def assert_spawning(obj: Any) -> None: ... mypy-0.560/typeshed/stdlib/3/multiprocessing/managers.pyi0000644€tŠÔÚ€2›s®0000000260713215007212027711 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for multiprocessing.managers # NOTE: These are incomplete! import queue import threading from typing import ( Any, Callable, ContextManager, Dict, Iterable, List, Mapping, Optional, Sequence, TypeVar, ) _T = TypeVar('_T') _KT = TypeVar('_KT') _VT = TypeVar('_VT') class Namespace: ... _Namespace = Namespace class BaseManager(ContextManager[BaseManager]): def register(self, typeid: str, callable: Any = ...) -> None: ... def shutdown(self) -> None: ... def start(self, initializer: Optional[Callable[..., Any]] = ..., initargs: Iterable[Any] = ...) -> None: ... class SyncManager(BaseManager): def BoundedSemaphore(self, value: Any = ...) -> threading.BoundedSemaphore: ... 
def Condition(self, lock: Any = ...) -> threading.Condition: ... def Event(self) -> threading.Event: ... def Lock(self) -> threading.Lock: ... def Namespace(self) -> _Namespace: ... def Queue(self, maxsize: int = ...) -> queue.Queue: ... def RLock(self) -> threading.RLock: ... def Semaphore(self, value: Any = ...) -> threading.Semaphore: ... def Array(self, typecode: Any, sequence: Sequence[_T]) -> Sequence[_T]: ... def Value(self, typecode: Any, value: _T) -> _T: ... def dict(self, sequence: Mapping[_KT, _VT] = ...) -> Dict[_KT, _VT]: ... def list(self, sequence: Sequence[_T] = ...) -> List[_T]: ... class RemoteError(Exception): ... mypy-0.560/typeshed/stdlib/3/multiprocessing/pool.pyi0000644€tŠÔÚ€2›s®0000000566313215007212027072 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for multiprocessing.pool # NOTE: These are incomplete! from typing import ( Any, Callable, ContextManager, Iterable, Mapping, Optional, Dict, List, TypeVar, ) _T = TypeVar('_T', bound='Pool') class AsyncResult(): def get(self, timeout: float = ...) -> Any: ... def wait(self, timeout: float = ...) -> None: ... def ready(self) -> bool: ... def successful(self) -> bool: ... class Pool(ContextManager[Pool]): def __init__(self, processes: Optional[int] = ..., initializer: Optional[Callable[..., None]] = ..., initargs: Iterable[Any] = ..., maxtasksperchild: Optional[int] = ..., context: Optional[Any] = ...) -> None: ... def apply(self, func: Callable[..., Any], args: Iterable[Any] = ..., kwds: Dict[str, Any] = ...) -> Any: ... def apply_async(self, func: Callable[..., Any], args: Iterable[Any] = ..., kwds: Dict[str, Any] = ..., callback: Optional[Callable[..., None]] = ..., error_callback: Optional[Callable[[BaseException], None]] = ...) -> AsyncResult: ... def map(self, func: Callable[..., Any], iterable: Iterable[Any] = ..., chunksize: Optional[int] = ...) -> List[Any]: ... def map_async(self, func: Callable[..., Any], iterable: Iterable[Any] = ..., chunksize: Optional[int] = ..., callback: Optional[Callable[..., None]] = ..., error_callback: Optional[Callable[[BaseException], None]] = ...) -> AsyncResult: ... def imap(self, func: Callable[..., Any], iterable: Iterable[Any] = ..., chunksize: Optional[int] = ...) -> Iterable[Any]: ... def imap_unordered(self, func: Callable[..., Any], iterable: Iterable[Any] = ..., chunksize: Optional[int] = ...) -> Iterable[Any]: ... def starmap(self, func: Callable[..., Any], iterable: Iterable[Iterable[Any]] = ..., chunksize: Optional[int] = ...) -> List[Any]: ... def starmap_async(self, func: Callable[..., Any], iterable: Iterable[Iterable[Any]] = ..., chunksize: Optional[int] = ..., callback: Optional[Callable[..., None]] = ..., error_callback: Optional[Callable[[BaseException], None]] = ...) -> AsyncResult: ... def close(self) -> None: ... def terminate(self) -> None: ... def join(self) -> None: ... def __enter__(self: _T) -> _T: ... class ThreadPool(Pool, ContextManager[ThreadPool]): def __init__(self, processes: Optional[int] = ..., initializer: Optional[Callable[..., Any]] = ..., initargs: Iterable[Any] = ...) -> None: ... mypy-0.560/typeshed/stdlib/3/multiprocessing/process.pyi0000644€tŠÔÚ€2›s®0000000021713215007212027565 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List from multiprocessing import Process def current_process() -> Process: ... def active_children() -> List[Process]: ... 
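# A minimal sketch exercising the Pool stub above as a context manager; the
# helpers double and add are hypothetical and exist only to show how map(),
# starmap() and apply_async() are typed (the latter returning AsyncResult).
import multiprocessing

def double(x: int) -> int:
    return 2 * x

def add(a: int, b: int) -> int:
    return a + b

if __name__ == "__main__":
    with multiprocessing.Pool(processes=2) as pool:  # __enter__ yields the Pool
        doubled = pool.map(double, [1, 2, 3])            # list of results
        sums = pool.starmap(add, [(1, 2), (3, 4)])       # arguments unpacked per tuple
        pending = pool.apply_async(double, (21,))        # AsyncResult
        print(doubled, sums, pending.get(timeout=5.0))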
mypy-0.560/typeshed/stdlib/3/multiprocessing/synchronize.pyi0000644€tŠÔÚ€2›s®0000000407013215007212030463 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Callable, ContextManager, Optional, Union from multiprocessing.context import BaseContext import threading import sys _LockLike = Union[Lock, RLock] class Barrier(threading.Barrier): def __init__(self, parties: int, action: Optional[Callable] = ..., timeout: Optional[float] = ..., * ctx: BaseContext) -> None: ... class BoundedSemaphore(Semaphore): def __init__(self, value: int = ..., *, ctx: BaseContext) -> None: ... class Condition(ContextManager[bool]): def __init__(self, lock: Optional[_LockLike] = ..., *, ctx: BaseContext) -> None: ... if sys.version_info >= (3, 7): def notify(self, n: int = ...) -> None: ... else: def notify(self) -> None: ... def notify_all(self) -> None: ... def wait(self, timeout: Optional[float] = ...) -> bool: ... if sys.version_info >= (3, 3): def wait_for(self, predicate: Callable[[], bool], timeout: Optional[float] = ...) -> bool: ... def acquire(self, block: bool = ..., timeout: Optional[float] = ...) -> bool: ... def release(self) -> None: ... class Event(ContextManager[bool]): def __init__(self, lock: Optional[_LockLike] = ..., *, ctx: BaseContext) -> None: ... def is_set(self) -> bool: ... def set(self) -> None: ... def clear(self) -> None: ... def wait(self, timeout: Optional[float] = ...) -> bool: ... class Lock(SemLock): def __init__(self, *, ctx: BaseContext) -> None: ... class RLock(SemLock): def __init__(self, *, ctx: BaseContext) -> None: ... class Semaphore(SemLock): def __init__(self, value: int = ..., *, ctx: BaseContext) -> None: ... # Not part of public API class SemLock(ContextManager[bool]): def acquire(self, block: bool = ..., timeout: Optional[float] = ...) -> bool: ... def release(self) -> None: ... mypy-0.560/typeshed/stdlib/3/nntplib.pyi0000644€tŠÔÚ€2›s®0000001037313215007212024332 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for nntplib (Python 3) import datetime import socket import ssl import sys from typing import Any, Dict, IO, Iterable, List, NamedTuple, Optional, Tuple, TypeVar, Union _SelfT = TypeVar('_SelfT', bound=_NNTPBase) _File = Union[IO[bytes], bytes, str, None] class NNTPError(Exception): response: str class NNTPReplyError(NNTPError): ... class NNTPTemporaryError(NNTPError): ... class NNTPPermanentError(NNTPError): ... class NNTPProtocolError(NNTPError): ... class NNTPDataError(NNTPError): ... NNTP_PORT: int NNTP_SSL_PORT: int GroupInfo = NamedTuple('GroupInfo', [ ('group', str), ('last', str), ('first', str), ('flag', str), ]) ArticleInfo = NamedTuple('ArticleInfo', [ ('number', int), ('message_id', str), ('lines', List[bytes]), ]) def decode_header(header_str: str) -> str: ... class _NNTPBase: encoding: str errors: str host: str file: IO[bytes] debugging: int welcome: str readermode_afterauth: bool tls_on: bool authenticated: bool nntp_implementation: str nntp_version: int def __init__(self, file: IO[bytes], host: str, readermode: Optional[bool] = ..., timeout: float = ...) -> None: ... if sys.version_info >= (3, 3): def __enter__(self: _SelfT) -> _SelfT: ... def __exit__(self, *args: Any) -> None: ... def getwelcome(self) -> str: ... def getcapabilities(self) -> Dict[str, List[str]]: ... def set_debuglevel(self, level: int) -> None: ... def debug(self, level: int) -> None: ... def capabilities(self) -> Tuple[str, Dict[str, List[str]]]: ... def newgroups(self, date: Union[datetime.date, datetime.datetime], *, file: _File = ...) 
-> Tuple[str, List[str]]: ... def newnews(self, group: str, date: Union[datetime.date, datetime.datetime], *, file: _File = ...) -> Tuple[str, List[str]]: ... def list(self, group_pattern: Optional[str] = ..., *, file: _File = ...) -> Tuple[str, List[str]]: ... def description(self, group: str) -> str: ... def descriptions(self, group_pattern: str) -> Tuple[str, Dict[str, str]]: ... def group(self, name: str) -> Tuple[str, int, int, int, str]: ... def help(self, *, file: _File = ...) -> Tuple[str, List[str]]: ... def stat(self, message_spec: Any = ...) -> Tuple[str, int, str]: ... def next(self) -> Tuple[str, int, str]: ... def last(self) -> Tuple[str, int, str]: ... def head(self, message_spec: Any = ..., *, file: _File = ...) -> Tuple[str, ArticleInfo]: ... def body(self, message_spec: Any = ..., *, file: _File = ...) -> Tuple[str, ArticleInfo]: ... def article(self, message_spec: Any = ..., *, file: _File = ...) -> Tuple[str, ArticleInfo]: ... def slave(self) -> str: ... def xhdr(self, hdr: str, str: Any, *, file: _File = ...) -> Tuple[str, List[str]]: ... def xover(self, start: int, end: int, *, file: _File = ...) -> Tuple[str, List[Tuple[int, Dict[str, str]]]]: ... def over(self, message_spec: Union[None, str, List[Any], Tuple[Any, ...]], *, file: _File = ...) -> Tuple[str, List[Tuple[int, Dict[str, str]]]]: ... def xgtitle(self, group: str, *, file: _File = ...) -> Tuple[str, List[Tuple[str, str]]]: ... def xpath(self, id: Any) -> Tuple[str, str]: ... def date(self) -> Tuple[str, datetime.datetime]: ... def post(self, data: Union[bytes, Iterable[bytes]]) -> str: ... def ihave(self, message_id: Any, data: Union[bytes, Iterable[bytes]]) -> str: ... def quit(self) -> str: ... def login(self, user: Optional[str] = ..., password: Optional[str] = ..., usenetrc: bool = ...) -> None: ... def starttls(self, ssl_context: Optional[ssl.SSLContext] = ...) -> None: ... class NNTP(_NNTPBase): port: int sock: socket.socket def __init__(self, host: str, port: int = ..., user: Optional[str] = ..., password: Optional[str] = ..., readermode: Optional[bool] = ..., usenetrc: bool = ..., timeout: float = ...) -> None: ... class NNTP_SSL(_NNTPBase): sock: socket.socket def __init__(self, host: str, port: int = ..., user: Optional[str] = ..., password: Optional[str] = ..., ssl_context: Optional[ssl.SSLContext] = ..., readermode: Optional[bool] = ..., usenetrc: bool = ..., timeout: float = ...) -> None: ... mypy-0.560/typeshed/stdlib/3/ntpath.pyi0000644€tŠÔÚ€2›s®0000000321213215007212024154 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for os.path # Ron Murawski # based on http://docs.python.org/3.2/library/os.path.html from typing import Any, List, Tuple, IO # ----- os.path variables ----- supports_unicode_filenames = False # ----- os.path function stubs ----- def abspath(path: str) -> str: ... def basename(path) -> str: ... def commonprefix(list: List[str]) -> str: ... def dirname(path: str) -> str: ... def exists(path: str) -> bool: ... def lexists(path: str) -> bool: ... def expanduser(path: str) -> str: ... def expandvars(path: str) -> str: ... def getatime(path: str) -> int: ... # return float if os.stat_float_times() returns True def getmtime(path: str) -> int: ... # return float if os.stat_float_times() returns True def getctime(path: str) -> int: ... # return float if os.stat_float_times() returns True def getsize(path: str) -> int: ... def isabs(path: str) -> bool: ... def isfile(path: str) -> bool: ... def isdir(path: str) -> bool: ... def islink(path: str) -> bool: ... 
def ismount(path: str) -> bool: ... def join(path: str, *paths: str) -> str: ... def normcase(path: str) -> str: ... def normpath(path: str) -> str: ... def realpath(path: str) -> str: ... def relpath(path: str, start: str = ...) -> str: ... def samefile(path1: str, path2: str) -> bool: ... def sameopenfile(fp1: IO[Any], fp2: IO[Any]) -> bool: ... # def samestat(stat1: stat_result, stat2: stat_result) -> bool: # ... # Unix only def split(path: str) -> Tuple[str, str]: ... def splitdrive(path: str) -> Tuple[str, str]: ... def splitext(path: str) -> Tuple[str, str]: ... # def splitunc(path: str) -> Tuple[str, str] : ... # Windows only, deprecated mypy-0.560/typeshed/stdlib/3/nturl2path.pyi0000644€tŠÔÚ€2›s®0000000011413215007212024757 0ustar jukkaDROPBOX\Domain Users00000000000000def url2pathname(url: str) -> str: ... def pathname2url(p: str) -> str: ... mypy-0.560/typeshed/stdlib/3/os/0000755€tŠÔÚ€2›s®0000000000013215007244022563 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/os/__init__.pyi0000644€tŠÔÚ€2›s®0000006475313215007212025057 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for os # Ron Murawski from builtins import OSError as error from io import TextIOWrapper as _TextIOWrapper import sys from typing import ( Mapping, MutableMapping, Dict, List, Any, Tuple, IO, Iterable, Iterator, overload, Union, AnyStr, Optional, Generic, Set, Callable, Text, Sequence, NamedTuple, TypeVar ) from . import path as path from mypy_extensions import NoReturn _T = TypeVar('_T') # ----- os variables ----- if sys.version_info >= (3, 2): supports_bytes_environ: bool if sys.version_info >= (3, 3): supports_dir_fd: Set[Callable[..., Any]] supports_fd: Set[Callable[..., Any]] supports_effective_ids: Set[Callable[..., Any]] supports_follow_symlinks: Set[Callable[..., Any]] PRIO_PROCESS: int # Unix only PRIO_PGRP: int # Unix only PRIO_USER: int # Unix only F_LOCK: int # Unix only F_TLOCK: int # Unix only F_ULOCK: int # Unix only F_TEST: int # Unix only POSIX_FADV_NORMAL: int # Unix only POSIX_FADV_SEQUENTIAL: int # Unix only POSIX_FADV_RANDOM: int # Unix only POSIX_FADV_NOREUSE: int # Unix only POSIX_FADV_WILLNEED: int # Unix only POSIX_FADV_DONTNEED: int # Unix only SF_NODISKIO: int # Unix only SF_MNOWAIT: int # Unix only SF_SYNC: int # Unix only XATTR_SIZE_MAX: int # Linux only XATTR_CREATE: int # Linux only XATTR_REPLACE: int # Linux only P_PID: int # Unix only P_PGID: int # Unix only P_ALL: int # Unix only WEXITED: int # Unix only WSTOPPED: int # Unix only WNOWAIT: int # Unix only CLD_EXITED: int # Unix only CLD_DUMPED: int # Unix only CLD_TRAPPED: int # Unix only CLD_CONTINUED: int # Unix only SCHED_OTHER: int # some flavors of Unix SCHED_BATCH: int # some flavors of Unix SCHED_IDLE: int # some flavors of Unix SCHED_SPORADIC: int # some flavors of Unix SCHED_FIFO: int # some flavors of Unix SCHED_RR: int # some flavors of Unix SCHED_RESET_ON_FORK: int # some flavors of Unix RTLD_LAZY: int RTLD_NOW: int RTLD_GLOBAL: int RTLD_LOCAL: int RTLD_NODELETE: int RTLD_NOLOAD: int RTLD_DEEPBIND: int SEEK_SET: int SEEK_CUR: int SEEK_END: int if sys.version_info >= (3, 3): SEEK_DATA: int # some flavors of Unix SEEK_HOLE: int # some flavors of Unix O_RDONLY: int O_WRONLY: int O_RDWR: int O_APPEND: int O_CREAT: int O_EXCL: int O_TRUNC: int O_DSYNC: int # Unix only O_RSYNC: int # Unix only O_SYNC: int # Unix only O_NDELAY: int # Unix only O_NONBLOCK: int # Unix only O_NOCTTY: int # Unix only if sys.version_info >= (3, 3): O_CLOEXEC: int # Unix only O_SHLOCK: int # Unix only O_EXLOCK: int 
# Unix only O_BINARY: int # Windows only O_NOINHERIT: int # Windows only O_SHORT_LIVED: int # Windows only O_TEMPORARY: int # Windows only O_RANDOM: int # Windows only O_SEQUENTIAL: int # Windows only O_TEXT: int # Windows only O_ASYNC: int # Gnu extension if in C library O_DIRECT: int # Gnu extension if in C library O_DIRECTORY: int # Gnu extension if in C library O_NOFOLLOW: int # Gnu extension if in C library O_NOATIME: int # Gnu extension if in C library if sys.version_info >= (3, 4): O_PATH: int # Gnu extension if in C library O_TMPFILE: int # Gnu extension if in C library O_LARGEFILE: int # Gnu extension if in C library curdir: str pardir: str sep: str altsep: str extsep: str pathsep: str defpath: str linesep: str devnull: str name: str F_OK: int R_OK: int W_OK: int X_OK: int class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]): def copy(self) -> Dict[AnyStr, AnyStr]: ... environ: _Environ[str] if sys.version_info >= (3, 2): environb: _Environ[bytes] confstr_names: Dict[str, int] # Unix only pathconf_names: Dict[str, int] # Unix only sysconf_names: Dict[str, int] # Unix only EX_OK: int # Unix only EX_USAGE: int # Unix only EX_DATAERR: int # Unix only EX_NOINPUT: int # Unix only EX_NOUSER: int # Unix only EX_NOHOST: int # Unix only EX_UNAVAILABLE: int # Unix only EX_SOFTWARE: int # Unix only EX_OSERR: int # Unix only EX_OSFILE: int # Unix only EX_CANTCREAT: int # Unix only EX_IOERR: int # Unix only EX_TEMPFAIL: int # Unix only EX_PROTOCOL: int # Unix only EX_NOPERM: int # Unix only EX_CONFIG: int # Unix only EX_NOTFOUND: int # Unix only P_NOWAIT: int P_NOWAITO: int P_WAIT: int if sys.platform == 'win32': P_DETACH: int # Windows only P_OVERLAY: int # Windows only # wait()/waitpid() options WNOHANG: int # Unix only WCONTINUED: int # some Unix systems WUNTRACED: int # Unix only TMP_MAX: int # Undocumented, but used by tempfile # ----- os classes (structures) ----- if sys.version_info >= (3, 6): from builtins import _PathLike as PathLike # See comment in builtins _PathType = path._PathType if sys.version_info >= (3, 3): _FdOrPathType = Union[int, _PathType] else: _FdOrPathType = _PathType if sys.version_info >= (3, 6): class DirEntry(PathLike[AnyStr]): # This is what the scandir interator yields # The constructor is hidden name: AnyStr path: AnyStr def inode(self) -> int: ... def is_dir(self, follow_symlinks: bool = ...) -> bool: ... def is_file(self, follow_symlinks: bool = ...) -> bool: ... def is_symlink(self) -> bool: ... def stat(self) -> stat_result: ... def __fspath__(self) -> AnyStr: ... elif sys.version_info >= (3, 5): class DirEntry(Generic[AnyStr]): # This is what the scandir interator yields # The constructor is hidden name: AnyStr path: AnyStr def inode(self) -> int: ... def is_dir(self, follow_symlinks: bool = ...) -> bool: ... def is_file(self, follow_symlinks: bool = ...) -> bool: ... def is_symlink(self) -> bool: ... def stat(self) -> stat_result: ... class stat_result: # For backward compatibility, the return value of stat() is also # accessible as a tuple of at least 10 integers giving the most important # (and portable) members of the stat structure, in the order st_mode, # st_ino, st_dev, st_nlink, st_uid, st_gid, st_size, st_atime, st_mtime, # st_ctime. More items may be added at the end by some implementations. 
st_mode: int # protection bits, st_ino: int # inode number, st_dev: int # device, st_nlink: int # number of hard links, st_uid: int # user id of owner, st_gid: int # group id of owner, st_size: int # size of file, in bytes, st_atime: float # time of most recent access, st_mtime: float # time of most recent content modification, st_ctime: float # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) if sys.version_info >= (3, 3): st_atime_ns: int # time of most recent access, in nanoseconds st_mtime_ns: int # time of most recent content modification in nanoseconds st_ctime_ns: int # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) in nanoseconds # not documented def __init__(self, tuple: Tuple[int, ...]) -> None: ... # On some Unix systems (such as Linux), the following attributes may also # be available: st_blocks: int # number of blocks allocated for file st_blksize: int # filesystem blocksize st_rdev: int # type of device if an inode device st_flags: int # user defined flags for file # On other Unix systems (such as FreeBSD), the following attributes may be # available (but may be only filled out if root tries to use them): st_gen: int # file generation number st_birthtime: int # time of file creation # On Mac OS systems, the following attributes may also be available: st_rsize: int st_creator: int st_type: int class statvfs_result: # Unix only f_bsize: int f_frsize: int f_blocks: int f_bfree: int f_bavail: int f_files: int f_ffree: int f_favail: int f_flag: int f_namemax: int # ----- os function stubs ----- if sys.version_info >= (3, 6): def fsencode(filename: Union[str, bytes, PathLike]) -> bytes: ... else: def fsencode(filename: Union[str, bytes]) -> bytes: ... if sys.version_info >= (3, 6): def fsdecode(filename: Union[str, bytes, PathLike]) -> str: ... else: def fsdecode(filename: Union[str, bytes]) -> str: ... if sys.version_info >= (3, 6): @overload def fspath(path: str) -> str: ... @overload def fspath(path: bytes) -> bytes: ... @overload def fspath(path: PathLike) -> Any: ... def get_exec_path(env: Optional[Mapping[str, str]] = ...) -> List[str]: ... # NOTE: get_exec_path(): returns List[bytes] when env not None def ctermid() -> str: ... # Unix only def getegid() -> int: ... # Unix only def geteuid() -> int: ... # Unix only def getgid() -> int: ... # Unix only if sys.version_info >= (3, 3): def getgrouplist(user: str, gid: int) -> List[int]: ... # Unix only def getgroups() -> List[int]: ... # Unix only, behaves differently on Mac def initgroups(username: str, gid: int) -> None: ... # Unix only def getlogin() -> str: ... def getpgid(pid: int) -> int: ... # Unix only def getpgrp() -> int: ... # Unix only def getpid() -> int: ... def getppid() -> int: ... if sys.version_info >= (3, 3): def getpriority(which: int, who: int) -> int: ... # Unix only def setpriority(which: int, who: int, priority: int) -> None: ... # Unix only def getresuid() -> Tuple[int, int, int]: ... # Unix only def getresgid() -> Tuple[int, int, int]: ... # Unix only def getuid() -> int: ... # Unix only def setegid(egid: int) -> None: ... # Unix only def seteuid(euid: int) -> None: ... # Unix only def setgid(gid: int) -> None: ... # Unix only def setgroups(groups: Sequence[int]) -> None: ... # Unix only def setpgrp() -> None: ... # Unix only def setpgid(pid: int, pgrp: int) -> None: ... # Unix only def setregid(rgid: int, egid: int) -> None: ... # Unix only def setresgid(rgid: int, egid: int, sgid: int) -> None: ... 
# Unix only def setresuid(ruid: int, euid: int, suid: int) -> None: ... # Unix only def setreuid(ruid: int, euid: int) -> None: ... # Unix only def getsid(pid: int) -> int: ... # Unix only def setsid() -> None: ... # Unix only def setuid(uid: int) -> None: ... # Unix only def strerror(code: int) -> str: ... def umask(mask: int) -> int: ... if sys.version_info >= (3, 3): from posix import uname_result def uname() -> uname_result: ... # Unix only else: def uname() -> Tuple[str, str, str, str, str]: ... # Unix only @overload def getenv(key: Text) -> Optional[str]: ... @overload def getenv(key: Text, default: _T) -> Union[str, _T]: ... def getenvb(key: bytes, default: bytes = ...) -> bytes: ... def putenv(key: Union[bytes, Text], value: Union[bytes, Text]) -> None: ... def unsetenv(key: Union[bytes, Text]) -> None: ... # Return IO or TextIO def fdopen(fd: int, mode: str = ..., buffering: int = ..., encoding: str = ..., errors: str = ..., newline: str = ..., closefd: bool = ...) -> Any: ... def close(fd: int) -> None: ... def closerange(fd_low: int, fd_high: int) -> None: ... def device_encoding(fd: int) -> Optional[str]: ... def dup(fd: int) -> int: ... def dup2(fd: int, fd2: int) -> None: ... def fchmod(fd: int, mode: int) -> None: ... # Unix only def fchown(fd: int, uid: int, gid: int) -> None: ... # Unix only def fdatasync(fd: int) -> None: ... # Unix only, not Mac def fpathconf(fd: int, name: Union[str, int]) -> int: ... # Unix only def fstat(fd: int) -> stat_result: ... def fstatvfs(fd: int) -> statvfs_result: ... # Unix only def fsync(fd: int) -> None: ... def ftruncate(fd: int, length: int) -> None: ... # Unix only if sys.version_info >= (3, 5): def get_blocking(fd: int) -> bool: ... # Unix only def set_blocking(fd: int, blocking: bool) -> None: ... # Unix only def isatty(fd: int) -> bool: ... # Unix only if sys.version_info >= (3, 3): def lockf(__fd: int, __cmd: int, __length: int) -> None: ... # Unix only def lseek(fd: int, pos: int, how: int) -> int: ... if sys.version_info >= (3, 3): def open(file: _PathType, flags: int, mode: int = ..., *, dir_fd: Optional[int] = ...) -> int: ... else: def open(file: _PathType, flags: int, mode: int = ...) -> int: ... def openpty() -> Tuple[int, int]: ... # some flavors of Unix def pipe() -> Tuple[int, int]: ... if sys.version_info >= (3, 3): def pipe2(flags: int) -> Tuple[int, int]: ... # some flavors of Unix def posix_fallocate(fd: int, offset: int, length: int) -> None: ... # Unix only def posix_fadvise(fd: int, offset: int, length: int, advice: int) -> None: ... # Unix only def pread(fd: int, buffersize: int, offset: int) -> bytes: ... # Unix only def pwrite(fd: int, string: bytes, offset: int) -> int: ... # Unix only def read(fd: int, n: int) -> bytes: ... if sys.version_info >= (3, 3): @overload def sendfile(__out_fd: int, __in_fd: int, offset: Optional[int], count: int) -> int: ... # Unix only @overload def sendfile(__out_fd: int, __in_fd: int, offset: int, count: int, headers: Sequence[bytes] = ..., trailers: Sequence[bytes] = ..., flags: int = ...) -> int: ... # FreeBSD and Mac OS X only def readv(fd: int, buffers: Sequence[bytearray]) -> int: ... # Unix only def writev(fd: int, buffers: Sequence[bytes]) -> int: ... # Unix only terminal_size = NamedTuple('terminal_size', [('columns', int), ('lines', int)]) def get_terminal_size(fd: int = ...) -> terminal_size: ... if sys.version_info >= (3, 4): def get_inheritable(fd: int) -> bool: ... def set_inheritable(fd: int, inheritable: bool) -> None: ... def tcgetpgrp(fd: int) -> int: ... 
# Unix only def tcsetpgrp(fd: int, pg: int) -> None: ... # Unix only def ttyname(fd: int) -> str: ... # Unix only def write(fd: int, string: bytes) -> int: ... if sys.version_info >= (3, 3): def access(path: _FdOrPathType, mode: int, *, dir_fd: Optional[int] = ..., effective_ids: bool = ..., follow_symlinks: bool = ...) -> bool: ... else: def access(path: _PathType, mode: int) -> bool: ... def chdir(path: _FdOrPathType) -> None: ... def fchdir(fd: int) -> None: ... def getcwd() -> str: ... def getcwdb() -> bytes: ... if sys.version_info >= (3, 3): def chflags(path: _PathType, flags: int, follow_symlinks: bool = ...) -> None: ... # some flavors of Unix def chmod(path: _FdOrPathType, mode: int, *, dir_fd: Optional[int] = ..., follow_symlinks: bool = ...) -> None: ... def chown(path: _FdOrPathType, uid: int, gid: int, *, dir_fd: Optional[int] = ..., follow_symlinks: bool = ...) -> None: ... # Unix only else: def chflags(path: _PathType, flags: int) -> None: ... # Some flavors of Unix def chmod(path: _PathType, mode: int) -> None: ... def chown(path: _PathType, uid: int, gid: int) -> None: ... # Unix only def chroot(path: _PathType) -> None: ... # Unix only def lchflags(path: _PathType, flags: int) -> None: ... # Unix only def lchmod(path: _PathType, mode: int) -> None: ... # Unix only def lchown(path: _PathType, uid: int, gid: int) -> None: ... # Unix only if sys.version_info >= (3, 3): def link(src: _PathType, link_name: _PathType, *, src_dir_fd: Optional[int] = ..., dst_dir_fd: Optional[int] = ..., follow_symlinks: bool = ...) -> None: ... else: def link(src: _PathType, link_name: _PathType) -> None: ... if sys.version_info >= (3, 3): @overload def listdir(path: Optional[str] = ...) -> List[str]: ... @overload def listdir(path: bytes) -> List[bytes]: ... @overload def listdir(path: int) -> List[str]: ... else: @overload def listdir(path: Optional[str] = ...) -> List[str]: ... @overload def listdir(path: bytes) -> List[bytes]: ... if sys.version_info >= (3, 3): def lstat(path: _PathType, *, dir_fd: Optional[int] = ...) -> stat_result: ... def mkdir(path: _PathType, mode: int = ..., *, dir_fd: Optional[int] = ...) -> None: ... def mkfifo(path: _PathType, mode: int = ..., *, dir_fd: Optional[int] = ...) -> None: ... # Unix only else: def lstat(path: _PathType) -> stat_result: ... def mkdir(path: _PathType, mode: int = ...) -> None: ... def mkfifo(path: _PathType, mode: int = ...) -> None: ... # Unix only if sys.version_info >= (3, 4): def makedirs(name: _PathType, mode: int = ..., exist_ok: bool = ...) -> None: ... else: def makedirs(path: _PathType, mode: int = ..., exist_ok: bool = ...) -> None: ... if sys.version_info >= (3, 4): def mknod(path: _PathType, mode: int = ..., device: int = ..., *, dir_fd: Optional[int] = ...) -> None: ... elif sys.version_info >= (3, 3): def mknod(filename: _PathType, mode: int = ..., device: int = ..., *, dir_fd: Optional[int] = ...) -> None: ... else: def mknod(filename: _PathType, mode: int = ..., device: int = ...) -> None: ... def major(device: int) -> int: ... def minor(device: int) -> int: ... def makedev(major: int, minor: int) -> int: ... def pathconf(path: _FdOrPathType, name: Union[str, int]) -> int: ... # Unix only if sys.version_info >= (3, 6): def readlink(path: Union[AnyStr, PathLike[AnyStr]], *, dir_fd: Optional[int] = ...) -> AnyStr: ... elif sys.version_info >= (3, 3): def readlink(path: AnyStr, *, dir_fd: Optional[int] = ...) -> AnyStr: ... else: def readlink(path: AnyStr) -> AnyStr: ... 
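# A short sketch of the stat_result shape described above: the named
# attributes are the primary interface, while indexing still exposes the
# first ten fields kept for backward compatibility; __file__ is used only as
# a convenient existing path, and the listdir() call shows one of its
# overloads resolving to a list of str.
import os
import stat

st = os.stat(__file__)
print(st.st_size, st.st_mtime)      # attribute access (st_mtime is a float)
print(st[6], st[8])                 # the same fields via the legacy tuple view
print(stat.S_ISREG(st.st_mode))     # st_mode feeds the stat module helpers
print(len(os.listdir(".")))         # str argument -> List[str]; bytes would give List[bytes]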
if sys.version_info >= (3, 3): def remove(path: _PathType, *, dir_fd: Optional[int] = ...) -> None: ... else: def remove(path: _PathType) -> None: ... if sys.version_info >= (3, 4): def removedirs(name: _PathType) -> None: ... else: def removedirs(path: _PathType) -> None: ... if sys.version_info >= (3, 3): def rename(src: _PathType, dst: _PathType, *, src_dir_fd: Optional[int] = ..., dst_dir_fd: Optional[int] = ...) -> None: ... else: def rename(src: _PathType, dst: _PathType) -> None: ... def renames(old: _PathType, new: _PathType) -> None: ... if sys.version_info >= (3, 3): def replace(src: _PathType, dst: _PathType, *, src_dir_fd: Optional[int] = ..., dst_dir_fd: Optional[int] = ...) -> None: ... def rmdir(path: _PathType, *, dir_fd: Optional[int] = ...) -> None: ... else: def rmdir(path: _PathType) -> None: ... if sys.version_info >= (3, 6): @overload def scandir() -> Iterator[DirEntry[str]]: ... @overload def scandir(path: Union[AnyStr, PathLike[AnyStr]]) -> Iterator[DirEntry[AnyStr]]: ... elif sys.version_info >= (3, 5): @overload def scandir() -> Iterator[DirEntry[str]]: ... @overload def scandir(path: AnyStr) -> Iterator[DirEntry[AnyStr]]: ... if sys.version_info >= (3, 3): def stat(path: _FdOrPathType, *, dir_fd: Optional[int] = ..., follow_symlinks: bool = ...) -> stat_result: ... else: def stat(path: _PathType) -> stat_result: ... @overload def stat_float_times() -> bool: ... @overload def stat_float_times(__newvalue: bool) -> None: ... def statvfs(path: _FdOrPathType) -> statvfs_result: ... # Unix only if sys.version_info >= (3, 3): def symlink(source: _PathType, link_name: _PathType, target_is_directory: bool = ..., *, dir_fd: Optional[int] = ...) -> None: ... def sync() -> None: ... # Unix only def truncate(path: _FdOrPathType, length: int) -> None: ... # Unix only up to version 3.4 def unlink(path: _PathType, *, dir_fd: Optional[int] = ...) -> None: ... def utime(path: _FdOrPathType, times: Optional[Union[Tuple[int, int], Tuple[float, float]]] = ..., *, ns: Tuple[int, int] = ..., dir_fd: Optional[int] = ..., follow_symlinks: bool = ...) -> None: ... else: def symlink(source: _PathType, link_name: _PathType, target_is_directory: bool = ...) -> None: ... # final argument in Windows only def unlink(path: _PathType) -> None: ... def utime(path: _PathType, times: Optional[Tuple[float, float]]) -> None: ... if sys.version_info >= (3, 6): def walk(top: Union[AnyStr, PathLike[AnyStr]], topdown: bool = ..., onerror: Optional[Callable[[OSError], Any]] = ..., followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr], List[AnyStr]]]: ... else: def walk(top: AnyStr, topdown: bool = ..., onerror: Optional[Callable[[OSError], Any]] = ..., followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr], List[AnyStr]]]: ... if sys.version_info >= (3, 3): def fwalk(top: _PathType = ..., topdown: bool = ..., onerror: Optional[Callable] = ..., *, follow_symlinks: bool = ..., dir_fd: Optional[int] = ...) -> Iterator[Tuple[str, List[str], List[str], int]]: ... # Unix only def getxattr(path: _FdOrPathType, attribute: _PathType, *, follow_symlinks: bool = ...) -> bytes: ... # Linux only def listxattr(path: _FdOrPathType, *, follow_symlinks: bool = ...) -> List[str]: ... # Linux only def removexattr(path: _FdOrPathType, attribute: _PathType, *, follow_symlinks: bool = ...) -> None: ... # Linux only def setxattr(path: _FdOrPathType, attribute: _PathType, value: bytes, flags: int = ..., *, follow_symlinks: bool = ...) -> None: ... # Linux only def abort() -> NoReturn: ... 
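# A minimal sketch of iterating os.walk() as typed above: each yielded item
# unpacks into a directory path plus two name lists, all sharing the string
# type of the argument (str here); the starting path "." is illustrative.
import os

for dirpath, dirnames, filenames in os.walk("."):
    # dirpath: str, dirnames: List[str], filenames: List[str]
    for name in filenames:
        print(os.path.join(dirpath, name))
    # Pruning the traversal by mutating dirnames in place is the documented idiom.
    dirnames[:] = [d for d in dirnames if not d.startswith(".")]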
# These are defined as execl(file, *args) but the first *arg is mandatory. def execl(file: _PathType, __arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> NoReturn: ... def execlp(file: _PathType, __arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> NoReturn: ... # These are: execle(file, *args, env) but env is pulled from the last element of the args. def execle(file: _PathType, __arg0: Union[bytes, Text], *args: Any) -> NoReturn: ... def execlpe(file: _PathType, __arg0: Union[bytes, Text], *args: Any) -> NoReturn: ... # The docs say `args: tuple or list of strings` # The implementation enforces tuple or list so we can't use Sequence. _ExecVArgs = Union[Tuple[Union[bytes, Text], ...], List[bytes], List[Text], List[Union[bytes, Text]]] def execv(path: _PathType, args: _ExecVArgs) -> None: ... def execve(path: _FdOrPathType, args: _ExecVArgs, env: Mapping[str, str]) -> None: ... def execvp(file: _PathType, args: _ExecVArgs) -> None: ... def execvpe(file: _PathType, args: _ExecVArgs, env: Mapping[str, str]) -> None: ... def _exit(n: int) -> NoReturn: ... def fork() -> int: ... # Unix only def forkpty() -> Tuple[int, int]: ... # some flavors of Unix def kill(pid: int, sig: int) -> None: ... def killpg(pgid: int, sig: int) -> None: ... # Unix only def nice(increment: int) -> int: ... # Unix only def plock(op: int) -> None: ... # Unix only ???op is int? if sys.version_info >= (3, 0): class _wrap_close(_TextIOWrapper): def close(self) -> Optional[int]: ... # type: ignore def popen(command: str, mode: str = ..., buffering: int = ...) -> _wrap_close: ... else: class _wrap_close(IO[Text]): def close(self) -> Optional[int]: ... # type: ignore def popen(__cmd: Text, __mode: Text = ..., __bufsize: int = ...) -> _wrap_close: ... def popen2(__cmd: Text, __mode: Text = ..., __bufsize: int = ...) -> Tuple[IO[Text], IO[Text]]: ... def popen3(__cmd: Text, __mode: Text = ..., __bufsize: int = ...) -> Tuple[IO[Text], IO[Text], IO[Text]]: ... def popen4(__cmd: Text, __mode: Text = ..., __bufsize: int = ...) -> Tuple[IO[Text], IO[Text]]: ... def spawnl(mode: int, path: _PathType, arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> int: ... def spawnle(mode: int, path: _PathType, arg0: Union[bytes, Text], *args: Any) -> int: ... # Imprecise sig def spawnlp(mode: int, file: _PathType, arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> int: ... # Unix only TODO def spawnlpe(mode: int, file: _PathType, arg0: Union[bytes, Text], *args: Any) -> int: ... # Imprecise signature; Unix only TODO def spawnv(mode: int, path: _PathType, args: List[Union[bytes, Text]]) -> int: ... def spawnve(mode: int, path: _PathType, args: List[Union[bytes, Text]], env: Mapping[str, str]) -> int: ... def spawnvp(mode: int, file: _PathType, args: List[Union[bytes, Text]]) -> int: ... # Unix only def spawnvpe(mode: int, file: _PathType, args: List[Union[bytes, Text]], env: Mapping[str, str]) -> int: ... # Unix only def startfile(path: _PathType, operation: Optional[str] = ...) -> None: ... # Windows only def system(command: _PathType) -> int: ... if sys.version_info >= (3, 3): from posix import times_result def times() -> times_result: ... else: def times() -> Tuple[float, float, float, float, float]: ... def wait() -> Tuple[int, int]: ... # Unix only if sys.version_info >= (3, 3): from posix import waitid_result def waitid(idtype: int, ident: int, options: int) -> waitid_result: ... # Unix only def waitpid(pid: int, options: int) -> Tuple[int, int]: ... def wait3(options: int) -> Tuple[int, int, Any]: ... 
# Unix only def wait4(pid: int, options: int) -> Tuple[int, int, Any]: ... # Unix only def WCOREDUMP(status: int) -> bool: ... # Unix only def WIFCONTINUED(status: int) -> bool: ... # Unix only def WIFSTOPPED(status: int) -> bool: ... # Unix only def WIFSIGNALED(status: int) -> bool: ... # Unix only def WIFEXITED(status: int) -> bool: ... # Unix only def WEXITSTATUS(status: int) -> int: ... # Unix only def WSTOPSIG(status: int) -> int: ... # Unix only def WTERMSIG(status: int) -> int: ... # Unix only if sys.version_info >= (3, 3): from posix import sched_param def sched_get_priority_min(policy: int) -> int: ... # some flavors of Unix def sched_get_priority_max(policy: int) -> int: ... # some flavors of Unix def sched_setscheduler(pid: int, policy: int, param: sched_param) -> None: ... # some flavors of Unix def sched_getscheduler(pid: int) -> int: ... # some flavors of Unix def sched_setparam(pid: int, param: sched_param) -> None: ... # some flavors of Unix def sched_getparam(pid: int) -> sched_param: ... # some flavors of Unix def sched_rr_get_interval(pid: int) -> float: ... # some flavors of Unix def sched_yield() -> None: ... # some flavors of Unix def sched_setaffinity(pid: int, mask: Iterable[int]) -> None: ... # some flavors of Unix def sched_getaffinity(pid: int) -> Set[int]: ... # some flavors of Unix def confstr(name: Union[str, int]) -> Optional[str]: ... # Unix only if sys.version_info >= (3, 4): def cpu_count() -> Optional[int]: ... def getloadavg() -> Tuple[float, float, float]: ... # Unix only def sysconf(name: Union[str, int]) -> int: ... # Unix only if sys.version_info >= (3, 6): def getrandom(size: int, flags: int = ...) -> bytes: ... def urandom(size: int) -> bytes: ... else: def urandom(n: int) -> bytes: ... mypy-0.560/typeshed/stdlib/3/os/path.pyi0000644€tŠÔÚ€2›s®0000000705413215007212024243 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for os.path # Ron Murawski # based on http://docs.python.org/3.2/library/os.path.html # adapted for 2.7 by Michal Pokorny import sys from typing import ( overload, List, Any, AnyStr, Sequence, Tuple, BinaryIO, TextIO, TypeVar, Union, Text, Callable ) _T = TypeVar('_T') if sys.version_info >= (3, 6): from builtins import _PathLike _PathType = Union[bytes, Text, _PathLike] else: _PathType = Union[bytes, Text] # ----- os.path variables ----- supports_unicode_filenames = False # aliases (also in os) curdir = ... # type: str pardir = ... # type: str sep = ... # type: str altsep = ... # type: str extsep = ... # type: str pathsep = ... # type: str defpath = ... # type: str devnull = ... # type: str # ----- os.path function stubs ----- def abspath(path: AnyStr) -> AnyStr: ... def basename(path: AnyStr) -> AnyStr: ... if sys.version_info >= (3, 5): def commonpath(paths: Sequence[AnyStr]) -> AnyStr: ... # NOTE: Empty lists results in '' (str) regardless of contained type. # Also, in Python 2 mixed sequences of Text and bytes results in either Text or bytes # So, fall back to Any def commonprefix(list: Sequence[AnyStr]) -> Any: ... def dirname(path: AnyStr) -> AnyStr: ... def exists(path: _PathType) -> bool: ... def lexists(path: _PathType) -> bool: ... def expanduser(path: AnyStr) -> AnyStr: ... def expandvars(path: AnyStr) -> AnyStr: ... # These return float if os.stat_float_times() == True, # but int is a subclass of float. def getatime(path: _PathType) -> float: ... def getmtime(path: _PathType) -> float: ... def getctime(path: _PathType) -> float: ... def getsize(path: _PathType) -> int: ... def isabs(path: _PathType) -> bool: ... 
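# A sketch of the exec* calling conventions noted above: execl() takes the
# argv entries as separate positional arguments (argv[0] is mandatory), while
# execv() takes them as a list or tuple. Either call replaces the current
# process, so both are kept behind an obviously false guard; /bin/echo is an
# assumed Unix path used only for illustration.
import os

REPLACE_PROCESS = False   # flip only if replacing this interpreter is intended
if REPLACE_PROCESS:
    os.execl("/bin/echo", "echo", "hello")      # file, argv0, *argv
    os.execv("/bin/echo", ["echo", "hello"])    # file, argv as a list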
def isfile(path: _PathType) -> bool: ... def isdir(path: _PathType) -> bool: ... def islink(path: _PathType) -> bool: ... def ismount(path: _PathType) -> bool: ... if sys.version_info < (3, 0): # Make sure signatures are disjunct, and allow combinations of bytes and unicode. # (Since Python 2 allows that, too) # Note that e.g. os.path.join("a", "b", "c", "d", u"e") will still result in # a type error. @overload def join(__p1: bytes, *p: bytes) -> bytes: ... @overload def join(__p1: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: bytes, __p3: Text, *p: _PathType) -> Text: ... @overload def join(__p1: bytes, __p2: bytes, __p3: bytes, __p4: Text, *p: _PathType) -> Text: ... else: def join(path: AnyStr, *paths: AnyStr) -> AnyStr: ... def normcase(path: AnyStr) -> AnyStr: ... def normpath(path: AnyStr) -> AnyStr: ... if sys.platform == 'win32': def realpath(path: AnyStr) -> AnyStr: ... else: def realpath(filename: AnyStr) -> AnyStr: ... def relpath(path: AnyStr, start: _PathType = ...) -> AnyStr: ... def samefile(path1: _PathType, path2: _PathType) -> bool: ... def sameopenfile(fp1: int, fp2: int) -> bool: ... # TODO # def samestat(stat1: stat_result, # stat2: stat_result) -> bool: ... # Unix only if sys.version_info >= (3, 6): def split(path: Union[AnyStr, _PathLike[AnyStr]]) -> Tuple[AnyStr, AnyStr]: ... else: def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... def splitunc(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... # Windows only, deprecated if sys.version_info < (3,): def walk(path: AnyStr, visit: Callable[[_T, AnyStr, List[AnyStr]], Any], arg: _T) -> None: ... mypy-0.560/typeshed/stdlib/3/pipes.pyi0000644€tŠÔÚ€2›s®0000000112513215007212023777 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pipes # Based on http://docs.python.org/3.5/library/pipes.html import os class Template: def __init__(self) -> None: ... def reset(self) -> None: ... def clone(self) -> 'Template': ... def debug(self, flag: bool) -> None: ... def append(self, cmd: str, kind: str) -> None: ... def prepend(self, cmd: str, kind: str) -> None: ... def open(self, file: str, rw: str) -> os._wrap_close: ... def copy(self, file: str, rw: str) -> os._wrap_close: ... # Not documented, but widely used. # Documented as shlex.quote since 3.3. def quote(s: str) -> str: ... mypy-0.560/typeshed/stdlib/3/platform.pyi0000644€tŠÔÚ€2›s®0000000353413215007212024511 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for platform (Python 3.5) from os import devnull as DEV_NULL from os import popen from typing import Tuple, NamedTuple def libc_ver(executable: str = ..., lib: str = ..., version: str = ..., chunksize: int = ...) -> Tuple[str, str]: ... def linux_distribution(distname: str = ..., version: str = ..., id: str = ..., supported_dists: Tuple[str, ...] = ..., full_distribution_name: bool = ...) -> Tuple[str, str, str]: ... def dist(distname: str = ..., version: str = ..., id: str = ..., supported_dists: Tuple[str, ...] = ...) -> Tuple[str, str, str]: ... def win32_ver(release: str = ..., version: str = ..., csd: str = ..., ptype: str = ...) -> Tuple[str, str, str, str]: ... def mac_ver(release: str = ..., versioninfo: Tuple[str, str, str] = ..., machine: str = ...) -> Tuple[str, Tuple[str, str, str], str]: ... 
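# A brief sketch of the os.path.join() signatures above: on Python 3 every
# component must share one str/bytes type (AnyStr), so mixing the two is
# rejected by a checker, mirroring the Python 2 note in the stub.
import os.path

p1: str = os.path.join("usr", "local", "bin")        # all str -> str
p2: bytes = os.path.join(b"usr", b"local", b"bin")   # all bytes -> bytes
# os.path.join("usr", b"local")  # would be flagged: str and bytes cannot mix
print(p1, p2)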
def java_ver(release: str = ..., vendor: str = ..., vminfo: Tuple[str, str, str] = ..., osinfo: Tuple[str, str, str] = ...) -> Tuple[str, str, Tuple[str, str, str], Tuple[str, str, str]]: ... def system_alias(system: str, release: str, version: str) -> Tuple[str, str, str]: ... def architecture(executable: str = ..., bits: str = ..., linkage: str = ...) -> Tuple[str, str]: ... uname_result = NamedTuple('uname_result', [('system', str), ('node', str), ('release', str), ('version', str), ('machine', str), ('processor', str)]) def uname() -> uname_result: ... def system() -> str: ... def node() -> str: ... def release() -> str: ... def version() -> str: ... def machine() -> str: ... def processor() -> str: ... def python_implementation() -> str: ... def python_version() -> str: ... def python_version_tuple() -> Tuple[str, str, str]: ... def python_branch() -> str: ... def python_revision() -> str: ... def python_build() -> Tuple[str, str]: ... def python_compiler() -> str: ... def platform(aliased: bool = ..., terse: bool = ...) -> str: ... mypy-0.560/typeshed/stdlib/3/posix.pyi0000644€tŠÔÚ€2›s®0000000143413215007212024024 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for posix # NOTE: These are incomplete! import sys import typing from os import stat_result from typing import NamedTuple if sys.version_info >= (3, 3): uname_result = NamedTuple('uname_result', [('sysname', str), ('nodename', str), ('release', str), ('version', str), ('machine', str)]) times_result = NamedTuple('times_result', [ ('user', float), ('system', float), ('children_user', float), ('children_system', float), ('elapsed', float), ]) waitid_result = NamedTuple('waitid_result', [ ('si_pid', int), ('si_uid', int), ('si_signo', int), ('si_status', int), ('si_code', int), ]) sched_param = NamedTuple('sched_priority', [ ('sched_priority', int), ]) mypy-0.560/typeshed/stdlib/3/posixpath.pyi0000644€tŠÔÚ€2›s®0000000321213215007212024675 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for os.path # Ron Murawski # based on http://docs.python.org/3.2/library/os.path.html from typing import Any, List, Tuple, IO # ----- os.path variables ----- supports_unicode_filenames = False # ----- os.path function stubs ----- def abspath(path: str) -> str: ... def basename(path) -> str: ... def commonprefix(list: List[str]) -> str: ... def dirname(path: str) -> str: ... def exists(path: str) -> bool: ... def lexists(path: str) -> bool: ... def expanduser(path: str) -> str: ... def expandvars(path: str) -> str: ... def getatime(path: str) -> int: ... # return float if os.stat_float_times() returns True def getmtime(path: str) -> int: ... # return float if os.stat_float_times() returns True def getctime(path: str) -> int: ... # return float if os.stat_float_times() returns True def getsize(path: str) -> int: ... def isabs(path: str) -> bool: ... def isfile(path: str) -> bool: ... def isdir(path: str) -> bool: ... def islink(path: str) -> bool: ... def ismount(path: str) -> bool: ... def join(path: str, *paths: str) -> str: ... def normcase(path: str) -> str: ... def normpath(path: str) -> str: ... def realpath(path: str) -> str: ... def relpath(path: str, start: str = ...) -> str: ... def samefile(path1: str, path2: str) -> bool: ... def sameopenfile(fp1: IO[Any], fp2: IO[Any]) -> bool: ... # def samestat(stat1: stat_result, stat2: stat_result) -> bool: # ... # Unix only def split(path: str) -> Tuple[str, str]: ... def splitdrive(path: str) -> Tuple[str, str]: ... def splitext(path: str) -> Tuple[str, str]: ... 
# def splitunc(path: str) -> Tuple[str, str] : ... # Windows only, deprecated mypy-0.560/typeshed/stdlib/3/queue.pyi0000644€tŠÔÚ€2›s®0000000164613215007212024013 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for queue # NOTE: These are incomplete! from typing import Any, TypeVar, Generic, Optional _T = TypeVar('_T') class Empty(Exception): ... class Full(Exception): ... class Queue(Generic[_T]): maxsize = ... # type: int def __init__(self, maxsize: int = ...) -> None: ... def _init(self, maxsize: int) -> None: ... def empty(self) -> bool: ... def full(self) -> bool: ... def get(self, block: bool = ..., timeout: Optional[float] = ...) -> _T: ... def get_nowait(self) -> _T: ... def _get(self) -> _T: ... def put(self, item: _T, block: bool = ..., timeout: Optional[float] = ...) -> None: ... def put_nowait(self, item: _T) -> None: ... def _put(self, item: _T) -> None: ... def join(self) -> None: ... def qsize(self) -> int: ... def _qsize(self) -> int: ... def task_done(self) -> None: ... class PriorityQueue(Queue[_T]): ... class LifoQueue(Queue[_T]): ... mypy-0.560/typeshed/stdlib/3/random.pyi0000644€tŠÔÚ€2›s®0000000631413215007212024144 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for random # Ron Murawski # Updated by Jukka Lehtosalo # based on http://docs.python.org/3.2/library/random.html # ----- random classes ----- import _random import sys from typing import ( Any, TypeVar, Sequence, List, Callable, AbstractSet, Union, Optional ) _T = TypeVar('_T') class Random(_random.Random): def __init__(self, x: Any = ...) -> None: ... def seed(self, a: Any = ..., version: int = ...) -> None: ... def getstate(self) -> tuple: ... def setstate(self, state: tuple) -> None: ... def getrandbits(self, k: int) -> int: ... def randrange(self, start: int, stop: Union[int, None] = ..., step: int = ...) -> int: ... def randint(self, a: int, b: int) -> int: ... def choice(self, seq: Sequence[_T]) -> _T: ... def shuffle(self, x: List[Any], random: Union[Callable[[], float], None] = ...) -> None: ... def sample(self, population: Union[Sequence[_T], AbstractSet[_T]], k: int) -> List[_T]: ... def random(self) -> float: ... def uniform(self, a: float, b: float) -> float: ... def triangular(self, low: float = ..., high: float = ..., mode: float = ...) -> float: ... def betavariate(self, alpha: float, beta: float) -> float: ... def expovariate(self, lambd: float) -> float: ... def gammavariate(self, alpha: float, beta: float) -> float: ... def gauss(self, mu: float, sigma: float) -> float: ... def lognormvariate(self, mu: float, sigma: float) -> float: ... def normalvariate(self, mu: float, sigma: float) -> float: ... def vonmisesvariate(self, mu: float, kappa: float) -> float: ... def paretovariate(self, alpha: float) -> float: ... def weibullvariate(self, alpha: float, beta: float) -> float: ... # SystemRandom is not implemented for all OS's; good on Windows & Linux class SystemRandom(Random): ... # ----- random function stubs ----- def seed(a: Any = ..., version: int = ...) -> None: ... def getstate() -> object: ... def setstate(state: object) -> None: ... def getrandbits(k: int) -> int: ... def randrange(start: int, stop: Union[None, int] = ..., step: int = ...) -> int: ... def randint(a: int, b: int) -> int: ... def choice(seq: Sequence[_T]) -> _T: ... if sys.version_info >= (3, 6): def choices(population: Sequence[_T], weights: Optional[Sequence[float]] = ..., *, cum_weights: Optional[Sequence[float]] = ..., k: int = ...) -> List[_T]: ... 
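# A small sketch of the generic Queue stub above: parametrizing Queue with a
# concrete element type lets put()/get() be checked against it.
import queue

q: "queue.Queue[int]" = queue.Queue(maxsize=2)
q.put(1)
q.put_nowait(2)
first: int = q.get()    # inferred as int from the Queue[int] annotation
print(first, q.qsize(), q.empty())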
def shuffle(x: List[Any], random: Union[Callable[[], float], None] = ...) -> None: ... def sample(population: Union[Sequence[_T], AbstractSet[_T]], k: int) -> List[_T]: ... def random() -> float: ... def uniform(a: float, b: float) -> float: ... def triangular(low: float = ..., high: float = ..., mode: float = ...) -> float: ... def betavariate(alpha: float, beta: float) -> float: ... def expovariate(lambd: float) -> float: ... def gammavariate(alpha: float, beta: float) -> float: ... def gauss(mu: float, sigma: float) -> float: ... def lognormvariate(mu: float, sigma: float) -> float: ... def normalvariate(mu: float, sigma: float) -> float: ... def vonmisesvariate(mu: float, kappa: float) -> float: ... def paretovariate(alpha: float) -> float: ... def weibullvariate(alpha: float, beta: float) -> float: ... mypy-0.560/typeshed/stdlib/3/re.pyi0000644€tŠÔÚ€2›s®0000001142513215007212023271 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for re # Ron Murawski # 'bytes' support added by Jukka Lehtosalo # based on: http://docs.python.org/3.2/library/re.html # and http://hg.python.org/cpython/file/618ea5612e83/Lib/re.py import sys from typing import ( List, Iterator, overload, Callable, Tuple, Sequence, Dict, Generic, AnyStr, Match, Pattern, Any, Optional, Union ) # ----- re variables and constants ----- if sys.version_info >= (3, 6): import enum class RegexFlag(enum.IntFlag): A = 0 ASCII = 0 DEBUG = 0 I = 0 IGNORECASE = 0 L = 0 LOCALE = 0 M = 0 MULTILINE = 0 S = 0 DOTALL = 0 X = 0 VERBOSE = 0 U = 0 UNICODE = 0 T = 0 TEMPLATE = 0 A = RegexFlag.A ASCII = RegexFlag.ASCII DEBUG = RegexFlag.DEBUG I = RegexFlag.I IGNORECASE = RegexFlag.IGNORECASE L = RegexFlag.L LOCALE = RegexFlag.LOCALE M = RegexFlag.M MULTILINE = RegexFlag.MULTILINE S = RegexFlag.S DOTALL = RegexFlag.DOTALL X = RegexFlag.X VERBOSE = RegexFlag.VERBOSE U = RegexFlag.U UNICODE = RegexFlag.UNICODE T = RegexFlag.T TEMPLATE = RegexFlag.TEMPLATE _FlagsType = Union[int, RegexFlag] else: A = 0 ASCII = 0 DEBUG = 0 I = 0 IGNORECASE = 0 L = 0 LOCALE = 0 M = 0 MULTILINE = 0 S = 0 DOTALL = 0 X = 0 VERBOSE = 0 U = 0 UNICODE = 0 T = 0 TEMPLATE = 0 _FlagsType = int class error(Exception): ... @overload def compile(pattern: AnyStr, flags: _FlagsType = ...) -> Pattern[AnyStr]: ... @overload def compile(pattern: Pattern[AnyStr], flags: _FlagsType = ...) -> Pattern[AnyStr]: ... @overload def search(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> Match[AnyStr]: ... @overload def search(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> Match[AnyStr]: ... @overload def match(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> Match[AnyStr]: ... @overload def match(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> Match[AnyStr]: ... # New in Python 3.4 @overload def fullmatch(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> Optional[Match[AnyStr]]: ... @overload def fullmatch(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> Optional[Match[AnyStr]]: ... @overload def split(pattern: AnyStr, string: AnyStr, maxsplit: int = ..., flags: _FlagsType = ...) -> List[AnyStr]: ... @overload def split(pattern: Pattern[AnyStr], string: AnyStr, maxsplit: int = ..., flags: _FlagsType = ...) -> List[AnyStr]: ... @overload def findall(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> List[Any]: ... @overload def findall(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> List[Any]: ... 
# Return an iterator yielding match objects over all non-overlapping matches # for the RE pattern in string. The string is scanned left-to-right, and # matches are returned in the order found. Empty matches are included in the # result unless they touch the beginning of another match. @overload def finditer(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> Iterator[Match[AnyStr]]: ... @overload def finditer(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> Iterator[Match[AnyStr]]: ... @overload def sub(pattern: AnyStr, repl: AnyStr, string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> AnyStr: ... @overload def sub(pattern: AnyStr, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> AnyStr: ... @overload def sub(pattern: Pattern[AnyStr], repl: AnyStr, string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> AnyStr: ... @overload def sub(pattern: Pattern[AnyStr], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> AnyStr: ... @overload def subn(pattern: AnyStr, repl: AnyStr, string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> Tuple[AnyStr, int]: ... @overload def subn(pattern: AnyStr, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> Tuple[AnyStr, int]: ... @overload def subn(pattern: Pattern[AnyStr], repl: AnyStr, string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> Tuple[AnyStr, int]: ... @overload def subn(pattern: Pattern[AnyStr], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> Tuple[AnyStr, int]: ... def escape(string: AnyStr) -> AnyStr: ... def purge() -> None: ... def template(pattern: Union[AnyStr, Pattern[AnyStr]], flags: _FlagsType = ...) -> Pattern[AnyStr]: ... mypy-0.560/typeshed/stdlib/3/reprlib.pyi0000644€tŠÔÚ€2›s®0000000234613215007212024324 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for reprlib (Python 3) from array import array from typing import Any, Callable, Deque, Dict, FrozenSet, List, Set, Tuple _ReprFunc = Callable[[Any], str] def recursive_repr(fillvalue: str = ...) -> Callable[[_ReprFunc], _ReprFunc]: ... class Repr: maxlevel: int maxdict: int maxlist: int maxtuple: int maxset: int maxfrozenset: int maxdeque: int maxarray: int maxlong: int maxstring: int maxother: int def __init__(self) -> None: ... def repr(self, x: Any) -> str: ... def repr1(self, x: Any, level: int) -> str: ... def repr_tuple(self, x: Tuple[Any, ...], level: int) -> str: ... def repr_list(self, x: List[Any], level: int) -> str: ... def repr_array(self, x: array, level: int) -> str: ... def repr_set(self, x: Set[Any], level: int) -> str: ... def repr_frozenset(self, x: FrozenSet[Any], level: int) -> str: ... def repr_deque(self, x: Deque[Any], level: int) -> str: ... def repr_dict(self, x: Dict[Any, Any], level: int) -> str: ... def repr_str(self, x: str, level: int) -> str: ... def repr_int(self, x: int, level: int) -> str: ... def repr_instance(self, x: Any, level: int) -> str: ... aRepr: Repr def repr(x: object) -> str: ... mypy-0.560/typeshed/stdlib/3/resource.pyi0000644€tŠÔÚ€2›s®0000000314413215007212024511 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for resource # NOTE: These are incomplete! from typing import Tuple, Optional, NamedTuple RLIMIT_AS = ... # type: int RLIMIT_CORE = ... # type: int RLIMIT_CPU = ... # type: int RLIMIT_DATA = ... # type: int RLIMIT_FSIZE = ... 
# type: int RLIMIT_MEMLOCK = ... # type: int RLIMIT_MSGQUEUE = ... # type: int RLIMIT_NICE = ... # type: int RLIMIT_NOFILE = ... # type: int RLIMIT_NPROC = ... # type: int RLIMIT_OFILE = ... # type: int RLIMIT_RSS = ... # type: int RLIMIT_RTPRIO = ... # type: int RLIMIT_RTTIME = ... # type: int RLIMIT_SIGPENDING = ... # type: int RLIMIT_STACK = ... # type: int RLIM_INFINITY = ... # type: int RUSAGE_CHILDREN = ... # type: int RUSAGE_SELF = ... # type: int RUSAGE_THREAD = ... # type: int _RUsage = NamedTuple('_RUsage', [('ru_utime', float), ('ru_stime', float), ('ru_maxrss', int), ('ru_ixrss', int), ('ru_idrss', int), ('ru_isrss', int), ('ru_minflt', int), ('ru_majflt', int), ('ru_nswap', int), ('ru_inblock', int), ('ru_oublock', int), ('ru_msgsnd', int), ('ru_msgrcv', int), ('ru_nsignals', int), ('ru_nvcsw', int), ('ru_nivcsw', int)]) def getpagesize() -> int: ... def getrlimit(resource: int) -> Tuple[int, int]: ... def getrusage(who: int) -> _RUsage: ... def prlimit(pid: int, resource: int, limits: Optional[Tuple[int, int]]) -> Tuple[int, int]: ... def setrlimit(resource: int, limits: Tuple[int, int]) -> None: ... # NOTE: This is an alias of OSError in Python 3.3. class error(Exception): ... mypy-0.560/typeshed/stdlib/3/runpy.pyi0000644€tŠÔÚ€2›s®0000000076713215007212024047 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class _TempModule: mod_name = ... # type: Any module = ... # type: Any def __init__(self, mod_name): ... def __enter__(self): ... def __exit__(self, *args): ... class _ModifiedArgv0: value = ... # type: Any def __init__(self, value): ... def __enter__(self): ... def __exit__(self, *args): ... def run_module(mod_name, init_globals=None, run_name=None, alter_sys=False): ... def run_path(path_name, init_globals=None, run_name=None): ... mypy-0.560/typeshed/stdlib/3/shelve.pyi0000644€tŠÔÚ€2›s®0000000304013215007212024143 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Iterator, Optional, Tuple import collections class Shelf(collections.MutableMapping): def __init__(self, dict: Dict[bytes, Any], protocol: Optional[int] = ..., writeback: bool = ..., keyencoding: str = ...) -> None: ... def __iter__(self) -> Iterator[str]: ... def __len__(self) -> int: ... def __contains__(self, key: Any) -> bool: ... # key should be str, but it would conflict with superclass's type signature def get(self, key: str, default: Any = ...) -> Any: ... def __getitem__(self, key: str) -> Any: ... def __setitem__(self, key: str, value: Any) -> None: ... def __delitem__(self, key: str) -> None: ... def __enter__(self) -> Shelf: ... def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ... def close(self) -> None: ... def __del__(self) -> None: ... def sync(self) -> None: ... class BsdDbShelf(Shelf): def __init__(self, dict: Dict[bytes, Any], protocol: Optional[int] = ..., writeback: bool = ..., keyencoding: str = ...) -> None: ... def set_location(self, key: Any) -> Tuple[str, Any]: ... def next(self) -> Tuple[str, Any]: ... def previous(self) -> Tuple[str, Any]: ... def first(self) -> Tuple[str, Any]: ... def last(self) -> Tuple[str, Any]: ... class DbfilenameShelf(Shelf): def __init__(self, filename: str, flag: str = ..., protocol: Optional[int] = ..., writeback: bool = ...) -> None: ... def open(filename: str, flag: str = ..., protocol: Optional[int] = ..., writeback: bool = ...) -> DbfilenameShelf: ... 
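# Hedged usage sketch (illustration only, not part of the typeshed stub): how the
# Shelf/DbfilenameShelf API declared above is typically exercised. The filename
# 'example.db' is a made-up placeholder.
import shelve

def _shelve_demo() -> None:
    # shelve.open() returns a DbfilenameShelf; it supports the mapping protocol
    # declared on Shelf (__setitem__, __getitem__, get, __contains__, ...) and the
    # context-manager protocol (__enter__/__exit__) declared above.
    with shelve.open('example.db') as db:
        db['answer'] = 42            # __setitem__(key: str, value: Any)
        print(db.get('answer', 0))   # get(key: str, default: Any) -> Any
        print('answer' in db)        # __contains__(key: Any) -> bool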
mypy-0.560/typeshed/stdlib/3/shlex.pyi0000644€tŠÔÚ€2›s®0000000322013215007212024000 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for shlex # Based on http://docs.python.org/3.2/library/shlex.html from typing import List, Tuple, Any, TextIO, Union, Optional, Iterator import sys def split(s: str, comments: bool = ..., posix: bool = ...) -> List[str]: ... # Added in 3.3, use (undocumented) pipes.quote in previous versions. def quote(s: str) -> str: ... class shlex(Iterator[str]): commenters = ... # type: str wordchars = ... # type: str whitespace = ... # type: str escape = ... # type: str quotes = ... # type: str escapedquotes = ... # type: str whitespace_split = ... # type: bool infile = ... # type: str instream = ... # type: TextIO source = ... # type: str debug = 0 lineno = 0 token = ... # type: str eof = ... # type: str if sys.version_info >= (3, 6): punctuation_chars = ... # type: str if sys.version_info >= (3, 6): def __init__(self, instream: Union[str, TextIO] = ..., infile: Optional[str] = ..., posix: bool = ..., punctuation_chars: Union[bool, str] = ...) -> None: ... else: def __init__(self, instream: Union[str, TextIO] = ..., infile: Optional[str] = ..., posix: bool = ...) -> None: ... def get_token(self) -> str: ... def push_token(self, tok: str) -> None: ... def read_token(self) -> str: ... def sourcehook(self, filename: str) -> Tuple[str, TextIO]: ... # TODO argument types def push_source(self, newstream: Any, newfile: Any = ...) -> None: ... def pop_source(self) -> None: ... def error_leader(self, infile: str = ..., lineno: int = ...) -> None: ... mypy-0.560/typeshed/stdlib/3/shutil.pyi0000644€tŠÔÚ€2›s®0000001167213215007212024177 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for shutil import os import sys # Based on http://docs.python.org/3.2/library/shutil.html # 'bytes' paths are not properly supported: they don't work with all functions, # sometimes they only work partially (broken exception messages), and the test # cases don't use them. from typing import ( List, Iterable, Callable, Any, Tuple, Sequence, NamedTuple, IO, AnyStr, Optional, Union ) if sys.version_info >= (3, 6): _Path = Union[str, os.PathLike[str]] # Return value of some functions that may either return a path-like object that was passed in or # a string _PathReturn = Any else: _Path = str _PathReturn = str def copyfileobj(fsrc: IO[AnyStr], fdst: IO[AnyStr], length: int = ...) -> None: ... if sys.version_info >= (3, 3): def copyfile(src: _Path, dst: _Path, *, follow_symlinks: bool = ...) -> _PathReturn: ... def copymode(src: _Path, dst: _Path, *, follow_symlinks: bool = ...) -> None: ... def copystat(src: _Path, dst: _Path, *, follow_symlinks: bool = ...) -> None: ... def copy(src: _Path, dst: _Path, *, follow_symlinks: bool = ...) -> _PathReturn: ... def copy2(src: _Path, dst: _Path, *, follow_symlinks: bool = ...) -> _PathReturn: ... else: def copyfile(src: _Path, dst: _Path) -> None: ... def copymode(src: _Path, dst: _Path) -> None: ... def copystat(src: _Path, dst: _Path) -> None: ... def copy(src: _Path, dst: _Path) -> None: ... def copy2(src: _Path, dst: _Path) -> None: ... def ignore_patterns(*patterns: _Path) -> Callable[[_Path, List[str]], Iterable[str]]: ... _IgnoreFn = Union[None, Callable[[str, List[str]], Iterable[str]], Callable[[_Path, List[str]], Iterable[str]]] if sys.version_info >= (3, 3): def copytree(src: _Path, dst: _Path, symlinks: bool = ..., ignore: _IgnoreFn = ..., copy_function: Callable[[str, str], None] = ..., ignore_dangling_symlinks: bool = ...) 
-> _PathReturn: ... else: def copytree(src: str, dst: str, symlinks: bool = ..., ignore: _IgnoreFn = ..., copy_function: Callable[[str, str], None] = ..., ignore_dangling_symlinks: bool = ...) -> None: ... def rmtree(path: _Path, ignore_errors: bool = ..., onerror: Callable[[Any, Any, Any], None] = ...) -> None: ... if sys.version_info >= (3, 5): def move(src: _Path, dst: _Path, copy_function: Union[Callable[[str, str], None], Callable[[_Path, _Path], None]] = ...) -> _PathReturn: ... elif sys.version_info >= (3, 3): def move(src: _Path, dst: _Path) -> str: ... else: def move(src: _Path, dst: _Path) -> None: ... if sys.version_info >= (3, 3): _ntuple_diskusage = NamedTuple('usage', [('total', int), ('used', int), ('free', int)]) def disk_usage(path: _Path) -> _ntuple_diskusage: ... def chown(path: _Path, user: Optional[str] = ..., group: Optional[str] = ...) -> None: ... def which(cmd: _Path, mode: int = ..., path: Optional[_Path] = ...) -> Optional[str]: ... if sys.version_info >= (3, 4): class Error(OSError): ... class SameFileError(Error): ... class SpecialFileError(OSError): ... class ExecError(OSError): ... class ReadError(OSError): ... else: class Error(EnvironmentError): ... class SpecialFileError(EnvironmentError): ... class ExecError(EnvironmentError): ... class ReadError(EnvironmentError): ... class RegistryError(Exception): ... def make_archive(base_name: str, format: str, root_dir: _Path = ..., base_dir: _Path = ..., verbose: bool = ..., dry_run: bool = ..., owner: str = ..., group: str = ..., logger: Any = ...) -> str: ... def get_archive_formats() -> List[Tuple[str, str]]: ... # TODO function is a callback that receives keyword arguments; should make it not use Any # once we have support for callable types with keyword args def register_archive_format(name: str, function: Any, extra_args: Sequence[Tuple[str, Any]] = ..., description: str = ...) -> None: ... def unregister_archive_format(name: str) -> None: ... # Should be _Path once http://bugs.python.org/issue30218 is fixed def unpack_archive(filename: str, extract_dir: _Path = ..., format: str = ...) -> None: ... def register_unpack_format(name: str, extensions: List[str], function: Any, extra_args: Sequence[Tuple[str, Any]] = ..., description: str = ...) -> None: ... def unregister_unpack_format(name: str) -> None: ... def get_unpack_formats() -> List[Tuple[str, List[str], str]]: ... if sys.version_info >= (3, 3): def get_terminal_size(fallback: Tuple[int, int] = ...) -> os.terminal_size: ... mypy-0.560/typeshed/stdlib/3/signal.pyi0000644€tŠÔÚ€2›s®0000001062413215007212024140 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the 'signal' module.""" import sys from enum import IntEnum from typing import Any, Callable, List, Tuple, Dict, Generic, Union, Optional, Iterable, Set from types import FrameType class ItimerError(IOError): ... ITIMER_PROF = ... # type: int ITIMER_REAL = ... # type: int ITIMER_VIRTUAL = ... # type: int NSIG = ... # type: int if sys.version_info >= (3, 5): class Signals(IntEnum): SIGABRT = ... SIGALRM = ... SIGBUS = ... SIGCHLD = ... SIGCLD = ... SIGCONT = ... SIGFPE = ... SIGHUP = ... SIGILL = ... SIGINT = ... SIGIO = ... SIGIOT = ... SIGKILL = ... SIGPIPE = ... SIGPOLL = ... SIGPROF = ... SIGPWR = ... SIGQUIT = ... SIGRTMAX = ... SIGRTMIN = ... SIGSEGV = ... SIGSTOP = ... SIGSYS = ... SIGTERM = ... SIGTRAP = ... SIGTSTP = ... SIGTTIN = ... SIGTTOU = ... SIGURG = ... SIGUSR1 = ... SIGUSR2 = ... SIGVTALRM = ... SIGWINCH = ... SIGXCPU = ... SIGXFSZ = ... 
class Handlers(IntEnum): SIG_DFL = ... SIG_IGN = ... SIG_DFL = Handlers.SIG_DFL SIG_IGN = Handlers.SIG_IGN class Sigmasks(IntEnum): SIG_BLOCK = ... SIG_UNBLOCK = ... SIG_SETMASK = ... SIG_BLOCK = Sigmasks.SIG_BLOCK SIG_UNBLOCK = Sigmasks.SIG_UNBLOCK SIG_SETMASK = Sigmasks.SIG_SETMASK _SIG = Signals _SIGNUM = Union[int, Signals] _HANDLER = Union[Callable[[Signals, FrameType], None], int, Handlers, None] else: SIG_DFL = ... # type: int SIG_IGN = ... # type: int SIG_BLOCK = ... # type: int SIG_UNBLOCK = ... # type: int SIG_SETMASK = ... # type: int _SIG = int _SIGNUM = int _HANDLER = Union[Callable[[int, FrameType], None], int, None] SIGABRT = ... # type: _SIG SIGALRM = ... # type: _SIG SIGBUS = ... # type: _SIG SIGCHLD = ... # type: _SIG SIGCLD = ... # type: _SIG SIGCONT = ... # type: _SIG SIGFPE = ... # type: _SIG SIGHUP = ... # type: _SIG SIGILL = ... # type: _SIG SIGINT = ... # type: _SIG SIGIO = ... # type: _SIG SIGIOT = ... # type: _SIG SIGKILL = ... # type: _SIG SIGPIPE = ... # type: _SIG SIGPOLL = ... # type: _SIG SIGPROF = ... # type: _SIG SIGPWR = ... # type: _SIG SIGQUIT = ... # type: _SIG SIGRTMAX = ... # type: _SIG SIGRTMIN = ... # type: _SIG SIGSEGV = ... # type: _SIG SIGSTOP = ... # type: _SIG SIGSYS = ... # type: _SIG SIGTERM = ... # type: _SIG SIGTRAP = ... # type: _SIG SIGTSTP = ... # type: _SIG SIGTTIN = ... # type: _SIG SIGTTOU = ... # type: _SIG SIGURG = ... # type: _SIG SIGUSR1 = ... # type: _SIG SIGUSR2 = ... # type: _SIG SIGVTALRM = ... # type: _SIG SIGWINCH = ... # type: _SIG SIGXCPU = ... # type: _SIG SIGXFSZ = ... # type: _SIG CTRL_C_EVENT = 0 # Windows CTRL_BREAK_EVENT = 0 # Windows class struct_siginfo(Tuple[int, int, int, int, int, int, int]): def __init__(self, sequence: Iterable[int]) -> None: ... @property def si_signo(self) -> int: ... @property def si_code(self) -> int: ... @property def si_errno(self) -> int: ... @property def si_pid(self) -> int: ... @property def si_uid(self) -> int: ... @property def si_status(self) -> int: ... @property def si_band(self) -> int: ... def alarm(time: int) -> int: ... def default_int_handler(signum: int, frame: FrameType) -> None: raise KeyboardInterrupt() def getitimer(which: int) -> Tuple[float, float]: ... def getsignal(signalnum: _SIGNUM) -> _HANDLER: raise ValueError() def pause() -> None: ... def pthread_kill(thread_id: int, signum: int) -> None: raise OSError() def pthread_sigmask(how: int, mask: Iterable[int]) -> Set[_SIGNUM]: raise OSError() def set_wakeup_fd(fd: int) -> int: ... def setitimer(which: int, seconds: float, interval: float = ...) -> Tuple[float, float]: ... def siginterrupt(signalnum: int, flag: bool) -> None: raise OSError() def signal(signalnum: _SIGNUM, handler: _HANDLER) -> _HANDLER: raise OSError() def sigpending() -> Any: raise OSError() def sigtimedwait(sigset: Iterable[int], timeout: float) -> Optional[struct_siginfo]: raise OSError() raise ValueError() def sigwait(sigset: Iterable[int]) -> _SIGNUM: raise OSError() def sigwaitinfo(sigset: Iterable[int]) -> struct_siginfo: raise OSError() mypy-0.560/typeshed/stdlib/3/smtplib.pyi0000644€tŠÔÚ€2›s®0000000777413215007212024351 0ustar jukkaDROPBOX\Domain Users00000000000000from email.message import Message as _Message from typing import ( Any, AnyStr, Dict, Generic, List, Optional, Sequence, Tuple, Union) _Reply = Tuple[int, bytes] _SendErrs = Dict[str, _Reply] class SMTPException(OSError): ... class SMTPServerDisconnected(SMTPException): ... class SMTPResponseException(SMTPException): smtp_code = ... # type: int smtp_error = ... 
# type: Union[bytes, str] args = ... # type: Union[Tuple[int, Union[bytes, str]], Tuple[int, bytes, str]] def __init__(self, code: int, msg: Union[bytes, str]) -> None: ... class SMTPSenderRefused(SMTPResponseException): smtp_code = ... # type: int smtp_error = ... # type: bytes sender = ... # type: str args = ... # type: Tuple[int, bytes, str] def __init__(self, code: int, msg: bytes, sender: str) -> None: ... class SMTPRecipientsRefused(SMTPException): recipients = ... # type: _SendErrs args = ... # type: Tuple[_SendErrs] def __init__(self, recipients: _SendErrs) -> None: ... class SMTPDataError(SMTPResponseException): ... class SMTPConnectError(SMTPResponseException): ... class SMTPHeloError(SMTPResponseException): ... class SMTPAuthenticationError(SMTPResponseException): ... def quoteaddr(addrstring): ... def quotedata(data): ... class SMTP: debuglevel = ... # type: int file = ... # type: Any helo_resp = ... # type: Any ehlo_msg = ... # type: Any ehlo_resp = ... # type: Any does_esmtp = ... # type: Any default_port = ... # type: Any timeout = ... # type: float esmtp_features = ... # type: Any source_address = ... # type: Any local_hostname = ... # type: Any def __init__(self, host: str = ..., port: int = ..., local_hostname: Optional[str] = ..., timeout: float = ..., source_address: Tuple[str, int] = ...) -> None: ... def __enter__(self): ... def __exit__(self, *args): ... def set_debuglevel(self, debuglevel: int) -> None: ... sock = ... # type: Any def connect(self, host=..., port=..., source_address=...): ... def send(self, s): ... def putcmd(self, cmd, args=...): ... def getreply(self) -> _Reply: ... def docmd(self, cmd, args=...): ... def helo(self, name=...): ... def ehlo(self, name=...): ... def has_extn(self, opt): ... def help(self, args=...): ... def rset(self) -> _Reply: ... def noop(self) -> _Reply: ... def mail(self, sender: str, options: Sequence[str] = ...) -> _Reply: ... def rcpt(self, recip: str, options: Sequence[str] = ...) -> _Reply: ... def data(self, msg): ... def verify(self, address): ... vrfy = ... # type: Any def expn(self, address): ... def ehlo_or_helo_if_needed(self): ... def login(self, user, password): ... def starttls(self, keyfile=..., certfile=..., context=...): ... def sendmail(self, from_addr: str, to_addrs: Union[str, Sequence[str]], msg: Union[bytes, str], mail_options: Sequence[str] = ..., rcpt_options: List[str] = ...) -> _SendErrs: ... def send_message(self, msg: _Message, from_addr: Optional[str] = ..., to_addrs: Optional[Union[str, Sequence[str]]] = ..., mail_options: List[str] = ..., rcpt_options: Sequence[str] = ...) -> _SendErrs: ... def close(self): ... def quit(self) -> _Reply: ... class SMTP_SSL(SMTP): default_port = ... # type: Any keyfile = ... # type: Any certfile = ... # type: Any context = ... # type: Any def __init__(self, host=..., port=..., local_hostname=..., keyfile=..., certfile=..., timeout=..., source_address=..., context=...): ... class LMTP(SMTP): ehlo_msg = ... # type: Any def __init__(self, host: str = ..., port: int = ..., local_hostname: Optional[str] = ..., source_address: Optional[Tuple[str, int]] = ...) -> None: ... sock = ... # type: Any file = ... # type: Any def connect(self, host=..., port=..., source_address=...): ... 
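# Hedged usage sketch (illustration only, not part of the typeshed stub): sending a
# message through the SMTP class declared above. The host, port, and addresses are
# made-up placeholders; sendmail() returns the _SendErrs dict of refused recipients.
import smtplib

def _smtp_demo() -> None:
    # SMTP supports the context-manager protocol (__enter__/__exit__) declared above.
    with smtplib.SMTP('localhost', 25) as server:
        server.set_debuglevel(1)
        refused = server.sendmail('from@example.com', ['to@example.com'],
                                  'Subject: hi\r\n\r\nhello')
        print(refused)               # empty dict when every recipient was accepted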
mypy-0.560/typeshed/stdlib/3/socketserver.pyi0000644€tŠÔÚ€2›s®0000000744613215007212025412 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for socketserver from typing import Any, BinaryIO, Optional, Tuple from socket import SocketType import sys import types class BaseServer: address_family = ... # type: int RequestHandlerClass = ... # type: type server_address = ... # type: Tuple[str, int] socket = ... # type: SocketType allow_reuse_address = ... # type: bool request_queue_size = ... # type: int socket_type = ... # type: int timeout = ... # type: Optional[float] def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: type) -> None: ... def fileno(self) -> int: ... def handle_request(self) -> None: ... def serve_forever(self, poll_interval: float = ...) -> None: ... def shutdown(self) -> None: ... def server_close(self) -> None: ... def finish_request(self, request: bytes, client_address: Tuple[str, int]) -> None: ... def get_request(self) -> None: ... def handle_error(self, request: bytes, client_address: Tuple[str, int]) -> None: ... def handle_timeout(self) -> None: ... def process_request(self, request: bytes, client_address: Tuple[str, int]) -> None: ... def server_activate(self) -> None: ... def server_bind(self) -> None: ... def verify_request(self, request: bytes, client_address: Tuple[str, int]) -> bool: ... if sys.version_info >= (3, 6): def __enter__(self) -> 'BaseServer': ... def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception], exc_tb: Optional[types.TracebackType]) -> bool: ... if sys.version_info >= (3, 3): def service_actions(self) -> None: ... class TCPServer(BaseServer): def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: type, bind_and_activate: bool = ...) -> None: ... class UDPServer(BaseServer): def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: type, bind_and_activate: bool = ...) -> None: ... if sys.platform != 'win32': class UnixStreamServer(BaseServer): def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: type, bind_and_activate: bool = ...) -> None: ... class UnixDatagramServer(BaseServer): def __init__(self, server_address: Tuple[str, int], RequestHandlerClass: type, bind_and_activate: bool = ...) -> None: ... class ForkingMixIn: ... class ThreadingMixIn: ... class ForkingTCPServer(ForkingMixIn, TCPServer): ... class ForkingUDPServer(ForkingMixIn, UDPServer): ... class ThreadingTCPServer(ThreadingMixIn, TCPServer): ... class ThreadingUDPServer(ThreadingMixIn, UDPServer): ... if sys.platform != 'win32': class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): ... class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): ... class BaseRequestHandler: # Those are technically of types, respectively: # * Union[SocketType, Tuple[bytes, SocketType]] # * Union[Tuple[str, int], str] # But there are some concerns that having unions here would cause # too much inconvenience to people using it (see # https://github.com/python/typeshed/pull/384#issuecomment-234649696) request = ... # type: Any client_address = ... # type: Any server = ... # type: BaseServer def setup(self) -> None: ... def handle(self) -> None: ... def finish(self) -> None: ... class StreamRequestHandler(BaseRequestHandler): rfile = ... # type: BinaryIO wfile = ... # type: BinaryIO class DatagramRequestHandler(BaseRequestHandler): rfile = ... # type: BinaryIO wfile = ... 
# type: BinaryIO mypy-0.560/typeshed/stdlib/3/spwd.pyi0000644€tŠÔÚ€2›s®0000000122213215007212023632 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, NamedTuple struct_spwd = NamedTuple("struct_spwd", [("sp_namp", str), ("sp_pwdp", str), ("sp_lstchg", int), ("sp_min", int), ("sp_max", int), ("sp_warn", int), ("sp_inact", int), ("sp_expire", int), ("sp_flag", int)]) def getspall() -> List[struct_spwd]: ... def getspnam(name: str) -> struct_spwd: ... mypy-0.560/typeshed/stdlib/3/sqlite3/0000755€tŠÔÚ€2›s®0000000000013215007244023526 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/sqlite3/__init__.pyi0000644€tŠÔÚ€2›s®0000000005313215007212026001 0ustar jukkaDROPBOX\Domain Users00000000000000from sqlite3.dbapi2 import * # noqa: F403 mypy-0.560/typeshed/stdlib/3/sqlite3/dbapi2.pyi0000644€tŠÔÚ€2›s®0000002366713215007212025423 0ustar jukkaDROPBOX\Domain Users00000000000000# Filip Hron # based heavily on Andrey Vlasovskikh's python-skeletons https://github.com/JetBrains/python-skeletons/blob/master/sqlite3.py import sys from typing import Any, Union, List, Iterator, Optional, TypeVar, Callable from datetime import time, datetime from collections import Iterable _T = TypeVar('_T') paramstyle = ... # type: str threadsafety = ... # type: int apilevel = ... # type: str Date = ... # type: datetime Time = ... # type: time Timestamp = ... # type: datetime def DateFromTicks(ticks): ... def TimeFromTicks(ticks): ... def TimestampFromTicks(ticks): ... version_info = ... # type: Any sqlite_version_info = ... # type: Any Binary = ... # type: Any def register_adapters_and_converters(): ... # The remaining definitions are imported from _sqlite3. PARSE_COLNAMES = ... # type: int PARSE_DECLTYPES = ... # type: int SQLITE_ALTER_TABLE = ... # type: int SQLITE_ANALYZE = ... # type: int SQLITE_ATTACH = ... # type: int SQLITE_CREATE_INDEX = ... # type: int SQLITE_CREATE_TABLE = ... # type: int SQLITE_CREATE_TEMP_INDEX = ... # type: int SQLITE_CREATE_TEMP_TABLE = ... # type: int SQLITE_CREATE_TEMP_TRIGGER = ... # type: int SQLITE_CREATE_TEMP_VIEW = ... # type: int SQLITE_CREATE_TRIGGER = ... # type: int SQLITE_CREATE_VIEW = ... # type: int SQLITE_DELETE = ... # type: int SQLITE_DENY = ... # type: int SQLITE_DETACH = ... # type: int SQLITE_DROP_INDEX = ... # type: int SQLITE_DROP_TABLE = ... # type: int SQLITE_DROP_TEMP_INDEX = ... # type: int SQLITE_DROP_TEMP_TABLE = ... # type: int SQLITE_DROP_TEMP_TRIGGER = ... # type: int SQLITE_DROP_TEMP_VIEW = ... # type: int SQLITE_DROP_TRIGGER = ... # type: int SQLITE_DROP_VIEW = ... # type: int SQLITE_IGNORE = ... # type: int SQLITE_INSERT = ... # type: int SQLITE_OK = ... # type: int SQLITE_PRAGMA = ... # type: int SQLITE_READ = ... # type: int SQLITE_REINDEX = ... # type: int SQLITE_SELECT = ... # type: int SQLITE_TRANSACTION = ... # type: int SQLITE_UPDATE = ... # type: int adapters = ... # type: Any converters = ... # type: Any sqlite_version = ... # type: str version = ... # type: str # TODO: adapt needs to get probed def adapt(obj, protocol, alternate): ... def complete_statement(sql: str) -> bool: ... if sys.version_info >= (3, 4): def connect(database: Union[bytes, str], timeout: float = ..., detect_types: int = ..., isolation_level: Union[str, None] = ..., check_same_thread: bool = ..., factory: Union[Connection, None] = ..., cached_statements: int = ..., uri: bool = ...) -> Connection: ... 
else: def connect(database: Union[bytes, str], timeout: float = ..., detect_types: int = ..., isolation_level: Union[str, None] = ..., check_same_thread: bool = ..., factory: Union[Connection, None] = ..., cached_statements: int = ...) -> Connection: ... def enable_callback_tracebacks(flag: bool) -> None: ... def enable_shared_cache(do_enable: int) -> None: ... def register_adapter(type: _T, callable: Callable[[_T], Union[int, float, str, bytes]]) -> None: ... # TODO: sqlite3.register_converter.__doc__ specifies callable as unknown def register_converter(typename: str, callable: Callable[[bytes], Any]) -> None: ... class Cache: def __init__(self, *args, **kwargs) -> None: ... def display(self, *args, **kwargs) -> None: ... def get(self, *args, **kwargs) -> None: ... class Connection: DataError = ... # type: Any DatabaseError = ... # type: Any Error = ... # type: Any IntegrityError = ... # type: Any InterfaceError = ... # type: Any InternalError = ... # type: Any NotSupportedError = ... # type: Any OperationalError = ... # type: Any ProgrammingError = ... # type: Any Warning = ... # type: Any in_transaction = ... # type: Any isolation_level = ... # type: Any row_factory = ... # type: Any text_factory = ... # type: Any total_changes = ... # type: Any def __init__(self, *args, **kwargs): ... def close(self) -> None: ... def commit(self) -> None: ... def create_aggregate(self, name: str, num_params: int, aggregate_class: type) -> None: ... def create_collation(self, name: str, callable: Any) -> None: ... def create_function(self, name: str, num_params: int, func: Any) -> None: ... def cursor(self, cursorClass: Optional[type] = ...) -> Cursor: ... def execute(self, sql: str, parameters: Iterable = ...) -> Cursor: ... # TODO: please check in executemany() if seq_of_parameters type is possible like this def executemany(self, sql: str, seq_of_parameters: Iterable[Iterable]) -> Cursor: ... def executescript(self, sql_script: Union[bytes, str]) -> Cursor: ... def interrupt(self, *args, **kwargs) -> None: ... def iterdump(self, *args, **kwargs) -> None: ... def rollback(self, *args, **kwargs) -> None: ... # TODO: set_authorizer(authorzer_callback) # see https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.set_authorizer # returns [SQLITE_OK, SQLITE_DENY, SQLITE_IGNORE] so perhaps int def set_authorizer(self, *args, **kwargs) -> None: ... # set_progress_handler(handler, n) -> see https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.set_progress_handler def set_progress_handler(self, *args, **kwargs) -> None: ... def set_trace_callback(self, *args, **kwargs): ... def __call__(self, *args, **kwargs): ... def __enter__(self, *args, **kwargs): ... def __exit__(self, *args, **kwargs): ... class Cursor(Iterator[Any]): arraysize = ... # type: Any connection = ... # type: Any description = ... # type: Any lastrowid = ... # type: Any row_factory = ... # type: Any rowcount = ... # type: Any # TODO: Cursor class accepts exactly 1 argument # required type is sqlite3.Connection (which is imported as _Connection) # however, the name of the __init__ variable is unknown def __init__(self, *args, **kwargs): ... def close(self, *args, **kwargs): ... def execute(self, sql: str, parameters: Iterable = ...) -> Cursor: ... def executemany(self, sql: str, seq_of_parameters: Iterable[Iterable]): ... def executescript(self, sql_script: Union[bytes, str]) -> Cursor: ... def fetchall(self) -> List[Any]: ... def fetchmany(self, size: Optional[int] = ...) -> List[Any]: ... def fetchone(self) -> Any: ... 
def setinputsizes(self, *args, **kwargs): ... def setoutputsize(self, *args, **kwargs): ... def __iter__(self) -> Cursor: ... def __next__(self) -> Any: ... class DataError(DatabaseError): ... class DatabaseError(Error): ... class Error(Exception): ... class IntegrityError(DatabaseError): ... class InterfaceError(Error): ... class InternalError(DatabaseError): ... class NotSupportedError(DatabaseError): ... class OperationalError(DatabaseError): ... class OptimizedUnicode: maketrans = ... # type: Any def __init__(self, *args, **kwargs): ... def capitalize(self, *args, **kwargs): ... def casefold(self, *args, **kwargs): ... def center(self, *args, **kwargs): ... def count(self, *args, **kwargs): ... def encode(self, *args, **kwargs): ... def endswith(self, *args, **kwargs): ... def expandtabs(self, *args, **kwargs): ... def find(self, *args, **kwargs): ... def format(self, *args, **kwargs): ... def format_map(self, *args, **kwargs): ... def index(self, *args, **kwargs): ... def isalnum(self, *args, **kwargs): ... def isalpha(self, *args, **kwargs): ... def isdecimal(self, *args, **kwargs): ... def isdigit(self, *args, **kwargs): ... def isidentifier(self, *args, **kwargs): ... def islower(self, *args, **kwargs): ... def isnumeric(self, *args, **kwargs): ... def isprintable(self, *args, **kwargs): ... def isspace(self, *args, **kwargs): ... def istitle(self, *args, **kwargs): ... def isupper(self, *args, **kwargs): ... def join(self, *args, **kwargs): ... def ljust(self, *args, **kwargs): ... def lower(self, *args, **kwargs): ... def lstrip(self, *args, **kwargs): ... def partition(self, *args, **kwargs): ... def replace(self, *args, **kwargs): ... def rfind(self, *args, **kwargs): ... def rindex(self, *args, **kwargs): ... def rjust(self, *args, **kwargs): ... def rpartition(self, *args, **kwargs): ... def rsplit(self, *args, **kwargs): ... def rstrip(self, *args, **kwargs): ... def split(self, *args, **kwargs): ... def splitlines(self, *args, **kwargs): ... def startswith(self, *args, **kwargs): ... def strip(self, *args, **kwargs): ... def swapcase(self, *args, **kwargs): ... def title(self, *args, **kwargs): ... def translate(self, *args, **kwargs): ... def upper(self, *args, **kwargs): ... def zfill(self, *args, **kwargs): ... def __add__(self, other): ... def __contains__(self, *args, **kwargs): ... def __eq__(self, other): ... def __format__(self, *args, **kwargs): ... def __ge__(self, other): ... def __getitem__(self, index): ... def __getnewargs__(self, *args, **kwargs): ... def __gt__(self, other): ... def __hash__(self): ... def __iter__(self): ... def __le__(self, other): ... def __len__(self, *args, **kwargs): ... def __lt__(self, other): ... def __mod__(self, other): ... def __mul__(self, other): ... def __ne__(self, other): ... def __rmod__(self, other): ... def __rmul__(self, other): ... class PrepareProtocol: def __init__(self, *args, **kwargs): ... class ProgrammingError(DatabaseError): ... class Row: def __init__(self, *args, **kwargs): ... def keys(self, *args, **kwargs): ... def __eq__(self, other): ... def __ge__(self, other): ... def __getitem__(self, index): ... def __gt__(self, other): ... def __hash__(self): ... def __iter__(self): ... def __le__(self, other): ... def __len__(self, *args, **kwargs): ... def __lt__(self, other): ... def __ne__(self, other): ... class Statement: def __init__(self, *args, **kwargs): ... class Warning(Exception): ... 
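# Hedged usage sketch (illustration only, not part of the typeshed stub): the
# Connection and Cursor interfaces declared above, using an in-memory database so
# nothing touches disk.
import sqlite3

def _sqlite3_demo() -> None:
    conn = sqlite3.connect(':memory:')                    # connect(database, ...) -> Connection
    conn.execute('CREATE TABLE t (x INTEGER)')            # execute(sql, parameters=...) -> Cursor
    conn.executemany('INSERT INTO t VALUES (?)', [(1,), (2,), (3,)])
    cur = conn.execute('SELECT x FROM t ORDER BY x')
    print(cur.fetchall())                                 # fetchall() -> List[Any]
    conn.close()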
mypy-0.560/typeshed/stdlib/3/sre_constants.pyi0000644€tŠÔÚ€2›s®0000001116213215007212025546 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://github.com/python/cpython/blob/master/Lib/sre_constants.py from typing import Any, Dict, List, Optional, Union MAGIC = ... # type: int class error(Exception): msg = ... # type: str pattern = ... # type: Optional[Union[str, bytes]] pos = ... # type: Optional[int] lineno = ... # type: int colno = ... # type: int def __init__(self, msg: str, pattern: Union[str, bytes] = ..., pos: int = ...) -> None: ... class _NamedIntConstant(int): name = ... # type: Any def __new__(cls, value: int, name: str): ... MAXREPEAT = ... # type: _NamedIntConstant OPCODES = ... # type: List[_NamedIntConstant] ATCODES = ... # type: List[_NamedIntConstant] CHCODES = ... # type: List[_NamedIntConstant] OP_IGNORE = ... # type: Dict[_NamedIntConstant, _NamedIntConstant] AT_MULTILINE = ... # type: Dict[_NamedIntConstant, _NamedIntConstant] AT_LOCALE = ... # type: Dict[_NamedIntConstant, _NamedIntConstant] AT_UNICODE = ... # type: Dict[_NamedIntConstant, _NamedIntConstant] CH_LOCALE = ... # type: Dict[_NamedIntConstant, _NamedIntConstant] CH_UNICODE = ... # type: Dict[_NamedIntConstant, _NamedIntConstant] SRE_FLAG_TEMPLATE = ... # type: int SRE_FLAG_IGNORECASE = ... # type: int SRE_FLAG_LOCALE = ... # type: int SRE_FLAG_MULTILINE = ... # type: int SRE_FLAG_DOTALL = ... # type: int SRE_FLAG_UNICODE = ... # type: int SRE_FLAG_VERBOSE = ... # type: int SRE_FLAG_DEBUG = ... # type: int SRE_FLAG_ASCII = ... # type: int SRE_INFO_PREFIX = ... # type: int SRE_INFO_LITERAL = ... # type: int SRE_INFO_CHARSET = ... # type: int # Stubgen above; manually defined constants below (dynamic at runtime) # from OPCODES FAILURE = ... # type: _NamedIntConstant SUCCESS = ... # type: _NamedIntConstant ANY = ... # type: _NamedIntConstant ANY_ALL = ... # type: _NamedIntConstant ASSERT = ... # type: _NamedIntConstant ASSERT_NOT = ... # type: _NamedIntConstant AT = ... # type: _NamedIntConstant BRANCH = ... # type: _NamedIntConstant CALL = ... # type: _NamedIntConstant CATEGORY = ... # type: _NamedIntConstant CHARSET = ... # type: _NamedIntConstant BIGCHARSET = ... # type: _NamedIntConstant GROUPREF = ... # type: _NamedIntConstant GROUPREF_EXISTS = ... # type: _NamedIntConstant GROUPREF_IGNORE = ... # type: _NamedIntConstant IN = ... # type: _NamedIntConstant IN_IGNORE = ... # type: _NamedIntConstant INFO = ... # type: _NamedIntConstant JUMP = ... # type: _NamedIntConstant LITERAL = ... # type: _NamedIntConstant LITERAL_IGNORE = ... # type: _NamedIntConstant MARK = ... # type: _NamedIntConstant MAX_UNTIL = ... # type: _NamedIntConstant MIN_UNTIL = ... # type: _NamedIntConstant NOT_LITERAL = ... # type: _NamedIntConstant NOT_LITERAL_IGNORE = ... # type: _NamedIntConstant NEGATE = ... # type: _NamedIntConstant RANGE = ... # type: _NamedIntConstant REPEAT = ... # type: _NamedIntConstant REPEAT_ONE = ... # type: _NamedIntConstant SUBPATTERN = ... # type: _NamedIntConstant MIN_REPEAT_ONE = ... # type: _NamedIntConstant RANGE_IGNORE = ... # type: _NamedIntConstant MIN_REPEAT = ... # type: _NamedIntConstant MAX_REPEAT = ... # type: _NamedIntConstant # from ATCODES AT_BEGINNING = ... # type: _NamedIntConstant AT_BEGINNING_LINE = ... # type: _NamedIntConstant AT_BEGINNING_STRING = ... # type: _NamedIntConstant AT_BOUNDARY = ... # type: _NamedIntConstant AT_NON_BOUNDARY = ... # type: _NamedIntConstant AT_END = ... # type: _NamedIntConstant AT_END_LINE = ... # type: _NamedIntConstant AT_END_STRING = ... 
# type: _NamedIntConstant AT_LOC_BOUNDARY = ... # type: _NamedIntConstant AT_LOC_NON_BOUNDARY = ... # type: _NamedIntConstant AT_UNI_BOUNDARY = ... # type: _NamedIntConstant AT_UNI_NON_BOUNDARY = ... # type: _NamedIntConstant # from CHCODES CATEGORY_DIGIT = ... # type: _NamedIntConstant CATEGORY_NOT_DIGIT = ... # type: _NamedIntConstant CATEGORY_SPACE = ... # type: _NamedIntConstant CATEGORY_NOT_SPACE = ... # type: _NamedIntConstant CATEGORY_WORD = ... # type: _NamedIntConstant CATEGORY_NOT_WORD = ... # type: _NamedIntConstant CATEGORY_LINEBREAK = ... # type: _NamedIntConstant CATEGORY_NOT_LINEBREAK = ... # type: _NamedIntConstant CATEGORY_LOC_WORD = ... # type: _NamedIntConstant CATEGORY_LOC_NOT_WORD = ... # type: _NamedIntConstant CATEGORY_UNI_DIGIT = ... # type: _NamedIntConstant CATEGORY_UNI_NOT_DIGIT = ... # type: _NamedIntConstant CATEGORY_UNI_SPACE = ... # type: _NamedIntConstant CATEGORY_UNI_NOT_SPACE = ... # type: _NamedIntConstant CATEGORY_UNI_WORD = ... # type: _NamedIntConstant CATEGORY_UNI_NOT_WORD = ... # type: _NamedIntConstant CATEGORY_UNI_LINEBREAK = ... # type: _NamedIntConstant CATEGORY_UNI_NOT_LINEBREAK = ... # type: _NamedIntConstant mypy-0.560/typeshed/stdlib/3/sre_parse.pyi0000644€tŠÔÚ€2›s®0000000631613215007212024651 0ustar jukkaDROPBOX\Domain Users00000000000000# Source: https://github.com/python/cpython/blob/master/Lib/sre_parse.py from typing import ( Any, Dict, FrozenSet, Iterable, List, Match, Optional, Pattern as _Pattern, Tuple, Union ) from sre_constants import _NamedIntConstant as NIC, error as _Error SPECIAL_CHARS = ... # type: str REPEAT_CHARS = ... # type: str DIGITS = ... # type: FrozenSet[str] OCTDIGITS = ... # type: FrozenSet[str] HEXDIGITS = ... # type: FrozenSet[str] ASCIILETTERS = ... # type: FrozenSet[str] WHITESPACE = ... # type: FrozenSet[str] ESCAPES = ... # type: Dict[str, Tuple[NIC, int]] CATEGORIES = ... # type: Dict[str, Union[Tuple[NIC, NIC], Tuple[NIC, List[Tuple[NIC, NIC]]]]] FLAGS = ... # type: Dict[str, int] GLOBAL_FLAGS = ... # type: int class Verbose(Exception): ... class Pattern: flags = ... # type: int groupdict = ... # type: Dict[str, int] groupwidths = ... # type: List[Optional[int]] lookbehindgroups = ... # type: Optional[int] def __init__(self) -> None: ... @property def groups(self) -> int: ... def opengroup(self, name: str = ...) -> int: ... def closegroup(self, gid: int, p: SubPattern) -> None: ... def checkgroup(self, gid: int) -> bool: ... def checklookbehindgroup(self, gid: int, source: Tokenizer) -> None: ... _OpSubpatternType = Tuple[Optional[int], int, int, SubPattern] _OpGroupRefExistsType = Tuple[int, SubPattern, SubPattern] _OpInType = List[Tuple[NIC, int]] _OpBranchType = Tuple[None, List[SubPattern]] _AvType = Union[_OpInType, _OpBranchType, Iterable[SubPattern], _OpGroupRefExistsType, _OpSubpatternType] _CodeType = Tuple[NIC, _AvType] class SubPattern: pattern = ... # type: Pattern data = ... # type: List[_CodeType] width = ... # type: Optional[int] def __init__(self, pattern: Pattern, data: List[_CodeType] = ...) -> None: ... def dump(self, level: int = ...) -> None: ... def __len__(self) -> int: ... def __delitem__(self, index: Union[int, slice]) -> None: ... def __getitem__(self, index: Union[int, slice]) -> Union[SubPattern, _CodeType]: ... def __setitem__(self, index: Union[int, slice], code: _CodeType) -> None: ... def insert(self, index: int, code: _CodeType) -> None: ... def append(self, code: _CodeType) -> None: ... def getwidth(self) -> int: ... class Tokenizer: istext = ... # type: bool string = ... 
# type: Any decoded_string = ... # type: str index = ... # type: int next = ... # type: Optional[str] def __init__(self, string: Any) -> None: ... def match(self, char: str) -> bool: ... def get(self) -> Optional[str]: ... def getwhile(self, n: int, charset: Iterable[str]) -> str: ... def getuntil(self, terminator: str) -> str: ... @property def pos(self) -> int: ... def tell(self) -> int: ... def seek(self, index: int) -> None: ... def error(self, msg: str, offset: int = ...) -> _Error: ... def fix_flags(src: Union[str, bytes], flag: int) -> int: ... def parse(str: str, flags: int = ..., pattern: Pattern = ...) -> SubPattern: ... _TemplateType = Tuple[List[Tuple[int, int]], List[str]] def parse_template(source: str, pattern: _Pattern) -> _TemplateType: ... def expand_template(template: _TemplateType, match: Match) -> str: ... mypy-0.560/typeshed/stdlib/3/ssl.pyi0000644€tŠÔÚ€2›s®0000003073113215007212023465 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for ssl (Python 3.4) from typing import ( Any, Dict, Callable, List, NamedTuple, Optional, Set, Tuple, Union, ) import socket import sys _PCTRTT = Tuple[Tuple[str, str], ...] _PCTRTTT = Tuple[_PCTRTT, ...] _PeerCertRetDictType = Dict[str, Union[str, _PCTRTTT, _PCTRTT]] _PeerCertRetType = Union[_PeerCertRetDictType, bytes, None] _EnumRetType = List[Tuple[bytes, str, Union[Set[str], bool]]] _PasswordType = Union[Callable[[], Union[str, bytes]], str, bytes] if sys.version_info >= (3, 5): _SC1ArgT = Union[SSLSocket, SSLObject] else: _SC1ArgT = SSLSocket _SrvnmeCbType = Callable[[_SC1ArgT, Optional[str], 'SSLSocket'], Optional[int]] class SSLError(OSError): library = ... # type: str reason = ... # type: str class SSLZeroReturnError(SSLError): ... class SSLWantReadError(SSLError): ... class SSLWantWriteError(SSLError): ... class SSLSyscallError(SSLError): ... class SSLEOFError(SSLError): ... class CertificateError(Exception): ... def wrap_socket(sock: socket.socket, keyfile: Optional[str] = ..., certfile: Optional[str] = ..., server_side: bool = ..., cert_reqs: int = ..., ssl_version: int = ..., ca_certs: Optional[str] = ..., do_handshake_on_connect: bool = ..., suppress_ragged_eofs: bool = ..., ciphers: Optional[str] = ...) -> 'SSLSocket': ... if sys.version_info >= (3, 4): def create_default_context(purpose: Any = ..., *, cafile: Optional[str] = ..., capath: Optional[str] = ..., cadata: Optional[str] = ...) -> 'SSLContext': ... def _create_unverified_context(protocol: int = ..., *, cert_reqs: int = ..., check_hostname: bool = ..., purpose: Any = ..., certfile: Optional[str] = ..., keyfile: Optional[str] = ..., cafile: Optional[str] = ..., capath: Optional[str] = ..., cadata: Optional[str] = ...) -> 'SSLContext': ... _create_default_https_context = ... # type: Callable[..., 'SSLContext'] def RAND_bytes(num: int) -> bytes: ... def RAND_pseudo_bytes(num: int) -> Tuple[bytes, bool]: ... def RAND_status() -> bool: ... def RAND_egd(path: str) -> None: ... def RAND_add(bytes: bytes, entropy: float) -> None: ... def match_hostname(cert: _PeerCertRetType, hostname: str) -> None: ... def cert_time_to_seconds(cert_time: str) -> int: ... def get_server_certificate(addr: Tuple[str, int], ssl_version: int = ..., ca_certs: Optional[str] = ...) -> str: ... def DER_cert_to_PEM_cert(der_cert_bytes: bytes) -> str: ... def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: ... 
if sys.version_info >= (3, 4): DefaultVerifyPaths = NamedTuple('DefaultVerifyPaths', [('cafile', str), ('capath', str), ('openssl_cafile_env', str), ('openssl_cafile', str), ('openssl_capath_env', str), ('openssl_capath', str)]) def get_default_verify_paths() -> DefaultVerifyPaths: ... if sys.version_info >= (3, 4) and sys.platform == 'win32': def enum_certificates(store_name: str) -> _EnumRetType: ... def enum_crls(store_name: str) -> _EnumRetType: ... CERT_NONE = ... # type: int CERT_OPTIONAL = ... # type: int CERT_REQUIRED = ... # type: int if sys.version_info >= (3, 4): VERIFY_DEFAULT = ... # type: int VERIFY_CRL_CHECK_LEAF = ... # type: int VERIFY_CRL_CHECK_CHAIN = ... # type: int VERIFY_X509_STRICT = ... # type: int VERIFY_X509_TRUSTED_FIRST = ... # type: int PROTOCOL_SSLv23 = ... # type: int PROTOCOL_SSLv2 = ... # type: int PROTOCOL_SSLv3 = ... # type: int PROTOCOL_TLSv1 = ... # type: int if sys.version_info >= (3, 4): PROTOCOL_TLSv1_1 = ... # type: int PROTOCOL_TLSv1_2 = ... # type: int if sys.version_info >= (3, 5): PROTOCOL_TLS = ... # type: int if sys.version_info >= (3, 6): PROTOCOL_TLS_CLIENT = ... # type: int PROTOCOL_TLS_SERVER = ... # type: int OP_ALL = ... # type: int OP_NO_SSLv2 = ... # type: int OP_NO_SSLv3 = ... # type: int OP_NO_TLSv1 = ... # type: int if sys.version_info >= (3, 4): OP_NO_TLSv1_1 = ... # type: int OP_NO_TLSv1_2 = ... # type: int OP_CIPHER_SERVER_PREFERENCE = ... # type: int OP_SINGLE_DH_USE = ... # type: int OP_SINGLE_ECDH_USE = ... # type: int OP_NO_COMPRESSION = ... # type: int if sys.version_info >= (3, 6): OP_NO_TICKET = ... # type: int if sys.version_info >= (3, 5): HAS_ALPN = ... # type: int HAS_ECDH = ... # type: bool HAS_SNI = ... # type: bool HAS_NPN = ... # type: bool CHANNEL_BINDING_TYPES = ... # type: List[str] OPENSSL_VERSION = ... # type: str OPENSSL_VERSION_INFO = ... # type: Tuple[int, int, int, int, int] OPENSSL_VERSION_NUMBER = ... # type: int if sys.version_info >= (3, 4): ALERT_DESCRIPTION_HANDSHAKE_FAILURE = ... # type: int ALERT_DESCRIPTION_INTERNAL_ERROR = ... # type: int ALERT_DESCRIPTION_ACCESS_DENIED = ... # type: int ALERT_DESCRIPTION_BAD_CERTIFICATE = ... # type: int ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE = ... # type: int ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE = ... # type: int ALERT_DESCRIPTION_BAD_RECORD_MAC = ... # type: int ALERT_DESCRIPTION_CERTIFICATE_EXPIRED = ... # type: int ALERT_DESCRIPTION_CERTIFICATE_REVOKED = ... # type: int ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN = ... # type: int ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE = ... # type: int ALERT_DESCRIPTION_CLOSE_NOTIFY = ... # type: int ALERT_DESCRIPTION_DECODE_ERROR = ... # type: int ALERT_DESCRIPTION_DECOMPRESSION_FAILURE = ... # type: int ALERT_DESCRIPTION_DECRYPT_ERROR = ... # type: int ALERT_DESCRIPTION_ILLEGAL_PARAMETER = ... # type: int ALERT_DESCRIPTION_INSUFFICIENT_SECURITY = ... # type: int ALERT_DESCRIPTION_NO_RENEGOTIATION = ... # type: int ALERT_DESCRIPTION_PROTOCOL_VERSION = ... # type: int ALERT_DESCRIPTION_RECORD_OVERFLOW = ... # type: int ALERT_DESCRIPTION_UNEXPECTED_MESSAGE = ... # type: int ALERT_DESCRIPTION_UNKNOWN_CA = ... # type: int ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY = ... # type: int ALERT_DESCRIPTION_UNRECOGNIZED_NAME = ... # type: int ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE = ... # type: int ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION = ... # type: int ALERT_DESCRIPTION_USER_CANCELLED = ... 
# type: int if sys.version_info >= (3, 4): _PurposeType = NamedTuple('_PurposeType', [('nid', int), ('shortname', str), ('longname', str), ('oid', str)]) class Purpose: SERVER_AUTH = ... # type: _PurposeType CLIENT_AUTH = ... # type: _PurposeType class SSLSocket(socket.socket): context = ... # type: SSLContext server_side = ... # type: bool server_hostname = ... # type: Optional[str] if sys.version_info >= (3, 6): session = ... # type: Optional[SSLSession] session_reused = ... # type: Optional[bool] def read(self, len: int = ..., buffer: Optional[bytearray] = ...) -> bytes: ... def write(self, buf: bytes) -> int: ... def do_handshake(self) -> None: ... def getpeercert(self, binary_form: bool = ...) -> _PeerCertRetType: ... def cipher(self) -> Tuple[str, int, int]: ... if sys.version_info >= (3, 5): def shared_cipher(self) -> Optional[List[Tuple[str, int, int]]]: ... def compression(self) -> Optional[str]: ... def get_channel_binding(self, cb_type: str = ...) -> Optional[bytes]: ... if sys.version_info >= (3, 5): def selected_alpn_protocol(self) -> Optional[str]: ... def selected_npn_protocol(self) -> Optional[str]: ... def unwrap(self) -> socket.socket: ... if sys.version_info >= (3, 5): def version(self) -> Optional[str]: ... def pending(self) -> int: ... class SSLContext: if sys.version_info >= (3, 4): check_hostname = ... # type: bool options = ... # type: int @property def protocol(self) -> int: ... if sys.version_info >= (3, 4): verify_flags = ... # type: int verify_mode = ... # type: int def __init__(self, protocol: int) -> None: ... if sys.version_info >= (3, 4): def cert_store_stats(self) -> Dict[str, int]: ... def load_cert_chain(self, certfile: str, keyfile: Optional[str] = ..., password: _PasswordType = ...) -> None: ... if sys.version_info >= (3, 4): def load_default_certs(self, purpose: _PurposeType = ...) -> None: ... def load_verify_locations(self, cafile: Optional[str] = ..., capath: Optional[str] = ..., cadata: Union[str, bytes, None] = ...) -> None: ... def get_ca_certs(self, binary_form: bool = ...) -> Union[List[_PeerCertRetDictType], List[bytes]]: ... else: def load_verify_locations(self, cafile: Optional[str] = ..., capath: Optional[str] = ...) -> None: ... def set_default_verify_paths(self) -> None: ... def set_ciphers(self, ciphers: str) -> None: ... if sys.version_info >= (3, 5): def set_alpn_protocols(self, protocols: List[str]) -> None: ... def set_npn_protocols(self, protocols: List[str]) -> None: ... def set_servername_callback(self, server_name_callback: Optional[_SrvnmeCbType]) -> None: ... def load_dh_params(self, dhfile: str) -> None: ... def set_ecdh_curve(self, curve_name: str) -> None: ... def wrap_socket(self, sock: socket.socket, server_side: bool = ..., do_handshake_on_connect: bool = ..., suppress_ragged_eofs: bool = ..., server_hostname: Optional[str] = ...) -> SSLSocket: ... if sys.version_info >= (3, 5): def wrap_bio(self, incoming: 'MemoryBIO', outgoing: 'MemoryBIO', server_side: bool = ..., server_hostname: Optional[str] = ...) -> 'SSLObject': ... def session_stats(self) -> Dict[str, int]: ... if sys.version_info >= (3, 5): class SSLObject: context = ... # type: SSLContext server_side = ... # type: bool server_hostname = ... # type: Optional[str] if sys.version_info >= (3, 6): session = ... # type: Optional[SSLSession] session_reused = ... # type: bool def read(self, len: int = ..., buffer: Optional[bytearray] = ...) -> bytes: ... def write(self, buf: bytes) -> int: ... def getpeercert(self, binary_form: bool = ...) -> _PeerCertRetType: ... 
def selected_npn_protocol(self) -> Optional[str]: ... def cipher(self) -> Tuple[str, int, int]: ... def shared_cipher(self) -> Optional[List[Tuple[str, int, int]]]: ... def compression(self) -> Optional[str]: ... def pending(self) -> int: ... def do_handshake(self) -> None: ... def unwrap(self) -> None: ... def get_channel_binding(self, cb_type: str = ...) -> Optional[bytes]: ... class MemoryBIO: pending = ... # type: int eof = ... # type: bool def read(self, n: int = ...) -> bytes: ... def write(self, buf: bytes) -> int: ... def write_eof(self) -> None: ... if sys.version_info >= (3, 6): class SSLSession: id = ... # type: bytes time = ... # type: int timeout = ... # type: int ticket_lifetime_hint = ... # type: int has_ticket = ... # type: bool # TODO below documented in cpython but not in docs.python.org # taken from python 3.4 SSL_ERROR_EOF = ... # type: int SSL_ERROR_INVALID_ERROR_CODE = ... # type: int SSL_ERROR_SSL = ... # type: int SSL_ERROR_SYSCALL = ... # type: int SSL_ERROR_WANT_CONNECT = ... # type: int SSL_ERROR_WANT_READ = ... # type: int SSL_ERROR_WANT_WRITE = ... # type: int SSL_ERROR_WANT_X509_LOOKUP = ... # type: int SSL_ERROR_ZERO_RETURN = ... # type: int def get_protocol_name(protocol_code: int) -> str: ... AF_INET = ... # type: int PEM_FOOTER = ... # type: str PEM_HEADER = ... # type: str SOCK_STREAM = ... # type: int SOL_SOCKET = ... # type: int SO_TYPE = ... # type: int mypy-0.560/typeshed/stdlib/3/stat.pyi0000644€tŠÔÚ€2›s®0000000215213215007212023633 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for stat # Based on http://docs.python.org/3.2/library/stat.html import sys import typing def S_ISDIR(mode: int) -> bool: ... def S_ISCHR(mode: int) -> bool: ... def S_ISBLK(mode: int) -> bool: ... def S_ISREG(mode: int) -> bool: ... def S_ISFIFO(mode: int) -> bool: ... def S_ISLNK(mode: int) -> bool: ... def S_ISSOCK(mode: int) -> bool: ... def S_IMODE(mode: int) -> int: ... def S_IFMT(mode: int) -> int: ... ST_MODE = 0 ST_INO = 0 ST_DEV = 0 ST_NLINK = 0 ST_UID = 0 ST_GID = 0 ST_SIZE = 0 ST_ATIME = 0 ST_MTIME = 0 ST_CTIME = 0 S_IFSOCK = 0 S_IFLNK = 0 S_IFREG = 0 S_IFBLK = 0 S_IFDIR = 0 S_IFCHR = 0 S_IFIFO = 0 S_ISUID = 0 S_ISGID = 0 S_ISVTX = 0 S_IRWXU = 0 S_IRUSR = 0 S_IWUSR = 0 S_IXUSR = 0 S_IRWXG = 0 S_IRGRP = 0 S_IWGRP = 0 S_IXGRP = 0 S_IRWXO = 0 S_IROTH = 0 S_IWOTH = 0 S_IXOTH = 0 S_ENFMT = 0 S_IREAD = 0 S_IWRITE = 0 S_IEXEC = 0 UF_NODUMP = 0 UF_IMMUTABLE = 0 UF_APPEND = 0 UF_OPAQUE = 0 UF_NOUNLINK = 0 if sys.platform == 'darwin': UF_COMPRESSED = 0 # OS X 10.6+ only UF_HIDDEN = 0 # OX X 10.5+ only SF_ARCHIVED = 0 SF_IMMUTABLE = 0 SF_APPEND = 0 SF_NOUNLINK = 0 SF_SNAPSHOT = 0 mypy-0.560/typeshed/stdlib/3/string.pyi0000644€tŠÔÚ€2›s®0000000335313215007212024172 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for string # Based on http://docs.python.org/3.2/library/string.html from typing import Mapping, Sequence, Any, Optional, Union, List, Tuple, Iterable ascii_letters = ... # type: str ascii_lowercase = ... # type: str ascii_uppercase = ... # type: str digits = ... # type: str hexdigits = ... # type: str octdigits = ... # type: str punctuation = ... # type: str printable = ... # type: str whitespace = ... # type: str def capwords(s: str, sep: str = ...) -> str: ... class Template: template = ... # type: str def __init__(self, template: str) -> None: ... def substitute(self, mapping: Mapping[str, str] = ..., **kwds: str) -> str: ... def safe_substitute(self, mapping: Mapping[str, str] = ..., **kwds: str) -> str: ... 
# TODO(MichalPokorny): This is probably badly and/or loosely typed. class Formatter: def format(self, format_string: str, *args: Any, **kwargs: Any) -> str: ... def vformat(self, format_string: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> str: ... def parse(self, format_string: str) -> Iterable[Tuple[str, Optional[str], Optional[str], Optional[str]]]: ... def get_field(self, field_name: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... def get_value(self, key: Union[int, str], args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: raise IndexError() raise KeyError() def check_unused_args(self, used_args: Sequence[Union[int, str]], args: Sequence[Any], kwargs: Mapping[str, Any]) -> None: ... def format_field(self, value: Any, format_spec: str) -> Any: ... def convert_field(self, value: Any, conversion: str) -> Any: ... mypy-0.560/typeshed/stdlib/3/subprocess.pyi0000644€tŠÔÚ€2›s®0000003317313215007212025057 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for subprocess # Based on http://docs.python.org/3.6/library/subprocess.html import sys from typing import Sequence, Any, Mapping, Callable, Tuple, IO, Optional, Union, List, Type, Text from types import TracebackType _FILE = Union[None, int, IO[Any]] _TXT = Union[bytes, Text] if sys.version_info >= (3, 6): from builtins import _PathLike _PATH = Union[bytes, Text, _PathLike] else: _PATH = Union[bytes, Text] # Python 3.6 does't support _CMD being a single PathLike. # See: https://bugs.python.org/issue31961 _CMD = Union[_TXT, Sequence[_PATH]] _ENV = Union[Mapping[bytes, _TXT], Mapping[Text, _TXT]] if sys.version_info >= (3, 5): class CompletedProcess: # morally: _CMD args = ... # type: Any returncode = ... # type: int # morally: Optional[_TXT] stdout = ... # type: Any stderr = ... # type: Any def __init__(self, args: _CMD, returncode: int, stdout: Optional[_TXT] = ..., stderr: Optional[_TXT] = ...) -> None: ... def check_returncode(self) -> None: ... if sys.version_info >= (3, 6): # Nearly same args as Popen.__init__ except for timeout, input, and check def run(args: _CMD, timeout: Optional[float] = ..., input: Optional[_TXT] = ..., check: bool = ..., bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> CompletedProcess: ... else: # Nearly same args as Popen.__init__ except for timeout, input, and check def run(args: _CMD, timeout: Optional[float] = ..., input: Optional[_TXT] = ..., check: bool = ..., bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ...) -> CompletedProcess: ... 
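# --- Illustrative usage sketch (not part of the original stubs): calling
# --- run() as declared above on Python 3.5+. The command is hypothetical;
# --- check=True turns a non-zero exit status into CalledProcessError, and
# --- stdout=PIPE captures output on the returned CompletedProcess.
import subprocess

result = subprocess.run(["echo", "hello"], stdout=subprocess.PIPE, check=True)
print(result.returncode)  # 0 on success
print(result.stdout)      # captured bytes, b'hello\n' here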
# Same args as Popen.__init__ if sys.version_info >= (3, 3): # 3.3 added timeout def call(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., timeout: float = ...) -> int: ... else: def call(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ...) -> int: ... # Same args as Popen.__init__ if sys.version_info >= (3, 3): # 3.3 added timeout def check_call(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., timeout: float = ...) -> int: ... else: def check_call(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ...) -> int: ... if sys.version_info >= (3, 6): # 3.6 added encoding and errors def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, timeout: float = ..., input: _TXT = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., ) -> Any: ... # morally: -> _TXT elif sys.version_info >= (3, 4): # 3.4 added input def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., timeout: float = ..., input: _TXT = ..., ) -> Any: ... 
# morally: -> _TXT elif sys.version_info >= (3, 3): # 3.3 added timeout def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., timeout: float = ..., ) -> Any: ... # morally: -> _TXT else: # Same args as Popen.__init__, except for stdout def check_output(args: _CMD, bufsize: int = ..., executable: _PATH = ..., stdin: _FILE = ..., stderr: _FILE = ..., preexec_fn: Callable[[], Any] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., ) -> Any: ... # morally: -> _TXT PIPE = ... # type: int STDOUT = ... # type: int if sys.version_info >= (3, 3): DEVNULL = ... # type: int class SubprocessError(Exception): ... class TimeoutExpired(SubprocessError): ... class CalledProcessError(Exception): returncode = 0 # morally: _CMD cmd = ... # type: Any # morally: Optional[_TXT] output = ... # type: Any if sys.version_info >= (3, 5): # morally: Optional[_TXT] stdout = ... # type: Any stderr = ... # type: Any def __init__(self, returncode: int, cmd: _CMD, output: Optional[_TXT] = ..., stderr: Optional[_TXT] = ...) -> None: ... class Popen: if sys.version_info >= (3, 3): args = ... # type: _CMD stdin = ... # type: IO[Any] stdout = ... # type: IO[Any] stderr = ... # type: IO[Any] pid = 0 returncode = 0 if sys.version_info >= (3, 6): def __init__(self, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ..., *, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> None: ... else: def __init__(self, args: _CMD, bufsize: int = ..., executable: Optional[_PATH] = ..., stdin: Optional[_FILE] = ..., stdout: Optional[_FILE] = ..., stderr: Optional[_FILE] = ..., preexec_fn: Optional[Callable[[], Any]] = ..., close_fds: bool = ..., shell: bool = ..., cwd: Optional[_PATH] = ..., env: Optional[_ENV] = ..., universal_newlines: bool = ..., startupinfo: Optional[Any] = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., pass_fds: Any = ...) -> None: ... def poll(self) -> int: ... if sys.version_info >= (3, 3): # 3.3 added timeout def wait(self, timeout: Optional[float] = ...) -> int: ... else: def wait(self) ->int: ... # Return str/bytes if sys.version_info >= (3, 3): def communicate(self, input: Optional[_TXT] = ..., timeout: Optional[float] = ..., # morally: -> Tuple[Optional[_TXT], Optional[_TXT]] ) -> Tuple[Any, Any]: ... else: def communicate(self, input: Optional[_TXT] = ..., # morally: -> Tuple[Optional[_TXT], Optional[_TXT]] ) -> Tuple[Any, Any]: ... def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... def __enter__(self) -> 'Popen': ... 
def __exit__(self, type: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType]) -> bool: ... # The result really is always a str. def getstatusoutput(cmd: _TXT) -> Tuple[int, str]: ... def getoutput(cmd: _TXT) -> str: ... # Windows-only: STARTUPINFO etc. mypy-0.560/typeshed/stdlib/3/symbol.pyi0000644€tŠÔÚ€2›s®0000000540213215007212024166 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for symbol (Python 3) import sys from typing import Dict single_input = ... # type: int file_input = ... # type: int eval_input = ... # type: int decorator = ... # type: int decorators = ... # type: int decorated = ... # type: int if sys.version_info >= (3, 5): async_funcdef = ... # type: int funcdef = ... # type: int parameters = ... # type: int typedargslist = ... # type: int tfpdef = ... # type: int varargslist = ... # type: int vfpdef = ... # type: int stmt = ... # type: int simple_stmt = ... # type: int small_stmt = ... # type: int expr_stmt = ... # type: int if sys.version_info >= (3, 6): annassign = ... # type: int testlist_star_expr = ... # type: int augassign = ... # type: int del_stmt = ... # type: int pass_stmt = ... # type: int flow_stmt = ... # type: int break_stmt = ... # type: int continue_stmt = ... # type: int return_stmt = ... # type: int yield_stmt = ... # type: int raise_stmt = ... # type: int import_stmt = ... # type: int import_name = ... # type: int import_from = ... # type: int import_as_name = ... # type: int dotted_as_name = ... # type: int import_as_names = ... # type: int dotted_as_names = ... # type: int dotted_name = ... # type: int global_stmt = ... # type: int nonlocal_stmt = ... # type: int assert_stmt = ... # type: int compound_stmt = ... # type: int if sys.version_info >= (3, 5): async_stmt = ... # type: int if_stmt = ... # type: int while_stmt = ... # type: int for_stmt = ... # type: int try_stmt = ... # type: int with_stmt = ... # type: int with_item = ... # type: int except_clause = ... # type: int suite = ... # type: int test = ... # type: int test_nocond = ... # type: int lambdef = ... # type: int lambdef_nocond = ... # type: int or_test = ... # type: int and_test = ... # type: int not_test = ... # type: int comparison = ... # type: int comp_op = ... # type: int star_expr = ... # type: int expr = ... # type: int xor_expr = ... # type: int and_expr = ... # type: int shift_expr = ... # type: int arith_expr = ... # type: int term = ... # type: int factor = ... # type: int power = ... # type: int if sys.version_info >= (3, 5): atom_expr = ... # type: int atom = ... # type: int testlist_comp = ... # type: int trailer = ... # type: int subscriptlist = ... # type: int subscript = ... # type: int sliceop = ... # type: int exprlist = ... # type: int testlist = ... # type: int dictorsetmaker = ... # type: int classdef = ... # type: int arglist = ... # type: int argument = ... # type: int comp_iter = ... # type: int comp_for = ... # type: int comp_if = ... # type: int encoding_decl = ... # type: int yield_expr = ... # type: int if sys.version_info >= (3, 3): yield_arg = ... # type: int sym_name = ... 
# type: Dict[int, str] mypy-0.560/typeshed/stdlib/3/sys.pyi0000644€tŠÔÚ€2›s®0000001164513215007212023505 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for sys # Ron Murawski # based on http://docs.python.org/3.2/library/sys.html from typing import ( List, Sequence, Any, Dict, Tuple, TextIO, overload, Optional, Union, TypeVar, Callable, Type, ) import sys from types import FrameType, TracebackType from mypy_extensions import NoReturn _T = TypeVar('_T') # ----- sys variables ----- abiflags = ... # type: str argv = ... # type: List[str] byteorder = ... # type: str builtin_module_names = ... # type: Sequence[str] # actually a tuple of strings copyright = ... # type: str # dllhandle = 0 # Windows only dont_write_bytecode: bool __displayhook__ = ... # type: Any # contains the original value of displayhook __excepthook__ = ... # type: Any # contains the original value of excepthook exec_prefix = ... # type: str executable = ... # type: str float_repr_style = ... # type: str hexversion: int last_type = ... # type: Any last_value = ... # type: Any last_traceback = ... # type: Any maxsize: int maxunicode: int meta_path = ... # type: List[Any] modules = ... # type: Dict[str, Any] path = ... # type: List[str] path_hooks = ... # type: List[Any] # TODO precise type; function, path to finder path_importer_cache = ... # type: Dict[str, Any] # TODO precise type platform = ... # type: str prefix = ... # type: str ps1 = ... # type: str ps2 = ... # type: str stdin = ... # type: TextIO stdout = ... # type: TextIO stderr = ... # type: TextIO __stdin__ = ... # type: TextIO __stdout__ = ... # type: TextIO __stderr__ = ... # type: TextIO # deprecated and removed in Python 3.3: subversion = ... # type: Tuple[str, str, str] tracebacklimit = 0 version = ... # type: str api_version = 0 warnoptions = ... # type: Any # Each entry is a tuple of the form (action, message, category, module, # lineno) # winver = '' # Windows only _xoptions = ... # type: Dict[Any, Any] flags = ... # type: _flags class _flags: debug = 0 division_warning = 0 inspect = 0 interactive = 0 optimize = 0 dont_write_bytecode = 0 no_user_site = 0 no_site = 0 ignore_environment = 0 verbose = 0 bytes_warning = 0 quiet = 0 hash_randomization = 0 float_info = ... # type: _float_info class _float_info: epsilon = 0.0 # DBL_EPSILON dig = 0 # DBL_DIG mant_dig = 0 # DBL_MANT_DIG max = 0.0 # DBL_MAX max_exp = 0 # DBL_MAX_EXP max_10_exp = 0 # DBL_MAX_10_EXP min = 0.0 # DBL_MIN min_exp = 0 # DBL_MIN_EXP min_10_exp = 0 # DBL_MIN_10_EXP radix = 0 # FLT_RADIX rounds = 0 # FLT_ROUNDS hash_info = ... # type: _hash_info class _hash_info: width = 0 modulus = 0 inf = 0 nan = 0 imag = 0 int_info = ... # type: _int_info class _int_info: bits_per_digit = 0 sizeof_digit = 0 class _version_info(Tuple[int, int, int, str, int]): major = 0 minor = 0 micro = 0 releaselevel = ... # type: str serial = 0 version_info = ... # type: _version_info def call_tracing(fn: Callable[..., _T], args: Any) -> _T: ... def _clear_type_cache() -> None: ... def _current_frames() -> Dict[int, Any]: ... def displayhook(value: Optional[int]) -> None: ... def excepthook(type_: Type[BaseException], value: BaseException, traceback: TracebackType) -> None: ... # TODO should be a union of tuple, see mypy#1178 def exc_info() -> Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]]: ... # sys.exit() accepts an optional argument of anything printable def exit(arg: object = ...) -> NoReturn: raise SystemExit() def getcheckinterval() -> int: ... 
# deprecated def getdefaultencoding() -> str: ... def getdlopenflags() -> int: ... # Unix only def getfilesystemencoding() -> str: ... def getrefcount(arg: Any) -> int: ... def getrecursionlimit() -> int: ... @overload def getsizeof(obj: object) -> int: ... @overload def getsizeof(obj: object, default: int) -> int: ... def getswitchinterval() -> float: ... @overload def _getframe() -> FrameType: ... @overload def _getframe(depth: int) -> FrameType: ... _ProfileFunc = Callable[[FrameType, str, Any], Any] def getprofile() -> Optional[_ProfileFunc]: ... def setprofile(profilefunc: Optional[_ProfileFunc]) -> None: ... _TraceFunc = Callable[[FrameType, str, Any], Optional[Callable[[FrameType, str, Any], Any]]] def gettrace() -> Optional[_TraceFunc]: ... def settrace(tracefunc: _TraceFunc) -> None: ... def getwindowsversion() -> Any: ... # Windows only, TODO return type def intern(string: str) -> str: ... if sys.version_info >= (3, 5): def is_finalizing() -> bool: ... def setcheckinterval(interval: int) -> None: ... # deprecated def setdlopenflags(n: int) -> None: ... # Linux only def setrecursionlimit(limit: int) -> None: ... def setswitchinterval(interval: float) -> None: ... def settscdump(on_flag: bool) -> None: ... def gettotalrefcount() -> int: ... # Debug builds only mypy-0.560/typeshed/stdlib/3/tempfile.pyi0000644€tŠÔÚ€2›s®0000000724413215007212024474 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for tempfile # Ron Murawski # based on http://docs.python.org/3.3/library/tempfile.html import sys from types import TracebackType from typing import Any, AnyStr, Generic, IO, Optional, Tuple, Type # global variables TMP_MAX: int tempdir = ... # type: Optional[str] template = ... # type: str if sys.version_info >= (3, 5): def TemporaryFile( mode: str = ..., buffering: int = ..., encoding: str = ..., newline: str = ..., suffix: Optional[AnyStr]= ..., prefix: Optional[AnyStr] = ..., dir: Optional[AnyStr] = ... ) -> IO[Any]: ... def NamedTemporaryFile( mode: str = ..., buffering: int = ..., encoding: str = ..., newline: str = ..., suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[AnyStr] = ..., delete: bool =... ) -> IO[Any]: ... def SpooledTemporaryFile( max_size: int = ..., mode: str = ..., buffering: int = ..., encoding: str = ..., newline: str = ..., suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[AnyStr] = ... ) -> IO[Any]: ... class TemporaryDirectory(Generic[AnyStr]): name = ... # type: str def __init__(self, suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[AnyStr] = ...) -> None: ... def cleanup(self) -> None: ... def __enter__(self) -> AnyStr: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> bool: ... def mkstemp(suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[AnyStr] = ..., text: bool = ...) -> Tuple[int, AnyStr]: ... def mkdtemp(suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[str] = ...) -> AnyStr: ... def mktemp(suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[AnyStr] = ...) -> AnyStr: ... def gettempdirb() -> bytes: ... def gettempprefixb() -> bytes: ... else: def TemporaryFile( mode: str = ..., buffering: int = ..., encoding: str = ..., newline: str = ..., suffix: str = ..., prefix: str = ..., dir: Optional[str] = ... ) -> IO[Any]: ... 
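# --- Illustrative usage sketch (not part of the original stubs): the
# --- TemporaryDirectory context manager declared above. The scratch file name
# --- is hypothetical; the directory and its contents are removed when the
# --- with-block exits.
import os
import tempfile

with tempfile.TemporaryDirectory() as tmpdir:
    path = os.path.join(tmpdir, "scratch.txt")
    with open(path, "w") as f:
        f.write("temporary data")
    print(os.listdir(tmpdir))  # ['scratch.txt']
# tmpdir no longer exists here.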
def NamedTemporaryFile( mode: str = ..., buffering: int = ..., encoding: str = ..., newline: str = ..., suffix: str = ..., prefix: str = ..., dir: Optional[str] = ..., delete: bool =... ) -> IO[Any]: ... def SpooledTemporaryFile( max_size: int = ..., mode: str = ..., buffering: int = ..., encoding: str = ..., newline: str = ..., suffix: str = ..., prefix: str = ..., dir: Optional[str] = ... ) -> IO[Any]: ... class TemporaryDirectory: name = ... # type: str def __init__(self, suffix: str = ..., prefix: str = ..., dir: Optional[str] = ...) -> None: ... def cleanup(self) -> None: ... def __enter__(self) -> str: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> bool: ... def mkstemp(suffix: str = ..., prefix: str = ..., dir: Optional[str] = ..., text: bool = ...) -> Tuple[int, str]: ... def mkdtemp(suffix: str = ..., prefix: str = ..., dir: Optional[str] = ...) -> str: ... def mktemp(suffix: str = ..., prefix: str = ..., dir: Optional[str] = ...) -> str: ... def gettempdir() -> str: ... def gettempprefix() -> str: ... mypy-0.560/typeshed/stdlib/3/textwrap.pyi0000644€tŠÔÚ€2›s®0000000660113215007212024541 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Callable, List, Optional, Dict, Pattern class TextWrapper: width: int = ... initial_indent: str = ... subsequent_indent: str = ... expand_tabs: bool = ... replace_whitespace: bool = ... fix_sentence_endings: bool = ... drop_whitespace: bool = ... break_long_words: bool = ... break_on_hyphens: bool = ... tabsize: int = ... max_lines: Optional[int] = ... placeholder: str = ... # Attributes not present in documentation sentence_end_re: Pattern[str] = ... wordsep_re: Pattern[str] = ... wordsep_simple_re: Pattern[str] = ... whitespace_trans: str = ... unicode_whitespace_trans: Dict[int, int] = ... uspace: int = ... x: int = ... def __init__( self, width: int = ..., initial_indent: str = ..., subsequent_indent: str = ..., expand_tabs: bool = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., drop_whitespace: bool = ..., break_on_hyphens: bool = ..., tabsize: int = ..., *, max_lines: Optional[int] = ..., placeholder: str = ...) -> None: ... # Private methods *are* part of the documented API for subclasses. def _munge_whitespace(self, text: str) -> str: ... def _split(self, text: str) -> List[str]: ... def _fix_sentence_endings(self, chunks: List[str]) -> None: ... def _handle_long_word(self, reversed_chunks: List[str], cur_line: List[str], cur_len: int, width: int) -> None: ... def _wrap_chunks(self, chunks: List[str]) -> List[str]: ... def _split_chunks(self, text: str) -> List[str]: ... def wrap(self, text: str) -> List[str]: ... def fill(self, text: str) -> str: ... def wrap( text: str = ..., width: int = ..., *, initial_indent: str = ..., subsequent_indent: str = ..., expand_tabs: bool = ..., tabsize: int = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., break_on_hyphens: bool = ..., drop_whitespace: bool = ..., max_lines: int = ..., placeholder: str = ... ) -> List[str]: ... def fill( text: str, width: int = ..., *, initial_indent: str = ..., subsequent_indent: str = ..., expand_tabs: bool = ..., tabsize: int = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., break_on_hyphens: bool = ..., drop_whitespace: bool = ..., max_lines: int = ..., placeholder: str = ... ) -> str: ... 
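# --- Illustrative usage sketch (not part of the original stubs): wrap() and
# --- fill() as declared above, applied to a hypothetical paragraph.
import textwrap

paragraph = "The textwrap module reflows long strings into lines of a given width."
print(textwrap.wrap(paragraph, width=30))  # list of wrapped lines
print(textwrap.fill(paragraph, width=30))  # single string joined with newlines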
def shorten( text: str, width: int, *, initial_indent: str = ..., subsequent_indent: str = ..., expand_tabs: bool = ..., tabsize: int = ..., replace_whitespace: bool = ..., fix_sentence_endings: bool = ..., break_long_words: bool = ..., break_on_hyphens: bool = ..., drop_whitespace: bool = ..., # Omit `max_lines: int = None`, it is forced to 1 here. placeholder: str = ... ) -> str: ... def dedent(text: str) -> str: ... def indent(text: str, prefix: str, predicate: Callable[[str], bool] = ...) -> str: ... mypy-0.560/typeshed/stdlib/3/time.pyi0000644€tŠÔÚ€2›s®0000000671013215007212023622 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for time # Ron Murawski # based on: http://docs.python.org/3.3/library/time.html#module-time # see: http://nullege.com/codes/search?cq=time import sys from typing import Any, NamedTuple, Tuple, Union from types import SimpleNamespace TimeTuple = Tuple[int, int, int, int, int, int, int, int, int] # ----- variables and constants ----- accept2dyear = False altzone = 0 daylight = 0 timezone = 0 tzname = ... # type: Tuple[str, str] if sys.version_info >= (3, 3) and sys.platform != 'win32': CLOCK_HIGHRES = 0 # Solaris only CLOCK_MONOTONIC = 0 # Unix only CLOCK_MONOTONIC_RAW = 0 # Linux 2.6.28 or later CLOCK_PROCESS_CPUTIME_ID = 0 # Unix only CLOCK_REALTIME = 0 # Unix only CLOCK_THREAD_CPUTIME_ID = 0 # Unix only if sys.version_info >= (3, 3): class struct_time( NamedTuple( '_struct_time', [('tm_year', int), ('tm_mon', int), ('tm_mday', int), ('tm_hour', int), ('tm_min', int), ('tm_sec', int), ('tm_wday', int), ('tm_yday', int), ('tm_isdst', int), ('tm_zone', str), ('tm_gmtoff', int)] ) ): def __init__( self, o: Union[ Tuple[int, int, int, int, int, int, int, int, int], Tuple[int, int, int, int, int, int, int, int, int, str], Tuple[int, int, int, int, int, int, int, int, int, str, int] ], _arg: Any = ..., ) -> None: ... def __new__( cls, o: Union[ Tuple[int, int, int, int, int, int, int, int, int], Tuple[int, int, int, int, int, int, int, int, int, str], Tuple[int, int, int, int, int, int, int, int, int, str, int] ], _arg: Any = ..., ) -> struct_time: ... else: class struct_time( NamedTuple( '_struct_time', [('tm_year', int), ('tm_mon', int), ('tm_mday', int), ('tm_hour', int), ('tm_min', int), ('tm_sec', int), ('tm_wday', int), ('tm_yday', int), ('tm_isdst', int)] ) ): def __init__(self, o: TimeTuple, _arg: Any = ...) -> None: ... def __new__(cls, o: TimeTuple, _arg: Any = ...) -> struct_time: ... # ----- functions ----- def asctime(t: Union[TimeTuple, struct_time, None] = ...) -> str: ... # return current time def clock() -> float: ... def ctime(secs: Union[float, None] = ...) -> str: ... # return current time def gmtime(secs: Union[float, None] = ...) -> struct_time: ... # return current time def localtime(secs: Union[float, None] = ...) -> struct_time: ... # return current time def mktime(t: Union[TimeTuple, struct_time]) -> float: ... def sleep(secs: Union[int, float]) -> None: ... def strftime(format: str, t: Union[TimeTuple, struct_time, None] = ...) -> str: ... # return current time def strptime(string: str, format: str = ...) -> struct_time: ... def time() -> float: ... if sys.platform != 'win32': def tzset() -> None: ... # Unix only if sys.version_info >= (3, 3): def get_clock_info(name: str) -> SimpleNamespace: ... def monotonic() -> float: ... def perf_counter() -> float: ... def process_time() -> float: ... if sys.platform != 'win32': def clock_getres(clk_id: int) -> float: ... # Unix only def clock_gettime(clk_id: int) -> float: ... 
# Unix only def clock_settime(clk_id: int, time: float) -> None: ... # Unix only mypy-0.560/typeshed/stdlib/3/tkinter/0000755€tŠÔÚ€2›s®0000000000013215007244023622 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/tkinter/__init__.pyi0000644€tŠÔÚ€2›s®0000005606313215007212026111 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from tkinter.constants import * # noqa: F403 TclError = ... # type: Any wantobjects = ... # type: Any TkVersion = ... # type: Any TclVersion = ... # type: Any READABLE = ... # type: Any WRITABLE = ... # type: Any EXCEPTION = ... # type: Any class Event: ... def NoDefaultRoot(): ... class Variable: def __init__(self, master=None, value=None, name=None): ... def __del__(self): ... def set(self, value): ... initialize = ... # type: Any def get(self): ... def trace_variable(self, mode, callback): ... trace = ... # type: Any def trace_vdelete(self, mode, cbname): ... def trace_vinfo(self): ... def __eq__(self, other): ... class StringVar(Variable): def __init__(self, master=None, value=None, name=None): ... def get(self): ... class IntVar(Variable): def __init__(self, master=None, value=None, name=None): ... def get(self): ... class DoubleVar(Variable): def __init__(self, master=None, value=None, name=None): ... def get(self): ... class BooleanVar(Variable): def __init__(self, master=None, value=None, name=None): ... def set(self, value): ... initialize = ... # type: Any def get(self): ... def mainloop(n=0): ... getint = ... # type: Any getdouble = ... # type: Any def getboolean(s): ... class Misc: def destroy(self): ... def deletecommand(self, name): ... def tk_strictMotif(self, boolean=None): ... def tk_bisque(self): ... def tk_setPalette(self, *args, **kw): ... def tk_menuBar(self, *args): ... def wait_variable(self, name: str = ...): ... waitvar = ... # type: Any def wait_window(self, window=None): ... def wait_visibility(self, window=None): ... def setvar(self, name: str = ..., value: str = ...): ... def getvar(self, name: str = ...): ... def getint(self, s): ... def getdouble(self, s): ... def getboolean(self, s): ... def focus_set(self): ... focus = ... # type: Any def focus_force(self): ... def focus_get(self): ... def focus_displayof(self): ... def focus_lastfor(self): ... def tk_focusFollowsMouse(self): ... def tk_focusNext(self): ... def tk_focusPrev(self): ... def after(self, ms, func=None, *args): ... def after_idle(self, func, *args): ... def after_cancel(self, id): ... def bell(self, displayof=0): ... def clipboard_get(self, **kw): ... def clipboard_clear(self, **kw): ... def clipboard_append(self, string, **kw): ... def grab_current(self): ... def grab_release(self): ... def grab_set(self): ... def grab_set_global(self): ... def grab_status(self): ... def option_add(self, pattern, value, priority=None): ... def option_clear(self): ... def option_get(self, name, className): ... def option_readfile(self, fileName, priority=None): ... def selection_clear(self, **kw): ... def selection_get(self, **kw): ... def selection_handle(self, command, **kw): ... def selection_own(self, **kw): ... def selection_own_get(self, **kw): ... def send(self, interp, cmd, *args): ... def lower(self, belowThis=None): ... def tkraise(self, aboveThis=None): ... lift = ... # type: Any def winfo_atom(self, name, displayof=0): ... def winfo_atomname(self, id, displayof=0): ... def winfo_cells(self): ... def winfo_children(self): ... def winfo_class(self): ... def winfo_colormapfull(self): ... 
def winfo_containing(self, rootX, rootY, displayof=0): ... def winfo_depth(self): ... def winfo_exists(self): ... def winfo_fpixels(self, number): ... def winfo_geometry(self): ... def winfo_height(self): ... def winfo_id(self): ... def winfo_interps(self, displayof=0): ... def winfo_ismapped(self): ... def winfo_manager(self): ... def winfo_name(self): ... def winfo_parent(self): ... def winfo_pathname(self, id, displayof=0): ... def winfo_pixels(self, number): ... def winfo_pointerx(self): ... def winfo_pointerxy(self): ... def winfo_pointery(self): ... def winfo_reqheight(self): ... def winfo_reqwidth(self): ... def winfo_rgb(self, color): ... def winfo_rootx(self): ... def winfo_rooty(self): ... def winfo_screen(self): ... def winfo_screencells(self): ... def winfo_screendepth(self): ... def winfo_screenheight(self): ... def winfo_screenmmheight(self): ... def winfo_screenmmwidth(self): ... def winfo_screenvisual(self): ... def winfo_screenwidth(self): ... def winfo_server(self): ... def winfo_toplevel(self): ... def winfo_viewable(self): ... def winfo_visual(self): ... def winfo_visualid(self): ... def winfo_visualsavailable(self, includeids=0): ... def winfo_vrootheight(self): ... def winfo_vrootwidth(self): ... def winfo_vrootx(self): ... def winfo_vrooty(self): ... def winfo_width(self): ... def winfo_x(self): ... def winfo_y(self): ... def update(self): ... def update_idletasks(self): ... def bindtags(self, tagList=None): ... def bind(self, sequence=None, func=None, add=None): ... def unbind(self, sequence, funcid=None): ... def bind_all(self, sequence=None, func=None, add=None): ... def unbind_all(self, sequence): ... def bind_class(self, className, sequence=None, func=None, add=None): ... def unbind_class(self, className, sequence): ... def mainloop(self, n=0): ... def quit(self): ... def nametowidget(self, name): ... register = ... # type: Any def configure(self, cnf=None, **kw): ... config = ... # type: Any def cget(self, key): ... __getitem__ = ... # type: Any def __setitem__(self, key, value): ... def keys(self): ... def pack_propagate(self, flag=...): ... propagate = ... # type: Any def pack_slaves(self): ... slaves = ... # type: Any def place_slaves(self): ... def grid_anchor(self, anchor=None): ... anchor = ... # type: Any def grid_bbox(self, column=None, row=None, col2=None, row2=None): ... bbox = ... # type: Any def grid_columnconfigure(self, index, cnf=..., **kw): ... columnconfigure = ... # type: Any def grid_location(self, x, y): ... def grid_propagate(self, flag=...): ... def grid_rowconfigure(self, index, cnf=..., **kw): ... rowconfigure = ... # type: Any def grid_size(self): ... size = ... # type: Any def grid_slaves(self, row=None, column=None): ... def event_add(self, virtual, *sequences): ... def event_delete(self, virtual, *sequences): ... def event_generate(self, sequence, **kw): ... def event_info(self, virtual=None): ... def image_names(self): ... def image_types(self): ... class CallWrapper: func = ... # type: Any subst = ... # type: Any widget = ... # type: Any def __init__(self, func, subst, widget): ... def __call__(self, *args): ... class XView: def xview(self, *args): ... def xview_moveto(self, fraction): ... def xview_scroll(self, number, what): ... class YView: def yview(self, *args): ... def yview_moveto(self, fraction): ... def yview_scroll(self, number, what): ... class Wm: def wm_aspect(self, minNumer=None, minDenom=None, maxNumer=None, maxDenom=None): ... aspect = ... # type: Any def wm_attributes(self, *args): ... attributes = ... 
# type: Any def wm_client(self, name=None): ... client = ... # type: Any def wm_colormapwindows(self, *wlist): ... colormapwindows = ... # type: Any def wm_command(self, value=None): ... command = ... # type: Any def wm_deiconify(self): ... deiconify = ... # type: Any def wm_focusmodel(self, model=None): ... focusmodel = ... # type: Any def wm_forget(self, window): ... forget = ... # type: Any def wm_frame(self): ... frame = ... # type: Any def wm_geometry(self, newGeometry=None): ... geometry = ... # type: Any def wm_grid(self, baseWidth=None, baseHeight=None, widthInc=None, heightInc=None): ... grid = ... # type: Any def wm_group(self, pathName=None): ... group = ... # type: Any def wm_iconbitmap(self, bitmap=None, default=None): ... iconbitmap = ... # type: Any def wm_iconify(self): ... iconify = ... # type: Any def wm_iconmask(self, bitmap=None): ... iconmask = ... # type: Any def wm_iconname(self, newName=None): ... iconname = ... # type: Any def wm_iconphoto(self, default=False, *args): ... iconphoto = ... # type: Any def wm_iconposition(self, x=None, y=None): ... iconposition = ... # type: Any def wm_iconwindow(self, pathName=None): ... iconwindow = ... # type: Any def wm_manage(self, widget): ... manage = ... # type: Any def wm_maxsize(self, width=None, height=None): ... maxsize = ... # type: Any def wm_minsize(self, width=None, height=None): ... minsize = ... # type: Any def wm_overrideredirect(self, boolean=None): ... overrideredirect = ... # type: Any def wm_positionfrom(self, who=None): ... positionfrom = ... # type: Any def wm_protocol(self, name=None, func=None): ... protocol = ... # type: Any def wm_resizable(self, width=None, height=None): ... resizable = ... # type: Any def wm_sizefrom(self, who=None): ... sizefrom = ... # type: Any def wm_state(self, newstate=None): ... state = ... # type: Any def wm_title(self, string=None): ... title = ... # type: Any def wm_transient(self, master=None): ... transient = ... # type: Any def wm_withdraw(self): ... withdraw = ... # type: Any class Tk(Misc, Wm): master = ... # type: Any children = ... # type: Any tk = ... # type: Any def __init__(self, screenName=None, baseName=None, className: str = ..., useTk=1, sync=0, use=None) -> None: ... def loadtk(self): ... def destroy(self): ... def readprofile(self, baseName, className): ... def report_callback_exception(self, exc, val, tb): ... def __getattr__(self, attr): ... def Tcl(screenName=None, baseName=None, className: str = ..., useTk=0): ... class Pack: def pack_configure(self, cnf=..., **kw): ... pack = ... # type: Any def pack_forget(self): ... forget = ... # type: Any def pack_info(self): ... info = ... # type: Any propagate = ... # type: Any slaves = ... # type: Any class Place: def place_configure(self, cnf=..., **kw): ... place = ... # type: Any def place_forget(self): ... forget = ... # type: Any def place_info(self): ... info = ... # type: Any slaves = ... # type: Any class Grid: def grid_configure(self, cnf=..., **kw): ... grid = ... # type: Any bbox = ... # type: Any columnconfigure = ... # type: Any def grid_forget(self): ... forget = ... # type: Any def grid_remove(self): ... def grid_info(self): ... info = ... # type: Any location = ... # type: Any propagate = ... # type: Any rowconfigure = ... # type: Any size = ... # type: Any slaves = ... # type: Any class BaseWidget(Misc): widgetName = ... # type: Any def __init__(self, master, widgetName, cnf=..., kw=..., extra=...): ... def destroy(self): ... class Widget(BaseWidget, Pack, Place, Grid): ... 
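# --- Illustrative usage sketch (not part of the original stubs): a minimal
# --- window built from the Tk, Wm and Misc names declared above. Requires a
# --- display; the title, size and auto-close delay are hypothetical.
import tkinter

root = tkinter.Tk()
root.title("demo")               # Wm.wm_title, aliased as title above
root.geometry("200x100")         # Wm.wm_geometry, aliased as geometry above
root.after(2000, root.destroy)   # Misc.after schedules the window to close
root.mainloop()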
class Toplevel(BaseWidget, Wm): def __init__(self, master=None, cnf=..., **kw): ... class Button(Widget): def __init__(self, master=None, cnf=..., **kw): ... def flash(self): ... def invoke(self): ... class Canvas(Widget, XView, YView): def __init__(self, master=None, cnf=..., **kw): ... def addtag(self, *args): ... def addtag_above(self, newtag, tagOrId): ... def addtag_all(self, newtag): ... def addtag_below(self, newtag, tagOrId): ... def addtag_closest(self, newtag, x, y, halo=None, start=None): ... def addtag_enclosed(self, newtag, x1, y1, x2, y2): ... def addtag_overlapping(self, newtag, x1, y1, x2, y2): ... def addtag_withtag(self, newtag, tagOrId): ... def bbox(self, *args): ... def tag_unbind(self, tagOrId, sequence, funcid=None): ... def tag_bind(self, tagOrId, sequence=None, func=None, add=None): ... def canvasx(self, screenx, gridspacing=None): ... def canvasy(self, screeny, gridspacing=None): ... def coords(self, *args): ... def create_arc(self, *args, **kw): ... def create_bitmap(self, *args, **kw): ... def create_image(self, *args, **kw): ... def create_line(self, *args, **kw): ... def create_oval(self, *args, **kw): ... def create_polygon(self, *args, **kw): ... def create_rectangle(self, *args, **kw): ... def create_text(self, *args, **kw): ... def create_window(self, *args, **kw): ... def dchars(self, *args): ... def delete(self, *args): ... def dtag(self, *args): ... def find(self, *args): ... def find_above(self, tagOrId): ... def find_all(self): ... def find_below(self, tagOrId): ... def find_closest(self, x, y, halo=None, start=None): ... def find_enclosed(self, x1, y1, x2, y2): ... def find_overlapping(self, x1, y1, x2, y2): ... def find_withtag(self, tagOrId): ... def focus(self, *args): ... def gettags(self, *args): ... def icursor(self, *args): ... def index(self, *args): ... def insert(self, *args): ... def itemcget(self, tagOrId, option): ... def itemconfigure(self, tagOrId, cnf=None, **kw): ... itemconfig = ... # type: Any def tag_lower(self, *args): ... lower = ... # type: Any def move(self, *args): ... def postscript(self, cnf=..., **kw): ... def tag_raise(self, *args): ... lift = ... # type: Any def scale(self, *args): ... def scan_mark(self, x, y): ... def scan_dragto(self, x, y, gain=10): ... def select_adjust(self, tagOrId, index): ... def select_clear(self): ... def select_from(self, tagOrId, index): ... def select_item(self): ... def select_to(self, tagOrId, index): ... def type(self, tagOrId): ... class Checkbutton(Widget): def __init__(self, master=None, cnf=..., **kw): ... def deselect(self): ... def flash(self): ... def invoke(self): ... def select(self): ... def toggle(self): ... class Entry(Widget, XView): def __init__(self, master=None, cnf=..., **kw): ... def delete(self, first, last=None): ... def get(self): ... def icursor(self, index): ... def index(self, index): ... def insert(self, index, string): ... def scan_mark(self, x): ... def scan_dragto(self, x): ... def selection_adjust(self, index): ... select_adjust = ... # type: Any def selection_clear(self): ... select_clear = ... # type: Any def selection_from(self, index): ... select_from = ... # type: Any def selection_present(self): ... select_present = ... # type: Any def selection_range(self, start, end): ... select_range = ... # type: Any def selection_to(self, index): ... select_to = ... # type: Any class Frame(Widget): def __init__(self, master=None, cnf=..., **kw): ... class Label(Widget): def __init__(self, master=None, cnf=..., **kw): ... 
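# --- Illustrative usage sketch (not part of the original stubs): wiring a
# --- StringVar to the Entry and Label widgets declared above, so the label
# --- mirrors what is typed. Requires a display; the layout is hypothetical.
import tkinter

root = tkinter.Tk()
name = tkinter.StringVar(root, value="initial")
tkinter.Entry(root, textvariable=name).pack()
tkinter.Label(root, textvariable=name).pack()  # updates as the Entry changes
root.mainloop()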
class Listbox(Widget, XView, YView): def __init__(self, master=None, cnf=..., **kw): ... def activate(self, index): ... def bbox(self, index): ... def curselection(self): ... def delete(self, first, last=None): ... def get(self, first, last=None): ... def index(self, index): ... def insert(self, index, *elements): ... def nearest(self, y): ... def scan_mark(self, x, y): ... def scan_dragto(self, x, y): ... def see(self, index): ... def selection_anchor(self, index): ... select_anchor = ... # type: Any def selection_clear(self, first, last=None): ... select_clear = ... # type: Any def selection_includes(self, index): ... select_includes = ... # type: Any def selection_set(self, first, last=None): ... select_set = ... # type: Any def size(self): ... def itemcget(self, index, option): ... def itemconfigure(self, index, cnf=None, **kw): ... itemconfig = ... # type: Any class Menu(Widget): def __init__(self, master=None, cnf=..., **kw): ... def tk_popup(self, x, y, entry: str = ...): ... def tk_bindForTraversal(self): ... def activate(self, index): ... def add(self, itemType, cnf=..., **kw): ... def add_cascade(self, cnf=..., **kw): ... def add_checkbutton(self, cnf=..., **kw): ... def add_command(self, cnf=..., **kw): ... def add_radiobutton(self, cnf=..., **kw): ... def add_separator(self, cnf=..., **kw): ... def insert(self, index, itemType, cnf=..., **kw): ... def insert_cascade(self, index, cnf=..., **kw): ... def insert_checkbutton(self, index, cnf=..., **kw): ... def insert_command(self, index, cnf=..., **kw): ... def insert_radiobutton(self, index, cnf=..., **kw): ... def insert_separator(self, index, cnf=..., **kw): ... def delete(self, index1, index2=None): ... def entrycget(self, index, option): ... def entryconfigure(self, index, cnf=None, **kw): ... entryconfig = ... # type: Any def index(self, index): ... def invoke(self, index): ... def post(self, x, y): ... def type(self, index): ... def unpost(self): ... def xposition(self, index): ... def yposition(self, index): ... class Menubutton(Widget): def __init__(self, master=None, cnf=..., **kw): ... class Message(Widget): def __init__(self, master=None, cnf=..., **kw): ... class Radiobutton(Widget): def __init__(self, master=None, cnf=..., **kw): ... def deselect(self): ... def flash(self): ... def invoke(self): ... def select(self): ... class Scale(Widget): def __init__(self, master=None, cnf=..., **kw): ... def get(self): ... def set(self, value): ... def coords(self, value=None): ... def identify(self, x, y): ... class Scrollbar(Widget): def __init__(self, master=None, cnf=..., **kw): ... def activate(self, index=None): ... def delta(self, deltax, deltay): ... def fraction(self, x, y): ... def identify(self, x, y): ... def get(self): ... def set(self, first, last): ... class Text(Widget, XView, YView): def __init__(self, master=None, cnf=..., **kw): ... def bbox(self, index): ... def compare(self, index1, op, index2): ... def count(self, index1, index2, *args): ... def debug(self, boolean=None): ... def delete(self, index1, index2=None): ... def dlineinfo(self, index): ... def dump(self, index1, index2=None, command=None, **kw): ... def edit(self, *args): ... def edit_modified(self, arg=None): ... def edit_redo(self): ... def edit_reset(self): ... def edit_separator(self): ... def edit_undo(self): ... def get(self, index1, index2=None): ... def image_cget(self, index, option): ... def image_configure(self, index, cnf=None, **kw): ... def image_create(self, index, cnf=..., **kw): ... def image_names(self): ... 
def index(self, index): ... def insert(self, index, chars, *args): ... def mark_gravity(self, markName, direction=None): ... def mark_names(self): ... def mark_set(self, markName, index): ... def mark_unset(self, *markNames): ... def mark_next(self, index): ... def mark_previous(self, index): ... def peer_create(self, newPathName, cnf=..., **kw): ... def peer_names(self): ... def replace(self, index1, index2, chars, *args): ... def scan_mark(self, x, y): ... def scan_dragto(self, x, y): ... def search(self, pattern, index, stopindex=None, forwards=None, backwards=None, exact=None, regexp=None, nocase=None, count=None, elide=None): ... def see(self, index): ... def tag_add(self, tagName, index1, *args): ... def tag_unbind(self, tagName, sequence, funcid=None): ... def tag_bind(self, tagName, sequence, func, add=None): ... def tag_cget(self, tagName, option): ... def tag_configure(self, tagName, cnf=None, **kw): ... tag_config = ... # type: Any def tag_delete(self, *tagNames): ... def tag_lower(self, tagName, belowThis=None): ... def tag_names(self, index=None): ... def tag_nextrange(self, tagName, index1, index2=None): ... def tag_prevrange(self, tagName, index1, index2=None): ... def tag_raise(self, tagName, aboveThis=None): ... def tag_ranges(self, tagName): ... def tag_remove(self, tagName, index1, index2=None): ... def window_cget(self, index, option): ... def window_configure(self, index, cnf=None, **kw): ... window_config = ... # type: Any def window_create(self, index, cnf=..., **kw): ... def window_names(self): ... def yview_pickplace(self, *what): ... class _setit: def __init__(self, var, value, callback=None): ... def __call__(self, *args): ... class OptionMenu(Menubutton): widgetName = ... # type: Any menuname = ... # type: Any def __init__(self, master, variable, value, *values, **kwargs): ... def __getitem__(self, name): ... def destroy(self): ... class Image: name = ... # type: Any tk = ... # type: Any def __init__(self, imgtype, name=None, cnf=..., master=None, **kw): ... def __del__(self): ... def __setitem__(self, key, value): ... def __getitem__(self, key): ... def configure(self, **kw): ... config = ... # type: Any def height(self): ... def type(self): ... def width(self): ... class PhotoImage(Image): def __init__(self, name=None, cnf=..., master=None, **kw): ... def blank(self): ... def cget(self, option): ... def __getitem__(self, key): ... def copy(self): ... def zoom(self, x, y: str = ...): ... def subsample(self, x, y: str = ...): ... def get(self, x, y): ... def put(self, data, to=None): ... def write(self, filename, format=None, from_coords=None): ... class BitmapImage(Image): def __init__(self, name=None, cnf=..., master=None, **kw): ... def image_names(): ... def image_types(): ... class Spinbox(Widget, XView): def __init__(self, master=None, cnf=..., **kw): ... def bbox(self, index): ... def delete(self, first, last=None): ... def get(self): ... def icursor(self, index): ... def identify(self, x, y): ... def index(self, index): ... def insert(self, index, s): ... def invoke(self, element): ... def scan(self, *args): ... def scan_mark(self, x): ... def scan_dragto(self, x): ... def selection(self, *args): ... def selection_adjust(self, index): ... def selection_clear(self): ... def selection_element(self, element=None): ... class LabelFrame(Widget): def __init__(self, master=None, cnf=..., **kw): ... class PanedWindow(Widget): def __init__(self, master=None, cnf=..., **kw): ... def add(self, child, **kw): ... def remove(self, child): ... forget = ... 
# type: Any def identify(self, x, y): ... def proxy(self, *args): ... def proxy_coord(self): ... def proxy_forget(self): ... def proxy_place(self, x, y): ... def sash(self, *args): ... def sash_coord(self, index): ... def sash_mark(self, index): ... def sash_place(self, index, x, y): ... def panecget(self, child, option): ... def paneconfigure(self, tagOrId, cnf=None, **kw): ... paneconfig = ... # type: Any def panes(self): ... mypy-0.560/typeshed/stdlib/3/tkinter/constants.pyi0000644€tŠÔÚ€2›s®0000000364613215007212026365 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any NO = ... # type: Any YES = ... # type: Any TRUE = ... # type: Any FALSE = ... # type: Any ON = ... # type: Any OFF = ... # type: Any N = ... # type: Any S = ... # type: Any W = ... # type: Any E = ... # type: Any NW = ... # type: Any SW = ... # type: Any NE = ... # type: Any SE = ... # type: Any NS = ... # type: Any EW = ... # type: Any NSEW = ... # type: Any CENTER = ... # type: Any NONE = ... # type: Any X = ... # type: Any Y = ... # type: Any BOTH = ... # type: Any LEFT = ... # type: Any TOP = ... # type: Any RIGHT = ... # type: Any BOTTOM = ... # type: Any RAISED = ... # type: Any SUNKEN = ... # type: Any FLAT = ... # type: Any RIDGE = ... # type: Any GROOVE = ... # type: Any SOLID = ... # type: Any HORIZONTAL = ... # type: Any VERTICAL = ... # type: Any NUMERIC = ... # type: Any CHAR = ... # type: Any WORD = ... # type: Any BASELINE = ... # type: Any INSIDE = ... # type: Any OUTSIDE = ... # type: Any SEL = ... # type: Any SEL_FIRST = ... # type: Any SEL_LAST = ... # type: Any END = ... # type: Any INSERT = ... # type: Any CURRENT = ... # type: Any ANCHOR = ... # type: Any ALL = ... # type: Any NORMAL = ... # type: Any DISABLED = ... # type: Any ACTIVE = ... # type: Any HIDDEN = ... # type: Any CASCADE = ... # type: Any CHECKBUTTON = ... # type: Any COMMAND = ... # type: Any RADIOBUTTON = ... # type: Any SEPARATOR = ... # type: Any SINGLE = ... # type: Any BROWSE = ... # type: Any MULTIPLE = ... # type: Any EXTENDED = ... # type: Any DOTBOX = ... # type: Any UNDERLINE = ... # type: Any PIESLICE = ... # type: Any CHORD = ... # type: Any ARC = ... # type: Any FIRST = ... # type: Any LAST = ... # type: Any BUTT = ... # type: Any PROJECTING = ... # type: Any ROUND = ... # type: Any BEVEL = ... # type: Any MITER = ... # type: Any MOVETO = ... # type: Any SCROLL = ... # type: Any UNITS = ... # type: Any PAGES = ... # type: Any mypy-0.560/typeshed/stdlib/3/tkinter/ttk.pyi0000644€tŠÔÚ€2›s®0000001211213215007212025137 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any import tkinter def tclobjs_to_py(adict): ... def setup_master(master=None): ... class Style: master = ... # type: Any tk = ... # type: Any def __init__(self, master=None): ... def configure(self, style, query_opt=None, **kw): ... def map(self, style, query_opt=None, **kw): ... def lookup(self, style, option, state=None, default=None): ... def layout(self, style, layoutspec=None): ... def element_create(self, elementname, etype, *args, **kw): ... def element_names(self): ... def element_options(self, elementname): ... def theme_create(self, themename, parent=None, settings=None): ... def theme_settings(self, themename, settings): ... def theme_names(self): ... def theme_use(self, themename=None): ... class Widget(tkinter.Widget): def __init__(self, master, widgetname, kw=None): ... def identify(self, x, y): ... def instate(self, statespec, callback=None, *args, **kw): ... def state(self, statespec=None): ... 
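# --- Illustrative usage sketch (not part of the original stubs): querying and
# --- switching themes with the ttk Style class declared above. Requires a
# --- display; assumes the 'clam' theme is available on this platform.
import tkinter
from tkinter import ttk

root = tkinter.Tk()
style = ttk.Style(root)
print(style.theme_names())  # e.g. ('clam', 'alt', 'default', 'classic')
style.theme_use("clam")
root.destroy()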
class Button(Widget): def __init__(self, master=None, **kw): ... def invoke(self): ... class Checkbutton(Widget): def __init__(self, master=None, **kw): ... def invoke(self): ... class Entry(Widget, tkinter.Entry): def __init__(self, master=None, widget=None, **kw): ... def bbox(self, index): ... def identify(self, x, y): ... def validate(self): ... class Combobox(Entry): def __init__(self, master=None, **kw): ... def current(self, newindex=None): ... def set(self, value): ... class Frame(Widget): def __init__(self, master=None, **kw): ... class Label(Widget): def __init__(self, master=None, **kw): ... class Labelframe(Widget): def __init__(self, master=None, **kw): ... LabelFrame = ... # type: Any class Menubutton(Widget): def __init__(self, master=None, **kw): ... class Notebook(Widget): def __init__(self, master=None, **kw): ... def add(self, child, **kw): ... def forget(self, tab_id): ... def hide(self, tab_id): ... def identify(self, x, y): ... def index(self, tab_id): ... def insert(self, pos, child, **kw): ... def select(self, tab_id=None): ... def tab(self, tab_id, option=None, **kw): ... def tabs(self): ... def enable_traversal(self): ... class Panedwindow(Widget, tkinter.PanedWindow): def __init__(self, master=None, **kw): ... forget = ... # type: Any def insert(self, pos, child, **kw): ... def pane(self, pane, option=None, **kw): ... def sashpos(self, index, newpos=None): ... PanedWindow = ... # type: Any class Progressbar(Widget): def __init__(self, master=None, **kw): ... def start(self, interval=None): ... def step(self, amount=None): ... def stop(self): ... class Radiobutton(Widget): def __init__(self, master=None, **kw): ... def invoke(self): ... class Scale(Widget, tkinter.Scale): def __init__(self, master=None, **kw): ... def configure(self, cnf=None, **kw): ... def get(self, x=None, y=None): ... class Scrollbar(Widget, tkinter.Scrollbar): def __init__(self, master=None, **kw): ... class Separator(Widget): def __init__(self, master=None, **kw): ... class Sizegrip(Widget): def __init__(self, master=None, **kw): ... class Treeview(Widget, tkinter.XView, tkinter.YView): def __init__(self, master=None, **kw): ... def bbox(self, item, column=None): ... def get_children(self, item=None): ... def set_children(self, item, *newchildren): ... def column(self, column, option=None, **kw): ... def delete(self, *items): ... def detach(self, *items): ... def exists(self, item): ... def focus(self, item=None): ... def heading(self, column, option=None, **kw): ... def identify(self, component, x, y): ... def identify_row(self, y): ... def identify_column(self, x): ... def identify_region(self, x, y): ... def identify_element(self, x, y): ... def index(self, item): ... def insert(self, parent, index, iid=None, **kw): ... def item(self, item, option=None, **kw): ... def move(self, item, parent, index): ... reattach = ... # type: Any def next(self, item): ... def parent(self, item): ... def prev(self, item): ... def see(self, item): ... def selection(self, selop=None, items=None): ... def selection_set(self, items): ... def selection_add(self, items): ... def selection_remove(self, items): ... def selection_toggle(self, items): ... def set(self, item, column=None, value=None): ... def tag_bind(self, tagname, sequence=None, callback=None): ... def tag_configure(self, tagname, option=None, **kw): ... def tag_has(self, tagname, item=None): ... class LabeledScale(Frame): label = ... # type: Any scale = ... # type: Any def __init__(self, master=None, variable=None, from_=0, to=10, **kw): ... 
def destroy(self): ... value = ... # type: Any class OptionMenu(Menubutton): def __init__(self, master, variable, default=None, *values, **kwargs): ... def __getitem__(self, item): ... def set_menu(self, default=None, *values): ... def destroy(self): ... mypy-0.560/typeshed/stdlib/3/tokenize.pyi0000644€tŠÔÚ€2›s®0000000446613215007212024522 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Generator, Iterable, List, NamedTuple, Optional, Union, Sequence, TextIO, Tuple from builtins import open as _builtin_open from token import * # noqa: F403 COMMENT = ... # type: int NL = ... # type: int ENCODING = ... # type: int _Position = Tuple[int, int] _TokenInfo = NamedTuple('TokenInfo', [ ('type', int), ('string', str), ('start', _Position), ('end', _Position), ('line', str) ]) class TokenInfo(_TokenInfo): @property def exact_type(self) -> int: ... # Backwards compatible tokens can be sequences of a shorter length too _Token = Union[TokenInfo, Sequence[Union[int, str, _Position]]] class TokenError(Exception): ... class StopTokenizing(Exception): ... class Untokenizer: tokens = ... # type: List[str] prev_row = ... # type: int prev_col = ... # type: int encoding = ... # type: Optional[str] def __init__(self) -> None: ... def add_whitespace(self, start: _Position) -> None: ... def untokenize(self, iterable: Iterable[_Token]) -> str: ... def compat(self, token: Sequence[Union[int, str]], iterable: Iterable[_Token]) -> None: ... def untokenize(iterable: Iterable[_Token]) -> Any: ... def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ... def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ... def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ... def open(filename: Union[str, bytes, int]) -> TextIO: ... # Names in __all__ with no definition: # AMPER # AMPEREQUAL # ASYNC # AT # ATEQUAL # AWAIT # CIRCUMFLEX # CIRCUMFLEXEQUAL # COLON # COMMA # DEDENT # DOT # DOUBLESLASH # DOUBLESLASHEQUAL # DOUBLESTAR # DOUBLESTAREQUAL # ELLIPSIS # ENDMARKER # EQEQUAL # EQUAL # ERRORTOKEN # GREATER # GREATEREQUAL # INDENT # ISEOF # ISNONTERMINAL # ISTERMINAL # LBRACE # LEFTSHIFT # LEFTSHIFTEQUAL # LESS # LESSEQUAL # LPAR # LSQB # MINEQUAL # MINUS # NAME # NEWLINE # NOTEQUAL # NT_OFFSET # NUMBER # N_TOKENS # OP # PERCENT # PERCENTEQUAL # PLUS # PLUSEQUAL # RARROW # RBRACE # RIGHTSHIFT # RIGHTSHIFTEQUAL # RPAR # RSQB # SEMI # SLASH # SLASHEQUAL # STAR # STAREQUAL # STRING # TILDE # VBAR # VBAREQUAL # tok_name mypy-0.560/typeshed/stdlib/3/types.pyi0000644€tŠÔÚ€2›s®0000001604613215007212024033 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for types # Note, all classes "defined" here require special handling. # TODO parts of this should be conditional on version import sys from typing import ( Any, Awaitable, Callable, Dict, Generic, Iterator, Mapping, Optional, Tuple, TypeVar, Union, overload, Type ) # ModuleType is exported from this module, but for circular import # reasons exists in its own stub file (with ModuleSpec and Loader). from _importlib_modulespec import ModuleType as ModuleType # Exported _T = TypeVar('_T') _T_co = TypeVar('_T_co', covariant=True) _T_contra = TypeVar('_T_contra', contravariant=True) _KT = TypeVar('_KT') _VT = TypeVar('_VT') class _Cell: cell_contents = ... # type: Any class FunctionType: __closure__ = ... # type: Optional[Tuple[_Cell, ...]] __code__ = ... # type: CodeType __defaults__ = ... # type: Optional[Tuple[Any, ...]] __dict__ = ... 
# type: Dict[str, Any] __globals__ = ... # type: Dict[str, Any] __name__ = ... # type: str __qualname__ = ... # type: str __annotations__ = ... # type: Dict[str, Any] __kwdefaults__ = ... # type: Dict[str, Any] def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __get__(self, obj: Optional[object], type: Optional[type]) -> 'MethodType': ... LambdaType = FunctionType class CodeType: """Create a code object. Not for the faint of heart.""" co_argcount = ... # type: int co_kwonlyargcount = ... # type: int co_nlocals = ... # type: int co_stacksize = ... # type: int co_flags = ... # type: int co_code = ... # type: bytes co_consts = ... # type: Tuple[Any, ...] co_names = ... # type: Tuple[str, ...] co_varnames = ... # type: Tuple[str, ...] co_filename = ... # type: Optional[str] co_name = ... # type: str co_firstlineno = ... # type: int co_lnotab = ... # type: bytes co_freevars = ... # type: Tuple[str, ...] co_cellvars = ... # type: Tuple[str, ...] def __init__( self, argcount: int, kwonlyargcount: int, nlocals: int, stacksize: int, flags: int, codestring: bytes, constants: Tuple[Any, ...], names: Tuple[str, ...], varnames: Tuple[str, ...], filename: str, name: str, firstlineno: int, lnotab: bytes, freevars: Tuple[str, ...] = ..., cellvars: Tuple[str, ...] = ..., ) -> None: ... class MappingProxyType(Mapping[_KT, _VT], Generic[_KT, _VT]): def __init__(self, mapping: Mapping[_KT, _VT]) -> None: ... def __getitem__(self, k: _KT) -> _VT: ... def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... # TODO: use __getattr__ and __setattr__ instead of inheriting from Any, pending mypy#521. class SimpleNamespace(Any): ... # type: ignore class GeneratorType: gi_code = ... # type: CodeType gi_frame = ... # type: FrameType gi_running = ... # type: bool gi_yieldfrom = ... # type: Optional[GeneratorType] def __iter__(self) -> 'GeneratorType': ... def __next__(self) -> Any: ... def close(self) -> None: ... def send(self, arg: Any) -> Any: ... @overload def throw(self, val: BaseException) -> Any: ... @overload def throw(self, typ: type, val: BaseException = ..., tb: 'TracebackType' = ...) -> Any: ... if sys.version_info >= (3, 6): class AsyncGeneratorType(Generic[_T_co, _T_contra]): ag_await: Optional[Awaitable[Any]] ag_frame: FrameType ag_running: bool ag_code: CodeType def __aiter__(self) -> Awaitable[AsyncGeneratorType[_T_co, _T_contra]]: ... def __anext__(self) -> Awaitable[_T_co]: ... def asend(self, val: _T_contra) -> Awaitable[_T_co]: ... @overload def athrow(self, val: BaseException) -> Awaitable[_T_co]: ... @overload def athrow(self, typ: Type[BaseException], val: BaseException, tb: TracebackType = ...) -> Awaitable[_T_co]: ... def aclose(self) -> Awaitable[_T_co]: ... class CoroutineType: cr_await = ... # type: Optional[Any] cr_code = ... # type: CodeType cr_frame = ... # type: FrameType cr_running = ... # type: bool def close(self) -> None: ... def send(self, arg: Any) -> Any: ... @overload def throw(self, val: BaseException) -> Any: ... @overload def throw(self, typ: type, val: BaseException = ..., tb: 'TracebackType' = ...) -> Any: ... class _StaticFunctionType: """Fictional type to correct the type of MethodType.__func__. FunctionType is a descriptor, so mypy follows the descriptor protocol and converts MethodType.__func__ back to MethodType (the return type of FunctionType.__get__). But this is actually a special case; MethodType is implemented in C and its attribute access doesn't go through __getattribute__. 
By wrapping FunctionType in _StaticFunctionType, we get the right result; similar to wrapping a function in staticmethod() at runtime to prevent it being bound as a method. """ def __get__(self, obj: Optional[object], type: Optional[type]) -> 'FunctionType': ... class MethodType: __func__ = ... # type: _StaticFunctionType __self__ = ... # type: object __name__ = ... # type: str __qualname__ = ... # type: str def __init__(self, func: Callable, obj: object) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... class BuiltinFunctionType: __self__ = ... # type: Union[object, ModuleType] __name__ = ... # type: str __qualname__ = ... # type: str def __call__(self, *args: Any, **kwargs: Any) -> Any: ... BuiltinMethodType = BuiltinFunctionType class TracebackType: tb_frame = ... # type: FrameType tb_lasti = ... # type: int tb_lineno = ... # type: int tb_next = ... # type: TracebackType class FrameType: f_back = ... # type: FrameType f_builtins = ... # type: Dict[str, Any] f_code = ... # type: CodeType f_globals = ... # type: Dict[str, Any] f_lasti = ... # type: int f_lineno = ... # type: int f_locals = ... # type: Dict[str, Any] f_trace = ... # type: Callable[[], None] def clear(self) -> None: ... class GetSetDescriptorType: __name__ = ... # type: str __objclass__ = ... # type: type def __get__(self, obj: Any, type: type = ...) -> Any: ... def __set__(self, obj: Any) -> None: ... def __delete__(self, obj: Any) -> None: ... class MemberDescriptorType: __name__ = ... # type: str __objclass__ = ... # type: type def __get__(self, obj: Any, type: type = ...) -> Any: ... def __set__(self, obj: Any) -> None: ... def __delete__(self, obj: Any) -> None: ... def new_class(name: str, bases: Tuple[type, ...] = ..., kwds: Dict[str, Any] = ..., exec_body: Callable[[Dict[str, Any]], None] = ...) -> type: ... def prepare_class(name: str, bases: Tuple[type, ...] = ..., kwds: Dict[str, Any] = ...) -> Tuple[type, Dict[str, Any], Dict[str, Any]]: ... # Actually a different type, but `property` is special and we want that too. DynamicClassAttribute = property def coroutine(f: Callable[..., Any]) -> CoroutineType: ... mypy-0.560/typeshed/stdlib/3/typing.pyi0000644€tŠÔÚ€2›s®0000004460313215007212024201 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for typing import sys from abc import abstractmethod, ABCMeta from types import CodeType, FrameType, TracebackType import collections # Needed by aliases like DefaultDict, see mypy issue 2986 # Definitions of special type checking related constructs. Their definition # are not used, so their value does not matter. overload = object() Any = object() TypeVar = object() _promote = object() no_type_check = object() class _SpecialForm: def __getitem__(self, typeargs: Any) -> Any: ... Tuple: _SpecialForm = ... Generic: _SpecialForm = ... Protocol: _SpecialForm = ... Callable: _SpecialForm = ... Type: _SpecialForm = ... ClassVar: _SpecialForm = ... class GenericMeta(type): ... # Return type that indicates a function does not return. # This type is equivalent to the None type, but the no-op Union is necessary to # distinguish the None type from the None value. NoReturn = Union[None] # Type aliases and type constructors class TypeAlias: # Class for defining generic aliases for library types. def __init__(self, target_type: type) -> None: ... def __getitem__(self, typeargs: Any) -> Any: ... 
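# --- Illustrative example (not part of types.pyi) ---------------------------
# A minimal runtime sketch of the behaviour the _StaticFunctionType docstring
# above is modelling: FunctionType is a descriptor, so a plain function stored
# on a class binds into a method, while wrapping it in staticmethod() (the
# analogy the docstring uses) suppresses that binding, and MethodType.__func__
# hands back the original plain function. All names below are invented for the
# demonstration.
import types

def greet(self):
    return "hi"

class Demo:
    plain = greet                  # descriptor protocol applies: binds to a method
    wrapped = staticmethod(greet)  # binding suppressed, stays a plain function

d = Demo()
print(type(d.plain).__name__)      # 'method'
print(type(d.wrapped).__name__)    # 'function'

bound = types.MethodType(greet, d)
print(bound.__func__ is greet)     # True: __func__ is the plain function itself
# -----------------------------------------------------------------------------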
Union = TypeAlias(object) Optional = TypeAlias(object) List = TypeAlias(object) Dict = TypeAlias(object) DefaultDict = TypeAlias(object) Set = TypeAlias(object) FrozenSet = TypeAlias(object) Counter = TypeAlias(object) Deque = TypeAlias(object) if sys.version_info >= (3, 3): ChainMap = TypeAlias(object) # Predefined type variables. AnyStr = TypeVar('AnyStr', str, bytes) # Abstract base classes. # These type variables are used by the container types. _T = TypeVar('_T') _S = TypeVar('_S') _KT = TypeVar('_KT') # Key type. _VT = TypeVar('_VT') # Value type. _T_co = TypeVar('_T_co', covariant=True) # Any type covariant containers. _V_co = TypeVar('_V_co', covariant=True) # Any type covariant containers. _KT_co = TypeVar('_KT_co', covariant=True) # Key type covariant containers. _VT_co = TypeVar('_VT_co', covariant=True) # Value type covariant containers. _T_contra = TypeVar('_T_contra', contravariant=True) # Ditto contravariant. _TC = TypeVar('_TC', bound=Type[object]) def runtime(cls: _TC) -> _TC: ... @runtime class SupportsInt(Protocol, metaclass=ABCMeta): @abstractmethod def __int__(self) -> int: ... @runtime class SupportsFloat(Protocol, metaclass=ABCMeta): @abstractmethod def __float__(self) -> float: ... @runtime class SupportsComplex(Protocol, metaclass=ABCMeta): @abstractmethod def __complex__(self) -> complex: ... @runtime class SupportsBytes(Protocol, metaclass=ABCMeta): @abstractmethod def __bytes__(self) -> bytes: ... @runtime class SupportsAbs(Protocol[_T_co]): @abstractmethod def __abs__(self) -> _T_co: ... @runtime class SupportsRound(Protocol[_T_co]): @abstractmethod def __round__(self, ndigits: int = ...) -> _T_co: ... @runtime class Reversible(Protocol[_T_co]): @abstractmethod def __reversed__(self) -> Iterator[_T_co]: ... @runtime class Sized(Protocol, metaclass=ABCMeta): @abstractmethod def __len__(self) -> int: ... @runtime class Hashable(Protocol, metaclass=ABCMeta): # TODO: This is special, in that a subclass of a hashable class may not be hashable # (for example, list vs. object). It's not obvious how to represent this. This class # is currently mostly useless for static checking. @abstractmethod def __hash__(self) -> int: ... @runtime class Iterable(Protocol[_T_co]): @abstractmethod def __iter__(self) -> Iterator[_T_co]: ... @runtime class Iterator(Iterable[_T_co], Protocol[_T_co]): @abstractmethod def __next__(self) -> _T_co: ... def __iter__(self) -> 'Iterator[_T_co]': ... class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]): @abstractmethod def __next__(self) -> _T_co: ... @abstractmethod def send(self, value: _T_contra) -> _T_co: ... @abstractmethod def throw(self, typ: Type[BaseException], val: Optional[BaseException] = ..., tb: Optional[TracebackType] = ...) -> _T_co: ... @abstractmethod def close(self) -> None: ... @abstractmethod def __iter__(self) -> 'Generator[_T_co, _T_contra, _V_co]': ... gi_code = ... # type: CodeType gi_frame = ... # type: FrameType gi_running = ... # type: bool gi_yieldfrom = ... # type: Optional[Generator] # TODO: Several types should only be defined if sys.python_version >= (3, 5): # Awaitable, AsyncIterator, AsyncIterable, Coroutine, Collection. # See https: //github.com/python/typeshed/issues/655 for why this is not easy. @runtime class Awaitable(Protocol[_T_co]): @abstractmethod def __await__(self) -> Generator[Any, None, _T_co]: ... class Coroutine(Awaitable[_V_co], Generic[_T_co, _T_contra, _V_co]): @abstractmethod def send(self, value: _T_contra) -> _T_co: ... 
@abstractmethod def throw(self, typ: Type[BaseException], val: Optional[BaseException] = ..., tb: Optional[TracebackType] = ...) -> _T_co: ... @abstractmethod def close(self) -> None: ... # NOTE: This type does not exist in typing.py or PEP 484. # The parameters corrrespond to Generator, but the 4th is the original type. class AwaitableGenerator(Generator[_T_co, _T_contra, _V_co], Awaitable[_V_co], Generic[_T_co, _T_contra, _V_co, _S]): pass @runtime class AsyncIterable(Protocol[_T_co]): @abstractmethod def __aiter__(self) -> 'AsyncIterator[_T_co]': ... @runtime class AsyncIterator(AsyncIterable[_T_co], Protocol[_T_co]): @abstractmethod def __anext__(self) -> Awaitable[_T_co]: ... def __aiter__(self) -> 'AsyncIterator[_T_co]': ... if sys.version_info >= (3, 6): class AsyncGenerator(AsyncIterator[_T_co], Generic[_T_co, _T_contra]): @abstractmethod def __anext__(self) -> Awaitable[_T_co]: ... @abstractmethod def asend(self, value: _T_contra) -> Awaitable[_T_co]: ... @abstractmethod def athrow(self, typ: Type[BaseException], val: Optional[BaseException] = ..., tb: Any = ...) -> Awaitable[_T_co]: ... @abstractmethod def aclose(self) -> Awaitable[_T_co]: ... @abstractmethod def __aiter__(self) -> 'AsyncGenerator[_T_co, _T_contra]': ... ag_await = ... # type: Any ag_code = ... # type: CodeType ag_frame = ... # type: FrameType ag_running = ... # type: bool @runtime class Container(Protocol[_T_co]): @abstractmethod def __contains__(self, x: object) -> bool: ... if sys.version_info >= (3, 6): @runtime class Collection(Sized, Iterable[_T_co], Container[_T_co], Protocol[_T_co]): ... _Collection = Collection else: @runtime class _Collection(Sized, Iterable[_T_co], Container[_T_co], Protocol[_T_co]): ... class Sequence(_Collection[_T_co], Reversible[_T_co], Generic[_T_co]): @overload @abstractmethod def __getitem__(self, i: int) -> _T_co: ... @overload @abstractmethod def __getitem__(self, s: slice) -> Sequence[_T_co]: ... # Mixin methods if sys.version_info >= (3, 5): def index(self, x: Any, start: int = ..., end: int = ...) -> int: ... else: def index(self, x: Any) -> int: ... def count(self, x: Any) -> int: ... def __contains__(self, x: object) -> bool: ... def __iter__(self) -> Iterator[_T_co]: ... def __reversed__(self) -> Iterator[_T_co]: ... class MutableSequence(Sequence[_T], Generic[_T]): @abstractmethod def insert(self, index: int, object: _T) -> None: ... @overload @abstractmethod def __setitem__(self, i: int, o: _T) -> None: ... @overload @abstractmethod def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... @overload @abstractmethod def __delitem__(self, i: int) -> None: ... @overload @abstractmethod def __delitem__(self, i: slice) -> None: ... # Mixin methods def append(self, object: _T) -> None: ... def extend(self, iterable: Iterable[_T]) -> None: ... def reverse(self) -> None: ... def pop(self, index: int = ...) -> _T: ... def remove(self, object: _T) -> None: ... def __iadd__(self, x: Iterable[_T]) -> MutableSequence[_T]: ... class AbstractSet(_Collection[_T_co], Generic[_T_co]): @abstractmethod def __contains__(self, x: object) -> bool: ... # Mixin methods def __le__(self, s: AbstractSet[Any]) -> bool: ... def __lt__(self, s: AbstractSet[Any]) -> bool: ... def __gt__(self, s: AbstractSet[Any]) -> bool: ... def __ge__(self, s: AbstractSet[Any]) -> bool: ... def __and__(self, s: AbstractSet[Any]) -> AbstractSet[_T_co]: ... def __or__(self, s: AbstractSet[_T]) -> AbstractSet[Union[_T_co, _T]]: ... def __sub__(self, s: AbstractSet[Any]) -> AbstractSet[_T_co]: ... 
def __xor__(self, s: AbstractSet[_T]) -> AbstractSet[Union[_T_co, _T]]: ... # TODO: Argument can be a more general ABC? def isdisjoint(self, s: AbstractSet[Any]) -> bool: ... class MutableSet(AbstractSet[_T], Generic[_T]): @abstractmethod def add(self, x: _T) -> None: ... @abstractmethod def discard(self, x: _T) -> None: ... # Mixin methods def clear(self) -> None: ... def pop(self) -> _T: ... def remove(self, element: _T) -> None: ... def __ior__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: ... def __iand__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ... def __ixor__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]: ... def __isub__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ... class MappingView(Sized): def __len__(self) -> int: ... class ItemsView(AbstractSet[Tuple[_KT_co, _VT_co]], MappingView, Generic[_KT_co, _VT_co]): def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ... class KeysView(AbstractSet[_KT_co], MappingView, Generic[_KT_co]): def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_KT_co]: ... class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]): def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_VT_co]: ... @runtime class ContextManager(Protocol[_T_co]): def __enter__(self) -> _T_co: ... def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType]) -> Optional[bool]: ... if sys.version_info >= (3, 5): @runtime class AsyncContextManager(Protocol[_T_co]): def __aenter__(self) -> Awaitable[_T_co]: ... def __aexit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType]) -> Awaitable[Optional[bool]]: ... class Mapping(_Collection[_KT], Generic[_KT, _VT_co]): # TODO: We wish the key type could also be covariant, but that doesn't work, # see discussion in https: //github.com/python/typing/pull/273. @abstractmethod def __getitem__(self, k: _KT) -> _VT_co: ... # Mixin methods @overload def get(self, k: _KT) -> Optional[_VT_co]: ... @overload def get(self, k: _KT, default: Union[_VT_co, _T]) -> Union[_VT_co, _T]: ... def items(self) -> AbstractSet[Tuple[_KT, _VT_co]]: ... def keys(self) -> AbstractSet[_KT]: ... def values(self) -> ValuesView[_VT_co]: ... def __contains__(self, o: object) -> bool: ... class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): @abstractmethod def __setitem__(self, k: _KT, v: _VT) -> None: ... @abstractmethod def __delitem__(self, v: _KT) -> None: ... def clear(self) -> None: ... @overload def pop(self, k: _KT) -> _VT: ... @overload def pop(self, k: _KT, default: Union[_VT, _T] = ...) -> Union[_VT, _T]: ... def popitem(self) -> Tuple[_KT, _VT]: ... def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ... # 'update' used to take a Union, but using overloading is better. # The second overloaded type here is a bit too general, because # Mapping[Tuple[_KT, _VT], W] is a subclass of Iterable[Tuple[_KT, _VT]], # but will always have the behavior of the first overloaded type # at runtime, leading to keys of a mix of types _KT and Tuple[_KT, _VT]. # We don't currently have any way of forcing all Mappings to use # the first overload, but by using overloading rather than a Union, # mypy will commit to using the first overload when the argument is # known to be a Mapping with unknown type parameters, which is closer # to the behavior we want. See mypy issue #1430. 
@overload def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def update(self, __m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... @overload def update(self, **kwargs: _VT) -> None: ... Text = str TYPE_CHECKING = True class IO(Iterator[AnyStr], Generic[AnyStr]): # TODO detach # TODO use abstract properties @property def mode(self) -> str: ... @property def name(self) -> str: ... @abstractmethod def close(self) -> None: ... @property def closed(self) -> bool: ... @abstractmethod def fileno(self) -> int: ... @abstractmethod def flush(self) -> None: ... @abstractmethod def isatty(self) -> bool: ... # TODO what if n is None? @abstractmethod def read(self, n: int = ...) -> AnyStr: ... @abstractmethod def readable(self) -> bool: ... @abstractmethod def readline(self, limit: int = ...) -> AnyStr: ... @abstractmethod def readlines(self, hint: int = ...) -> list[AnyStr]: ... @abstractmethod def seek(self, offset: int, whence: int = ...) -> int: ... @abstractmethod def seekable(self) -> bool: ... @abstractmethod def tell(self) -> int: ... @abstractmethod def truncate(self, size: Optional[int] = ...) -> int: ... @abstractmethod def writable(self) -> bool: ... # TODO buffer objects @abstractmethod def write(self, s: AnyStr) -> int: ... @abstractmethod def writelines(self, lines: Iterable[AnyStr]) -> None: ... @abstractmethod def __next__(self) -> AnyStr: ... @abstractmethod def __iter__(self) -> Iterator[AnyStr]: ... @abstractmethod def __enter__(self) -> 'IO[AnyStr]': ... @abstractmethod def __exit__(self, t: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType]) -> bool: ... class BinaryIO(IO[bytes]): # TODO readinto # TODO read1? # TODO peek? @overload @abstractmethod def write(self, s: bytes) -> int: ... @overload @abstractmethod def write(self, s: bytearray) -> int: ... @abstractmethod def __enter__(self) -> BinaryIO: ... class TextIO(IO[str]): # TODO use abstractproperty @property def buffer(self) -> BinaryIO: ... @property def encoding(self) -> str: ... @property def errors(self) -> Optional[str]: ... @property def line_buffering(self) -> int: ... # int on PyPy, bool on CPython @property def newlines(self) -> Any: ... # None, str or tuple @abstractmethod def __enter__(self) -> TextIO: ... class ByteString(Sequence[int]): ... class Match(Generic[AnyStr]): pos = 0 endpos = 0 lastindex = 0 lastgroup = ... # type: AnyStr string = ... # type: AnyStr # The regular expression object whose match() or search() method produced # this match instance. re = ... # type: 'Pattern[AnyStr]' def expand(self, template: AnyStr) -> AnyStr: ... @overload def group(self, group1: int = ...) -> AnyStr: ... @overload def group(self, group1: str) -> AnyStr: ... @overload def group(self, group1: int, group2: int, *groups: int) -> Sequence[AnyStr]: ... @overload def group(self, group1: str, group2: str, *groups: str) -> Sequence[AnyStr]: ... def groups(self, default: AnyStr = ...) -> Sequence[AnyStr]: ... def groupdict(self, default: AnyStr = ...) -> dict[str, AnyStr]: ... def start(self, group: Union[int, str] = ...) -> int: ... def end(self, group: Union[int, str] = ...) -> int: ... def span(self, group: Union[int, str] = ...) -> Tuple[int, int]: ... if sys.version_info >= (3, 6): def __getitem__(self, g: Union[int, str]) -> AnyStr: ... class Pattern(Generic[AnyStr]): flags = 0 groupindex = ... # type: Mapping[str, int] groups = 0 pattern = ... # type: AnyStr def search(self, string: AnyStr, pos: int = ..., endpos: int = ...) 
-> Match[AnyStr]: ... def match(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Match[AnyStr]: ... # New in Python 3.4 def fullmatch(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Optional[Match[AnyStr]]: ... def split(self, string: AnyStr, maxsplit: int = ...) -> list[AnyStr]: ... def findall(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> list[Any]: ... def finditer(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Iterator[Match[AnyStr]]: ... @overload def sub(self, repl: AnyStr, string: AnyStr, count: int = ...) -> AnyStr: ... @overload def sub(self, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ...) -> AnyStr: ... @overload def subn(self, repl: AnyStr, string: AnyStr, count: int = ...) -> Tuple[AnyStr, int]: ... @overload def subn(self, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ...) -> Tuple[AnyStr, int]: ... # Functions def get_type_hints(obj: Callable, globalns: Optional[dict[str, Any]] = ..., localns: Optional[dict[str, Any]] = ...) -> dict[str, Any]: ... def cast(tp: Type[_T], obj: Any) -> _T: ... # Type constructors # NamedTuple is special-cased in the type checker class NamedTuple(tuple): _fields = ... # type: Tuple[str, ...] _source = ... # type: str def __init__(self, typename: str, fields: Iterable[Tuple[str, Any]] = ..., *, verbose: bool = ..., rename: bool = ..., **kwargs: Any) -> None: ... @classmethod def _make(cls, iterable: Iterable[Any]) -> NamedTuple: ... if sys.version_info >= (3, 1): def _asdict(self) -> collections.OrderedDict[str, Any]: ... else: def _asdict(self) -> Dict[str, Any]: ... def _replace(self, **kwargs: Any) -> NamedTuple: ... def NewType(name: str, tp: Type[_T]) -> Type[_T]: ... mypy-0.560/typeshed/stdlib/3/unittest/0000755€tŠÔÚ€2›s®0000000000013215007244024021 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/unittest/__init__.pyi0000644€tŠÔÚ€2›s®0000003666713215007212026320 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for unittest from typing import ( Any, Callable, Dict, Iterable, Iterator, List, Optional, Pattern, Sequence, Set, FrozenSet, TextIO, Tuple, Type, TypeVar, Union, Generic, overload, ContextManager ) import logging import sys from types import ModuleType, TracebackType _T = TypeVar('_T') _FT = TypeVar('_FT', bound=Callable[..., Any]) _E = TypeVar('_E', bound=Exception) def expectedFailure(func: _FT) -> _FT: ... def skip(reason: str) -> Callable[[_FT], _FT]: ... def skipIf(condition: object, reason: str) -> Callable[[_FT], _FT]: ... def skipUnless(condition: object, reason: str) -> Callable[[_FT], _FT]: ... class SkipTest(Exception): def __init__(self, reason: str) -> None: ... class TestCase: failureException = ... # type: Type[BaseException] longMessage = ... # type: bool maxDiff = ... # type: Optional[int] def __init__(self, methodName: str = ...) -> None: ... def setUp(self) -> None: ... def tearDown(self) -> None: ... @classmethod def setUpClass(cls) -> None: ... @classmethod def tearDownClass(cls) -> None: ... def run(self, result: Optional[TestResult] = ...) -> TestCase: ... def skipTest(self, reason: Any) -> None: ... if sys.version_info >= (3, 4): def subTest(self, msg: Any = ..., **params: Any) -> ContextManager[None]: ... def debug(self) -> None: ... def assertEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertNotEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertTrue(self, expr: Any, msg: Any = ...) -> None: ... 
def assertFalse(self, expr: Any, msg: Any = ...) -> None: ... def assertIs(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertIsNot(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertIsNone(self, expr: Any, msg: Any = ...) -> None: ... def assertIsNotNone(self, expr: Any, msg: Any = ...) -> None: ... def assertIn(self, first: _T, second: Iterable[_T], msg: Any = ...) -> None: ... def assertNotIn(self, first: _T, second: Iterable[_T], msg: Any = ...) -> None: ... def assertIsInstance(self, obj: Any, cls: Union[type, Tuple[type, ...]], msg: Any = ...) -> None: ... def assertNotIsInstance(self, obj: Any, cls: Union[type, Tuple[type, ...]], msg: Any = ...) -> None: ... def assertGreater(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertGreaterEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertLess(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertLessEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... @overload def assertRaises(self, # type: ignore exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]], callable: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... @overload def assertRaises(self, exception: Union[Type[_E], Tuple[Type[_E], ...]], msg: Any = ...) -> _AssertRaisesContext[_E]: ... @overload def assertRaisesRegex(self, # type: ignore exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]], callable: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... @overload def assertRaisesRegex(self, exception: Union[Type[_E], Tuple[Type[_E], ...]], msg: Any = ...) -> _AssertRaisesContext[_E]: ... @overload def assertWarns(self, # type: ignore exception: Union[Type[Warning], Tuple[Type[Warning], ...]], callable: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... @overload def assertWarns(self, exception: Union[Type[Warning], Tuple[Type[Warning], ...]], msg: Any = ...) -> _AssertWarnsContext: ... @overload def assertWarnsRegex(self, # type: ignore exception: Union[Type[Warning], Tuple[Type[Warning], ...]], callable: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... @overload def assertWarnsRegex(self, exception: Union[Type[Warning], Tuple[Type[Warning], ...]], msg: Any = ...) -> _AssertWarnsContext: ... if sys.version_info >= (3, 4): def assertLogs( self, logger: Optional[logging.Logger] = ..., level: Union[int, str, None] = ... ) -> _AssertLogsContext: ... def assertAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ..., delta: float = ...) -> None: ... def assertNotAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ..., delta: float = ...) -> None: ... def assertRegex(self, text: str, regex: Union[str, Pattern[str]], msg: Any = ...) -> None: ... def assertNotRegex(self, text: str, regex: Union[str, Pattern[str]], msg: Any = ...) -> None: ... def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = ...) -> None: ... def addTypeEqualityFunc(self, typeobj: Type[Any], function: Callable[..., None]) -> None: ... def assertMultiLineEqual(self, first: str, second: str, msg: Any = ...) -> None: ... def assertSequenceEqual(self, first: Sequence[Any], second: Sequence[Any], msg: Any = ..., seq_type: Type[Sequence[Any]] = ...) -> None: ... def assertListEqual(self, first: List[Any], second: List[Any], msg: Any = ...) -> None: ... def assertTupleEqual(self, first: Tuple[Any, ...], second: Tuple[Any, ...], msg: Any = ...) -> None: ... 
def assertSetEqual(self, first: Union[Set[Any], FrozenSet[Any]], second: Union[Set[Any], FrozenSet[Any]], msg: Any = ...) -> None: ... def assertDictEqual(self, first: Dict[Any, Any], second: Dict[Any, Any], msg: Any = ...) -> None: ... def fail(self, msg: Any = ...) -> None: ... def countTestCases(self) -> int: ... def defaultTestResult(self) -> TestResult: ... def id(self) -> str: ... def shortDescription(self) -> Optional[str]: ... def addCleanup(self, function: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... def doCleanups(self) -> None: ... # below is deprecated def failUnlessEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertEquals(self, first: Any, second: Any, msg: Any = ...) -> None: ... def failIfEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertNotEquals(self, first: Any, second: Any, msg: Any = ...) -> None: ... def failUnless(self, expr: bool, msg: Any = ...) -> None: ... def assert_(self, expr: bool, msg: Any = ...) -> None: ... def failIf(self, expr: bool, msg: Any = ...) -> None: ... @overload def failUnlessRaises(self, # type: ignore exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]], callable: Callable[..., Any] = ..., *args: Any, **kwargs: Any) -> None: ... @overload def failUnlessRaises(self, exception: Union[Type[_E], Tuple[Type[_E], ...]], msg: Any = ...) -> _AssertRaisesContext[_E]: ... def failUnlessAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... def assertAlmostEquals(self, first: float, second: float, places: int = ..., msg: Any = ..., delta: float = ...) -> None: ... def failIfAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... def assertNotAlmostEquals(self, first: float, second: float, places: int = ..., msg: Any = ..., delta: float = ...) -> None: ... def assertRegexpMatches(self, text: str, regex: Union[str, Pattern[str]], msg: Any = ...) -> None: ... @overload def assertRaisesRegexp(self, # type: ignore exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]], callable: Callable[..., Any] = ..., *args: Any, **kwargs: Any) -> None: ... @overload def assertRaisesRegexp(self, exception: Union[Type[_E], Tuple[Type[_E], ...]], msg: Any = ...) -> _AssertRaisesContext[_E]: ... class FunctionTestCase(TestCase): def __init__(self, testFunc: Callable[[], None], setUp: Optional[Callable[[], None]] = ..., tearDown: Optional[Callable[[], None]] = ..., description: Optional[str] = ...) -> None: ... class _AssertRaisesContext(Generic[_E]): exception = ... # type: _E def __enter__(self) -> _AssertRaisesContext[_E]: ... def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception], exc_tb: Optional[TracebackType]) -> bool: ... class _AssertWarnsContext: warning = ... # type: Warning filename = ... # type: str lineno = ... # type: int def __enter__(self) -> _AssertWarnsContext: ... def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception], exc_tb: Optional[TracebackType]) -> bool: ... class _AssertLogsContext: records = ... # type: List[logging.LogRecord] output = ... # type: List[str] def __enter__(self) -> _AssertLogsContext: ... def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception], exc_tb: Optional[TracebackType]) -> bool: ... _TestType = Union[TestCase, TestSuite] class TestSuite(Iterable[_TestType]): def __init__(self, tests: Iterable[_TestType] = ...) -> None: ... def addTest(self, test: _TestType) -> None: ... 
def addTests(self, tests: Iterable[_TestType]) -> None: ... def run(self, result: TestResult) -> TestResult: ... def debug(self) -> None: ... def countTestCases(self) -> int: ... def __iter__(self) -> Iterator[_TestType]: ... class TestLoader: if sys.version_info >= (3, 5): errors = ... # type: List[Type[BaseException]] testMethodPrefix = ... # type: str sortTestMethodsUsing = ... # type: Callable[[str, str], bool] suiteClass = ... # type: Callable[[List[TestCase]], TestSuite] def loadTestsFromTestCase(self, testCaseClass: Type[TestCase]) -> TestSuite: ... if sys.version_info >= (3, 5): def loadTestsFromModule(self, module: ModuleType, *, pattern: Any = ...) -> TestSuite: ... else: def loadTestsFromModule(self, module: ModuleType) -> TestSuite: ... def loadTestsFromName(self, name: str, module: Optional[ModuleType] = ...) -> TestSuite: ... def loadTestsFromNames(self, names: Sequence[str], module: Optional[ModuleType] = ...) -> TestSuite: ... def getTestCaseNames(self, testCaseClass: Type[TestCase]) -> Sequence[str]: ... def discover(self, start_dir: str, pattern: str = ..., top_level_dir: Optional[str] = ...) -> TestSuite: ... _SysExcInfoType = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] class TestResult: errors = ... # type: List[Tuple[TestCase, str]] failures = ... # type: List[Tuple[TestCase, str]] skipped = ... # type: List[Tuple[TestCase, str]] expectedFailures = ... # type: List[Tuple[TestCase, str]] unexpectedSuccesses = ... # type: List[TestCase] shouldStop = ... # type: bool testsRun = ... # type: int buffer = ... # type: bool failfast = ... # type: bool tb_locals = ... # type: bool def wasSuccessful(self) -> bool: ... def stop(self) -> None: ... def startTest(self, test: TestCase) -> None: ... def stopTest(self, test: TestCase) -> None: ... def startTestRun(self) -> None: ... def stopTestRun(self) -> None: ... def addError(self, test: TestCase, err: _SysExcInfoType) -> None: ... def addFailure(self, test: TestCase, err: _SysExcInfoType) -> None: ... def addSuccess(self, test: TestCase) -> None: ... def addSkip(self, test: TestCase, reason: str) -> None: ... def addExpectedFailure(self, test: TestCase, err: _SysExcInfoType) -> None: ... def addUnexpectedSuccess(self, test: TestCase) -> None: ... if sys.version_info >= (3, 4): def addSubTest(self, test: TestCase, subtest: TestCase, outcome: Optional[_SysExcInfoType]) -> None: ... class TextTestResult(TestResult): def __init__(self, stream: TextIO = ..., descriptions: bool = ..., verbosity: int = ...) -> None: ... _TextTestResult = TextTestResult defaultTestLoader = ... # type: TestLoader _ResultClassType = Callable[[TextIO, bool, int], TestResult] class TestRunner: def run(self, test: Union[TestSuite, TestCase]) -> TestResult: ... class TextTestRunner(TestRunner): if sys.version_info >= (3, 5): def __init__(self, stream: Optional[TextIO] = ..., descriptions: bool = ..., verbosity: int = ..., failfast: bool = ..., buffer: bool = ..., resultclass: Optional[_ResultClassType] = ..., warnings: Optional[Type[Warning]] = ..., *, tb_locals: bool = ...) -> None: ... else: def __init__(self, stream: Optional[TextIO] = ..., descriptions: bool = ..., verbosity: int = ..., failfast: bool = ..., buffer: bool = ..., resultclass: Optional[_ResultClassType] = ..., warnings: Optional[Type[Warning]] = ...) -> None: ... def _makeResult(self) -> TestResult: ... 
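# --- Illustrative example (not part of unittest/__init__.pyi) ----------------
# A small usage sketch consistent with the TestLoader / TextTestRunner /
# TestResult signatures declared above; the test class and method names are
# made up purely for illustration.
import unittest

class ExampleTest(unittest.TestCase):
    def test_upper(self) -> None:
        self.assertEqual("abc".upper(), "ABC")

loader = unittest.TestLoader()
suite = loader.loadTestsFromTestCase(ExampleTest)          # -> TestSuite
result = unittest.TextTestRunner(verbosity=2).run(suite)   # -> TestResult
print(result.wasSuccessful())
# -----------------------------------------------------------------------------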
if sys.version_info >= (3, 4): _DefaultTestType = Union[str, Iterable[str], None] else: _DefaultTestType = Union[str, None] # not really documented class TestProgram: result = ... # type: TestResult def main(module: str = ..., defaultTest: _DefaultTestType = ..., argv: Optional[List[str]] = ..., testRunner: Union[Type[TestRunner], TestRunner, None] = ..., testLoader: TestLoader = ..., exit: bool = ..., verbosity: int = ..., failfast: Optional[bool] = ..., catchbreak: Optional[bool] = ..., buffer: Optional[bool] = ..., warnings: Optional[str] = ...) -> TestProgram: ... def installHandler() -> None: ... def registerResult(result: TestResult) -> None: ... def removeResult(result: TestResult) -> None: ... @overload def removeHandler() -> None: ... @overload def removeHandler(function: _FT) -> _FT: ... mypy-0.560/typeshed/stdlib/3/unittest/mock.pyi0000644€tŠÔÚ€2›s®0000002061613215007212025475 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for unittest.mock import sys from typing import Any, Optional if sys.version_info >= (3, 3): FILTER_DIR = ... # type: Any class _slotted: ... class _SentinelObject: name = ... # type: Any def __init__(self, name: Any) -> None: ... class _Sentinel: def __init__(self) -> None: ... def __getattr__(self, name: str) -> Any: ... sentinel = ... # type: Any DEFAULT = ... # type: Any class _CallList(list): def __contains__(self, value: Any) -> bool: ... class _MockIter: obj = ... # type: Any def __init__(self, obj: Any) -> None: ... def __iter__(self) -> Any: ... def __next__(self) -> Any: ... class Base: def __init__(self, *args: Any, **kwargs: Any) -> None: ... # TODO: Get rid of the # type: ignore below. # It is currently required to shut up mypy when run with `--strict` # or `--disallow-subclassing-any`. The `Any` base class is currently # the only way to allow passing an instance of `Mock` to functions # expecting other classes (as is Mock's purpose) class NonCallableMock(Any): # type: ignore def __new__(cls, *args: Any, **kw: Any) -> Any: ... def __init__(self, spec: Optional[Any] = ..., wraps: Optional[Any] = ..., name: Optional[Any] = ..., spec_set: Optional[Any] = ..., parent: Optional[Any] = ..., _spec_state: Optional[Any] = ..., _new_name: Any = ..., _new_parent: Optional[Any] = ..., _spec_as_instance: Any = ..., _eat_self: Optional[Any] = ..., unsafe: Any = ..., **kwargs: Any) -> None: ... def attach_mock(self, mock: Any, attribute: Any) -> Any: ... def mock_add_spec(self, spec: Any, spec_set: Any = ...) -> Any: ... return_value = ... # type: Any __class__ = ... # type: type called = ... # type: Any call_count = ... # type: Any call_args = ... # type: Any call_args_list = ... # type: Any mock_calls = ... # type: Any side_effect = ... # type: Any method_calls = ... # type: Any def reset_mock(self, visited: Optional[bool] = ...) -> None: ... def configure_mock(self, **kwargs: Any) -> None: ... def __getattr__(self, name: Any) -> Any: ... def __dir__(self) -> Any: ... def __setattr__(self, name: Any, value: Any) -> None: ... def __delattr__(self, name: Any) -> None: ... def assert_not_called(_mock_self) -> None: ... def assert_called_with(_mock_self, *args: Any, **kwargs: Any) -> None: ... def assert_called_once_with(_mock_self, *args: Any, **kwargs: Any) -> None: ... def assert_has_calls(self, calls: Any, any_order: bool = ...) -> None: ... def assert_any_call(self, *args: Any, **kwargs: Any) -> None: ... class CallableMixin(Base): side_effect = ... 
# type: Any def __init__(self, spec: Optional[Any] = ..., side_effect: Optional[Any] = ..., return_value: Any = ..., wraps: Optional[Any] = ..., name: Optional[Any] = ..., spec_set: Optional[Any] = ..., parent: Optional[Any] = ..., _spec_state: Optional[Any] = ..., _new_name: Any = ..., _new_parent: Optional[Any] = ..., **kwargs: Any) -> None: ... def __call__(_mock_self, *args: Any, **kwargs: Any) -> Any: ... class Mock(CallableMixin, NonCallableMock): def __init__(self, spec: Any = ..., spec_set: Any = ..., side_effect: Any = ..., return_value: Any = ..., wraps: Any = ..., name: Any = ..., **kwargs: Any) -> None: ... class _patch: attribute_name = ... # type: Any getter = ... # type: Any attribute = ... # type: Any new = ... # type: Any new_callable = ... # type: Any spec = ... # type: Any create = ... # type: bool has_local = ... # type: Any spec_set = ... # type: Any autospec = ... # type: Any kwargs = ... # type: Any additional_patchers = ... # type: Any def __init__(self, getter: Any, attribute: Any, new: Any, spec: Any, create: Any, spec_set: Any, autospec: Any, new_callable: Any, kwargs: Any) -> None: ... def copy(self) -> Any: ... def __call__(self, func: Any) -> Any: ... def decorate_class(self, klass: Any) -> Any: ... def decorate_callable(self, func: Any) -> Any: ... def get_original(self) -> Any: ... target = ... # type: Any temp_original = ... # type: Any is_local = ... # type: Any def __enter__(self) -> Any: ... def __exit__(self, *exc_info: Any) -> Any: ... def start(self) -> Any: ... def stop(self) -> Any: ... class _patcher: def __call__(self, target: Any, new: Optional[Any] = ..., spec: Optional[Any] = ..., create: bool = ..., spec_set: Optional[Any] = ..., autospec: Optional[Any] = ..., new_callable: Optional[Any] = ..., **kwargs: Any) -> Any: ... def object(self, target: Any, attribute: str, new: Optional[Any] = ..., spec: Optional[Any] = ..., create: bool = ..., spec_set: Optional[Any] = ..., autospec: Optional[Any] = ..., new_callable: Optional[Any] = ..., **kwargs: Any) -> _patch: ... def multiple(self, target: Any, spec: Optional[Any] = ..., create: bool = ..., spec_set: Optional[Any] = ..., autospec: Optional[Any] = ..., new_callable: Optional[Any] = ..., **kwargs: Any) -> Any: ... patch = ... # type: _patcher class _patch_dict: in_dict = ... # type: Any values = ... # type: Any clear = ... # type: Any def __init__(self, in_dict: Any, values: Any = ..., clear: Any = ..., **kwargs: Any) -> None: ... def __call__(self, f: Any) -> Any: ... def decorate_class(self, klass: Any) -> Any: ... def __enter__(self) -> Any: ... def __exit__(self, *args: Any) -> Any: ... start = ... # type: Any stop = ... # type: Any class MagicMixin: def __init__(self, *args: Any, **kw: Any) -> None: ... class NonCallableMagicMock(MagicMixin, NonCallableMock): def __init__(self) -> None: ... def mock_add_spec(self, spec: Any, spec_set: Any = ...) -> Any: ... class MagicMock(MagicMixin, Mock): def __init__(self, spec: Any = ..., spec_set: Any = ..., side_effect: Any = ..., return_value: Any = ..., wraps: Any = ..., name: Any = ..., **kwargs: Any) -> None: ... def mock_add_spec(self, spec: Any, spec_set: Any = ...) -> Any: ... class MagicProxy: name = ... # type: Any parent = ... # type: Any def __init__(self, name: Any, parent: Any) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def create_mock(self) -> Any: ... def __get__(self, obj: Any, _type: Optional[Any] = ...) -> Any: ... class _ANY: def __eq__(self, other: Any) -> bool: ... 
def __ne__(self, other: Any) -> bool: ... ANY = ... # type: Any class _Call(tuple): def __new__(cls, value: Any = ..., name: Optional[Any] = ..., parent: Optional[Any] = ..., two: bool = ..., from_kall: bool = ...) -> Any: ... name = ... # type: Any parent = ... # type: Any from_kall = ... # type: Any def __init__(self, value: Any = ..., name: Optional[Any] = ..., parent: Optional[Any] = ..., two: bool = ..., from_kall: bool = ...) -> None: ... def __eq__(self, other: Any) -> bool: ... __ne__ = ... # type: Any def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __getattr__(self, attr: Any) -> Any: ... def count(self, *args: Any, **kwargs: Any) -> Any: ... def index(self, *args: Any, **kwargs: Any) -> Any: ... def call_list(self) -> Any: ... call = ... # type: Any def create_autospec(spec: Any, spec_set: Any = ..., instance: Any = ..., _parent: Optional[Any] = ..., _name: Optional[Any] = ..., **kwargs: Any) -> Any: ... class _SpecState: spec = ... # type: Any ids = ... # type: Any spec_set = ... # type: Any parent = ... # type: Any instance = ... # type: Any name = ... # type: Any def __init__(self, spec: Any, spec_set: Any = ..., parent: Optional[Any] = ..., name: Optional[Any] = ..., ids: Optional[Any] = ..., instance: Any = ...) -> None: ... def mock_open(mock: Optional[Any] = ..., read_data: Any = ...) -> Any: ... class PropertyMock(Mock): def __get__(self, obj: Any, obj_type: Any) -> Any: ... def __set__(self, obj: Any, val: Any) -> Any: ... mypy-0.560/typeshed/stdlib/3/urllib/0000755€tŠÔÚ€2›s®0000000000013215007244023433 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/urllib/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212025676 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/urllib/error.pyi0000644€tŠÔÚ€2›s®0000000051213215007212025300 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Union from urllib.response import addinfourl # Stubs for urllib.error class URLError(IOError): reason = ... # type: Union[str, BaseException] class HTTPError(URLError, addinfourl): code = ... # type: int headers = ... # type: Dict[str, str] class ContentTooShortError(URLError): ... mypy-0.560/typeshed/stdlib/3/urllib/parse.pyi0000644€tŠÔÚ€2›s®0000001222113215007212025261 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for urllib.parse from typing import Any, List, Dict, Tuple, AnyStr, Generic, overload, Sequence, Mapping, Union, NamedTuple, Callable import sys _Str = Union[bytes, str] __all__ = ... # type: Tuple[str] uses_relative = ... # type: List[str] uses_netloc = ... # type: List[str] uses_params = ... # type: List[str] non_hierarchical = ... # type: List[str] uses_query = ... # type: List[str] uses_fragment = ... # type: List[str] scheme_chars = ... # type: str MAX_CACHE_SIZE = 0 class _ResultMixinBase(Generic[AnyStr]): def geturl(self) -> AnyStr: ... class _ResultMixinStr(_ResultMixinBase[str]): def encode(self, encoding: str = ..., errors: str = ...) -> '_ResultMixinBytes': ... class _ResultMixinBytes(_ResultMixinBase[str]): def decode(self, encoding: str = ..., errors: str = ...) -> '_ResultMixinStr': ... class _NetlocResultMixinBase(Generic[AnyStr]): username = ... # type: AnyStr password = ... # type: AnyStr hostname = ... # type: AnyStr port = ... # type: int class _NetlocResultMixinStr(_NetlocResultMixinBase[str], _ResultMixinStr): ... class _NetlocResultMixinBytes(_NetlocResultMixinBase[str], _ResultMixinBytes): ... class _DefragResultBase(tuple, Generic[AnyStr]): url = ... 
# type: AnyStr fragment = ... # type: AnyStr _SplitResultBase = NamedTuple( '_SplitResultBase', [ ('scheme', str), ('netloc', str), ('path', str), ('query', str), ('fragment', str) ] ) _SplitResultBytesBase = NamedTuple( '_SplitResultBytesBase', [ ('scheme', bytes), ('netloc', bytes), ('path', bytes), ('query', bytes), ('fragment', bytes) ] ) _ParseResultBase = NamedTuple( '_ParseResultBase', [ ('scheme', str), ('netloc', str), ('path', str), ('params', str), ('query', str), ('fragment', str) ] ) _ParseResultBytesBase = NamedTuple( '_ParseResultBytesBase', [ ('scheme', bytes), ('netloc', bytes), ('path', bytes), ('params', bytes), ('query', bytes), ('fragment', bytes) ] ) # Structured result objects for string data class DefragResult(_DefragResultBase[str], _ResultMixinStr): ... class SplitResult(_SplitResultBase, _NetlocResultMixinStr): ... class ParseResult(_ParseResultBase, _NetlocResultMixinStr): ... # Structured result objects for bytes data class DefragResultBytes(_DefragResultBase[bytes], _ResultMixinBytes): ... class SplitResultBytes(_SplitResultBytesBase, _NetlocResultMixinBytes): ... class ParseResultBytes(_ParseResultBytesBase, _NetlocResultMixinBytes): ... def parse_qs(qs: AnyStr, keep_blank_values: bool = ..., strict_parsing: bool = ..., encoding: str = ..., errors: str = ...) -> Dict[AnyStr, List[AnyStr]]: ... def parse_qsl(qs: AnyStr, keep_blank_values: bool = ..., strict_parsing: bool = ..., encoding: str = ..., errors: str = ...) -> List[Tuple[AnyStr, AnyStr]]: ... @overload def quote(string: str, safe: _Str = ..., encoding: str = ..., errors: str = ...) -> str: ... @overload def quote(string: bytes, safe: _Str = ...) -> str: ... def quote_from_bytes(bs: bytes, safe: _Str = ...) -> str: ... @overload def quote_plus(string: str, safe: _Str = ..., encoding: str = ..., errors: str = ...) -> str: ... @overload def quote_plus(string: bytes, safe: _Str = ...) -> str: ... def unquote(string: str, encoding: str = ..., errors: str = ...) -> str: ... def unquote_to_bytes(string: _Str) -> bytes: ... def unquote_plus(string: str, encoding: str = ..., errors: str = ...) -> str: ... @overload def urldefrag(url: str) -> DefragResult: ... @overload def urldefrag(url: bytes) -> DefragResultBytes: ... if sys.version_info >= (3, 5): def urlencode(query: Union[Mapping[Any, Any], Mapping[Any, Sequence[Any]], Sequence[Tuple[Any, Any]], Sequence[Tuple[Any, Sequence[Any]]]], doseq: bool = ..., safe: AnyStr = ..., encoding: str = ..., errors: str = ..., quote_via: Callable[[str, AnyStr, str, str], str] = ...) -> str: ... else: def urlencode(query: Union[Mapping[Any, Any], Mapping[Any, Sequence[Any]], Sequence[Tuple[Any, Any]], Sequence[Tuple[Any, Sequence[Any]]]], doseq: bool = ..., safe: AnyStr = ..., encoding: str = ..., errors: str = ...) -> str: ... def urljoin(base: AnyStr, url: AnyStr, allow_fragments: bool = ...) -> AnyStr: ... @overload def urlparse(url: str, scheme: str = ..., allow_fragments: bool = ...) -> ParseResult: ... @overload def urlparse(url: bytes, scheme: bytes = ..., allow_fragments: bool = ...) -> ParseResultBytes: ... @overload def urlsplit(url: str, scheme: str = ..., allow_fragments: bool = ...) -> SplitResult: ... @overload def urlsplit(url: bytes, scheme: bytes = ..., allow_fragments: bool = ...) -> SplitResultBytes: ... @overload def urlunparse(components: Sequence[AnyStr]) -> AnyStr: ... @overload def urlunparse(components: Tuple[AnyStr, AnyStr, AnyStr, AnyStr, AnyStr, AnyStr]) -> AnyStr: ... @overload def urlunsplit(components: Sequence[AnyStr]) -> AnyStr: ... 
@overload def urlunsplit(components: Tuple[AnyStr, AnyStr, AnyStr, AnyStr, AnyStr]) -> AnyStr: ... mypy-0.560/typeshed/stdlib/3/urllib/request.pyi0000644€tŠÔÚ€2›s®0000002170313215007212025644 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for urllib.request (Python 3.4) from typing import ( Any, Callable, Dict, List, IO, Mapping, Optional, Sequence, Tuple, TypeVar, Union, overload, ) from http.client import HTTPResponse, HTTPMessage from http.cookiejar import CookieJar from email.message import Message from urllib.response import addinfourl import ssl import sys _T = TypeVar('_T') _UrlopenRet = Union[HTTPResponse, addinfourl] def urlopen( url: Union[str, 'Request'], data: Optional[bytes] = ..., timeout: float = ..., *, cafile: Optional[str] = ..., capath: Optional[str] = ..., cadefault: bool = ..., context: Optional[ssl.SSLContext] = ... ) -> _UrlopenRet: ... def install_opener(opener: OpenerDirector) -> None: ... def build_opener( *handlers: Union[BaseHandler, Callable[[], BaseHandler]] ) -> OpenerDirector: ... def url2pathname(path: str) -> str: ... def pathname2url(path: str) -> str: ... def getproxies() -> Dict[str, str]: ... def parse_http_list(s: str) -> List[str]: ... def parse_keqv_list(l: List[str]) -> Dict[str, str]: ... class Request: if sys.version_info >= (3, 4): @property def full_url(self) -> str: ... @full_url.setter def full_url(self, value: str) -> None: ... @full_url.deleter def full_url(self) -> None: ... else: full_url = ... # type: str type = ... # type: str host = ... # type: str origin_req_host = ... # type: str selector = ... # type: str data = ... # type: Optional[bytes] unverifiable = ... # type: bool method = ... # type: Optional[str] def __init__(self, url: str, data: Optional[bytes] = ..., headers: Dict[str, str] =..., origin_req_host: Optional[str] = ..., unverifiable: bool = ..., method: Optional[str] = ...) -> None: ... def get_method(self) -> str: ... def add_header(self, key: str, val: str) -> None: ... def add_unredirected_header(self, key: str, val: str) -> None: ... def has_header(self, header_name: str) -> bool: ... if sys.version_info >= (3, 4): def remove_header(self, header_name: str) -> None: ... def get_full_url(self) -> str: ... def set_proxy(self, host: str, type: str) -> None: ... @overload def get_header(self, header_name: str) -> Optional[str]: ... @overload def get_header(self, header_name: str, default: _T) -> Union[str, _T]: ... def header_items(self) -> List[Tuple[str, str]]: ... class OpenerDirector: def add_handler(self, handler: BaseHandler) -> None: ... def open(self, url: Union[str, Request], data: Optional[bytes] = ..., timeout: float = ...) -> _UrlopenRet: ... def error(self, proto: str, *args: Any) -> _UrlopenRet: ... class BaseHandler: parent = ... # type: OpenerDirector def add_parent(self, parent: OpenerDirector) -> None: ... def close(self) -> None: ... def http_error_nnn(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> _UrlopenRet: ... class HTTPDefaultErrorHandler(BaseHandler): ... class HTTPRedirectHandler(BaseHandler): def redirect_request(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str], newurl: str) -> Optional[Request]: ... def http_error_301(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... def http_error_302(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... 
def http_error_303(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... def http_error_307(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... class HTTPCookieProcessor(BaseHandler): cookiejar = ... # type: CookieJar def __init__(self, cookiejar: Optional[CookieJar] = ...) -> None: ... class ProxyHandler(BaseHandler): def __init__(self, proxies: Optional[Dict[str, str]] = ...) -> None: ... # TODO add a method for every (common) proxy protocol class HTTPPasswordMgr: def add_password(self, realm: str, uri: Union[str, Sequence[str]], user: str, passwd: str) -> None: ... def find_user_password(self, realm: str, authuri: str) -> Tuple[Optional[str], Optional[str]]: ... class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): def add_password(self, realm: str, uri: Union[str, Sequence[str]], user: str, passwd: str) -> None: ... def find_user_password(self, realm: str, authuri: str) -> Tuple[Optional[str], Optional[str]]: ... if sys.version_info >= (3, 5): class HTTPPasswordMgrWithPriorAuth(HTTPPasswordMgrWithDefaultRealm): def add_password(self, realm: str, uri: Union[str, Sequence[str]], user: str, passwd: str, is_authenticated: bool = ...) -> None: ... def update_authenticated(self, uri: Union[str, Sequence[str]], is_authenticated: bool = ...) -> None: ... def is_authenticated(self, authuri: str) -> bool: ... class AbstractBasicAuthHandler: def __init__(self, password_mgr: Optional[HTTPPasswordMgr] = ...) -> None: ... def http_error_auth_reqed(self, authreq: str, host: str, req: Request, headers: Mapping[str, str]) -> None: ... class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): def http_error_401(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): def http_error_407(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... class AbstractDigestAuthHandler: def __init__(self, passwd: Optional[HTTPPasswordMgr] = ...) -> None: ... def reset_retry_count(self) -> None: ... def http_error_auth_reqed(self, auth_header: str, host: str, req: Request, headers: Mapping[str, str]) -> None: ... def retry_http_digest_auth(self, req: Request, auth: str) -> Optional[_UrlopenRet]: ... def get_cnonce(self, nonce: str) -> str: ... def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str: ... def get_algorithm_impls(self, algorithm: str) -> Tuple[Callable[[str], str], Callable[[str, str], str]]: ... def get_entity_digest(self, data: Optional[bytes], chal: Mapping[str, str]) -> Optional[str]: ... class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): def http_error_401(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): def http_error_407(self, req: Request, fp: IO[str], code: int, msg: int, hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ... class HTTPHandler(BaseHandler): def http_open(self, req: Request) -> _UrlopenRet: ... class HTTPSHandler(BaseHandler): def __init__(self, debuglevel: int = ..., context: Optional[ssl.SSLContext] = ..., check_hostname: bool = ...) -> None: ... def https_open(self, req: Request) -> _UrlopenRet: ... class FileHandler(BaseHandler): def file_open(self, req: Request) -> _UrlopenRet: ... 
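# --- Illustrative example (not part of urllib/request.pyi) -------------------
# A call pattern consistent with the Request / urlopen / build_opener /
# install_opener / HTTPCookieProcessor signatures declared above. The URL and
# header values are placeholders invented for the sketch.
import urllib.request
from http.cookiejar import CookieJar

opener = urllib.request.build_opener(
    urllib.request.HTTPCookieProcessor(CookieJar()))
urllib.request.install_opener(opener)

req = urllib.request.Request(
    "https://example.org/",
    headers={"User-Agent": "stub-demo"},
    method="GET",
)
with urllib.request.urlopen(req, timeout=10.0) as resp:
    body = resp.read()
print(len(body))
# -----------------------------------------------------------------------------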
class DataHandler(BaseHandler): def data_open(self, req: Request) -> _UrlopenRet: ... class FTPHandler(BaseHandler): def ftp_open(self, req: Request) -> _UrlopenRet: ... class CacheFTPHandler(FTPHandler): def setTimeout(self, t: float) -> None: ... def setMaxConns(self, m: int) -> None: ... class UnknownHandler(BaseHandler): def unknown_open(self, req: Request) -> _UrlopenRet: ... class HTTPErrorProcessor(BaseHandler): def http_response(self) -> _UrlopenRet: ... def https_response(self) -> _UrlopenRet: ... def urlretrieve(url: str, filename: Optional[str] = ..., reporthook: Optional[Callable[[int, int, int], None]] = ..., data: Optional[bytes] = ...) -> Tuple[str, HTTPMessage]: ... def urlcleanup() -> None: ... class URLopener: version = ... # type: str def __init__(self, proxies: Optional[Dict[str, str]] = ..., **x509: str) -> None: ... def open(self, fullurl: str, data: Optional[bytes] = ...) -> _UrlopenRet: ... def open_unknown(self, fullurl: str, data: Optional[bytes] = ...) -> _UrlopenRet: ... def retrieve(self, url: str, filename: Optional[str] = ..., reporthook: Optional[Callable[[int, int, int], None]] = ..., data: Optional[bytes] = ...) -> Tuple[str, Optional[Message]]: ... class FancyURLopener(URLopener): def prompt_user_passwd(self, host: str, realm: str) -> Tuple[str, str]: ... mypy-0.560/typeshed/stdlib/3/urllib/response.pyi0000644€tŠÔÚ€2›s®0000000051613215007212026011 0ustar jukkaDROPBOX\Domain Users00000000000000# private module, we only expose what's needed from typing import BinaryIO, Mapping, Optional from types import TracebackType class addinfourl(BinaryIO): headers = ... # type: Mapping[str, str] url = ... # type: str code = ... # type: int def info(self) -> Mapping[str, str]: ... def geturl(self) -> str: ... mypy-0.560/typeshed/stdlib/3/urllib/robotparser.pyi0000644€tŠÔÚ€2›s®0000000130513215007212026512 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for urllib.robotparser (Python 3.4) from typing import Iterable, NamedTuple, Optional import sys _RequestRate = NamedTuple('_RequestRate', [('requests', int), ('seconds', int)]) class RobotFileParser: def __init__(self, url: str = ...) -> None: ... def set_url(self, url: str) -> None: ... def read(self) -> None: ... def parse(self, lines: Iterable[str]) -> None: ... def can_fetch(self, user_agent: str, url: str) -> bool: ... def mtime(self) -> int: ... def modified(self) -> None: ... if sys.version_info >= (3, 6): def crawl_delay(self, useragent: str) -> Optional[str]: ... def request_rate(self, useragent: str) -> Optional[_RequestRate]: ... mypy-0.560/typeshed/stdlib/3/wsgiref/0000755€tŠÔÚ€2›s®0000000000013215007244023610 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/wsgiref/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212026053 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3/wsgiref/types.pyi0000644€tŠÔÚ€2›s®0000000232513215007212025474 0ustar jukkaDROPBOX\Domain Users00000000000000# Type declaration for a WSGI Function in Python 3 # # wsgiref/types.py doesn't exist and neither does WSGIApplication, it's a type # provided for type checking purposes. # # This means you cannot simply import wsgiref.types in your code. Instead, # use the `TYPE_CHECKING` flag from the typing module: # # from typing import TYPE_CHECKING # # if TYPE_CHECKING: # from wsgiref.types import WSGIApplication # # This import is now only taken into account by the type checker. Consequently, # you need to use 'WSGIApplication' and not simply WSGIApplication when type # hinting your code. 
Otherwise Python will raise NameErrors. from typing import Callable, Dict, Iterable, List, Optional, Tuple, Type, Union, Any from types import TracebackType _exc_info = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] WSGIEnvironment = Dict[str, Any] WSGIApplication = Callable[ [ WSGIEnvironment, Union[ Callable[[str, List[Tuple[str, str]]], Callable[[Union[bytes, str]], None]], Callable[[str, List[Tuple[str, str]], _exc_info], Callable[[Union[bytes, str]], None]] ] ], Iterable[Union[bytes, str]] ] mypy-0.560/typeshed/stdlib/3/wsgiref/validate.pyi0000644€tŠÔÚ€2›s®0000000216413215007212026122 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class WSGIWarning(Warning): ... def validator(application): ... class InputWrapper: input = ... # type: Any def __init__(self, wsgi_input): ... def read(self, *args): ... def readline(self, *args): ... def readlines(self, *args): ... def __iter__(self): ... def close(self): ... class ErrorWrapper: errors = ... # type: Any def __init__(self, wsgi_errors): ... def write(self, s): ... def flush(self): ... def writelines(self, seq): ... def close(self): ... class WriteWrapper: writer = ... # type: Any def __init__(self, wsgi_writer): ... def __call__(self, s): ... class PartialIteratorWrapper: iterator = ... # type: Any def __init__(self, wsgi_iterator): ... def __iter__(self): ... class IteratorWrapper: original_iterator = ... # type: Any iterator = ... # type: Any closed = ... # type: Any check_start_response = ... # type: Any def __init__(self, wsgi_iterator, check_start_response): ... def __iter__(self): ... def __next__(self): ... def close(self): ... def __del__(self): ... mypy-0.560/typeshed/stdlib/3.3/0000755€tŠÔÚ€2›s®0000000000013215007244022303 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3.3/ipaddress.pyi0000644€tŠÔÚ€2›s®0000001200113215007212024771 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import (Any, Container, Generic, Iterable, Iterator, Optional, overload, SupportsInt, Tuple, TypeVar, Union) # Undocumented length constants IPV4LENGTH: int IPV6LENGTH: int _A = TypeVar("_A", IPv4Address, IPv6Address) _N = TypeVar("_N", IPv4Network, IPv6Network) _T = TypeVar("_T") def ip_address(address: object) -> Union[IPv4Address, IPv6Address]: ... def ip_network(address: object, strict: bool = ...) -> Union[IPv4Network, IPv6Network]: ... def ip_interface(address: object) -> Union[IPv4Interface, IPv6Interface]: ... class _IPAddressBase: def __eq__(self, other: Any) -> bool: ... def __ge__(self: _T, other: _T) -> bool: ... def __gt__(self: _T, other: _T) -> bool: ... def __le__(self: _T, other: _T) -> bool: ... def __lt__(self: _T, other: _T) -> bool: ... def __ne__(self, other: Any) -> bool: ... @property def compressed(self) -> str: ... @property def exploded(self) -> str: ... if sys.version_info >= (3, 5): @property def reverse_pointer(self) -> str: ... @property def version(self) -> int: ... class _BaseAddress(_IPAddressBase, SupportsInt): def __init__(self, address: object) -> None: ... def __add__(self: _T, other: int) -> _T: ... def __hash__(self) -> int: ... def __int__(self) -> int: ... def __sub__(self: _T, other: int) -> _T: ... if sys.version_info >= (3, 4): @property def is_global(self) -> bool: ... @property def is_link_local(self) -> bool: ... @property def is_loopback(self) -> bool: ... @property def is_multicast(self) -> bool: ... @property def is_private(self) -> bool: ... @property def is_reserved(self) -> bool: ... 
@property def is_unspecified(self) -> bool: ... @property def max_prefixlen(self) -> int: ... @property def packed(self) -> bytes: ... class _BaseNetwork(_IPAddressBase, Container, Iterable[_A], Generic[_A]): network_address: _A netmask: _A def __init__(self, address: object, strict: bool = ...) -> None: ... def __contains__(self, other: Any) -> bool: ... def __getitem__(self, n: int) -> _A: ... def __iter__(self) -> Iterator[_A]: ... def address_exclude(self: _T, other: _T) -> Iterator[_T]: ... @property def broadcast_address(self) -> _A: ... def compare_networks(self: _T, other: _T) -> int: ... def hosts(self) -> Iterator[_A]: ... @property def is_global(self) -> bool: ... @property def is_link_local(self) -> bool: ... @property def is_loopback(self) -> bool: ... @property def is_multicast(self) -> bool: ... @property def is_private(self) -> bool: ... @property def is_reserved(self) -> bool: ... @property def is_unspecified(self) -> bool: ... @property def max_prefixlen(self) -> int: ... @property def num_addresses(self) -> int: ... def overlaps(self: _T, other: _T) -> bool: ... @property def prefixlen(self) -> int: ... def subnets(self: _T, prefixlen_diff: int = ..., new_prefix: Optional[int] = ...) -> Iterator[_T]: ... def supernet(self: _T, prefixlen_diff: int = ..., new_prefix: Optional[int] = ...) -> Iterator[_T]: ... @property def with_hostmask(self) -> str: ... @property def with_netmask(self) -> str: ... @property def with_prefixlen(self) -> str: ... class _BaseInterface(_BaseAddress, Generic[_A, _N]): hostmask: _A netmask: _A network: _N @property def ip(self) -> _A: ... @property def with_hostmask(self) -> str: ... @property def with_netmask(self) -> str: ... @property def with_prefixlen(self) -> str: ... class IPv4Address(_BaseAddress): ... class IPv4Network(_BaseNetwork[IPv4Address]): ... class IPv4Interface(IPv4Address, _BaseInterface[IPv4Address, IPv4Network]): ... class IPv6Address(_BaseAddress): @property def ipv4_mapped(self) -> Optional[IPv4Address]: ... @property def is_site_local(self) -> bool: ... @property def sixtofour(self) -> Optional[IPv4Address]: ... @property def teredo(self) -> Optional[Tuple[IPv4Address, IPv4Address]]: ... class IPv6Network(_BaseNetwork[IPv6Address]): @property def is_site_local(self) -> bool: ... class IPv6Interface(IPv6Address, _BaseInterface[IPv6Address, IPv6Network]): ... def v4_int_to_packed(address: int) -> bytes: ... def v6_int_to_packed(address: int) -> bytes: ... @overload def summarize_address_range(first: IPv4Address, last: IPv4Address) -> Iterator[IPv4Network]: ... @overload def summarize_address_range(first: IPv6Address, last: IPv6Address) -> Iterator[IPv6Network]: ... def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: ... @overload def get_mixed_type_key(obj: _A) -> Tuple[int, _A]: ... @overload def get_mixed_type_key(obj: IPv4Network) -> Tuple[int, IPv4Address, IPv4Address]: ... @overload def get_mixed_type_key(obj: IPv6Network) -> Tuple[int, IPv6Address, IPv6Address]: ... class AddressValueError(ValueError): ... class NetmaskValueError(ValueError): ... mypy-0.560/typeshed/stdlib/3.4/0000755€tŠÔÚ€2›s®0000000000013215007244022304 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3.4/_stat.pyi0000644€tŠÔÚ€2›s®0000000362013215007212024135 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_stat' module.""" SF_APPEND = ... # type: int SF_ARCHIVED = ... # type: int SF_IMMUTABLE = ... # type: int SF_NOUNLINK = ... # type: int SF_SNAPSHOT = ... # type: int ST_ATIME = ... 
# type: int ST_CTIME = ... # type: int ST_DEV = ... # type: int ST_GID = ... # type: int ST_INO = ... # type: int ST_MODE = ... # type: int ST_MTIME = ... # type: int ST_NLINK = ... # type: int ST_SIZE = ... # type: int ST_UID = ... # type: int S_ENFMT = ... # type: int S_IEXEC = ... # type: int S_IFBLK = ... # type: int S_IFCHR = ... # type: int S_IFDIR = ... # type: int S_IFDOOR = ... # type: int S_IFIFO = ... # type: int S_IFLNK = ... # type: int S_IFPORT = ... # type: int S_IFREG = ... # type: int S_IFSOCK = ... # type: int S_IFWHT = ... # type: int S_IREAD = ... # type: int S_IRGRP = ... # type: int S_IROTH = ... # type: int S_IRUSR = ... # type: int S_IRWXG = ... # type: int S_IRWXO = ... # type: int S_IRWXU = ... # type: int S_ISGID = ... # type: int S_ISUID = ... # type: int S_ISVTX = ... # type: int S_IWGRP = ... # type: int S_IWOTH = ... # type: int S_IWRITE = ... # type: int S_IWUSR = ... # type: int S_IXGRP = ... # type: int S_IXOTH = ... # type: int S_IXUSR = ... # type: int UF_APPEND = ... # type: int UF_COMPRESSED = ... # type: int UF_HIDDEN = ... # type: int UF_IMMUTABLE = ... # type: int UF_NODUMP = ... # type: int UF_NOUNLINK = ... # type: int UF_OPAQUE = ... # type: int def S_IMODE(mode: int) -> int: ... def S_IFMT(mode: int) -> int: ... def S_ISBLK(mode: int) -> bool: ... def S_ISCHR(mode: int) -> bool: ... def S_ISDIR(mode: int) -> bool: ... def S_ISDOOR(mode: int) -> bool: ... def S_ISFIFO(mode: int) -> bool: ... def S_ISLNK(mode: int) -> bool: ... def S_ISPORT(mode: int) -> bool: ... def S_ISREG(mode: int) -> bool: ... def S_ISSOCK(mode: int) -> bool: ... def S_ISWHT(mode: int) -> bool: ... def filemode(mode: int) -> str: ... mypy-0.560/typeshed/stdlib/3.4/_tracemalloc.pyi0000644€tŠÔÚ€2›s®0000000123313215007212025446 0ustar jukkaDROPBOX\Domain Users00000000000000"""Stub file for the '_tracemalloc' module.""" # This is an autogenerated file. It serves as a starting point # for a more precise manual annotation of this module. # Feel free to edit the source below, but remove this header when you do. from typing import Any, List, Tuple, Dict, Generic def _get_object_traceback(*args, **kwargs) -> Any: ... def _get_traces() -> Any: raise MemoryError() def clear_traces() -> None: ... def get_traceback_limit() -> int: ... def get_traced_memory() -> tuple: ... def get_tracemalloc_memory() -> Any: ... def is_tracing() -> bool: ... def start(*args, **kwargs) -> None: raise ValueError() def stop() -> None: ... 
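# --- Editor's note: illustrative usage sketch, not part of typeshed. ---
# The private `_stat` module stubbed above backs the public `stat` module,
# which is the usual entry point. A minimal example using only calls that
# appear in the stub; the path '.' is an arbitrary placeholder.
import os
import stat

mode = os.stat('.').st_mode          # st_mode is the integer the helpers expect
assert stat.S_ISDIR(mode)            # S_ISDIR(mode: int) -> bool
print(stat.filemode(mode))           # filemode(mode: int) -> str, e.g. 'drwxr-xr-x'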
mypy-0.560/typeshed/stdlib/3.4/asyncio/0000755€tŠÔÚ€2›s®0000000000013215007244023751 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3.4/asyncio/__init__.pyi0000644€tŠÔÚ€2›s®0000000660713215007212026237 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import List, Type from asyncio.coroutines import ( coroutine as coroutine, iscoroutinefunction as iscoroutinefunction, iscoroutine as iscoroutine, ) from asyncio.protocols import ( BaseProtocol as BaseProtocol, Protocol as Protocol, DatagramProtocol as DatagramProtocol, SubprocessProtocol as SubprocessProtocol, ) from asyncio.streams import ( StreamReader as StreamReader, StreamWriter as StreamWriter, StreamReaderProtocol as StreamReaderProtocol, open_connection as open_connection, start_server as start_server, IncompleteReadError as IncompleteReadError, LimitOverrunError as LimitOverrunError, ) from asyncio.subprocess import ( create_subprocess_exec as create_subprocess_exec, create_subprocess_shell as create_subprocess_shell, ) from asyncio.transports import ( BaseTransport as BaseTransport, ReadTransport as ReadTransport, WriteTransport as WriteTransport, Transport as Transport, DatagramTransport as DatagramTransport, SubprocessTransport as SubprocessTransport, ) from asyncio.futures import ( Future as Future, CancelledError as CancelledError, TimeoutError as TimeoutError, InvalidStateError as InvalidStateError, wrap_future as wrap_future, ) from asyncio.tasks import ( FIRST_COMPLETED as FIRST_COMPLETED, FIRST_EXCEPTION as FIRST_EXCEPTION, ALL_COMPLETED as ALL_COMPLETED, as_completed as as_completed, ensure_future as ensure_future, async as async, gather as gather, run_coroutine_threadsafe as run_coroutine_threadsafe, shield as shield, sleep as sleep, wait as wait, wait_for as wait_for, Task as Task, ) from asyncio.events import ( AbstractEventLoopPolicy as AbstractEventLoopPolicy, AbstractEventLoop as AbstractEventLoop, AbstractServer as AbstractServer, Handle as Handle, TimerHandle as TimerHandle, get_event_loop_policy as get_event_loop_policy, set_event_loop_policy as set_event_loop_policy, get_event_loop as get_event_loop, set_event_loop as set_event_loop, new_event_loop as new_event_loop, get_child_watcher as get_child_watcher, set_child_watcher as set_child_watcher, ) from asyncio.queues import ( Queue as Queue, PriorityQueue as PriorityQueue, LifoQueue as LifoQueue, QueueFull as QueueFull, QueueEmpty as QueueEmpty, ) from asyncio.locks import ( Lock as Lock, Event as Event, Condition as Condition, Semaphore as Semaphore, BoundedSemaphore as BoundedSemaphore, ) if sys.version_info < (3, 5): from asyncio.queues import JoinableQueue as JoinableQueue else: from asyncio.futures import isfuture as isfuture from asyncio.events import ( _set_running_loop as _set_running_loop, _get_running_loop as _get_running_loop, ) if sys.platform != 'win32': from asyncio.streams import ( open_unix_connection as open_unix_connection, start_unix_server as start_unix_server, ) # TODO: It should be possible to instantiate these classes, but mypy # currently disallows this. # See https://github.com/python/mypy/issues/1843 SelectorEventLoop = ... # type: Type[AbstractEventLoop] if sys.platform == 'win32': ProactorEventLoop = ... # type: Type[AbstractEventLoop] DefaultEventLoopPolicy = ... 
# type: Type[AbstractEventLoopPolicy] # TODO: AbstractChildWatcher (UNIX only) __all__: List[str] mypy-0.560/typeshed/stdlib/3.4/asyncio/coroutines.pyi0000644€tŠÔÚ€2›s®0000000040713215007212026662 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Generator, List, TypeVar __all__: List[str] _F = TypeVar('_F', bound=Callable[..., Any]) def coroutine(func: _F) -> _F: ... def iscoroutinefunction(func: Callable[..., Any]) -> bool: ... def iscoroutine(obj: Any) -> bool: ... mypy-0.560/typeshed/stdlib/3.4/asyncio/events.pyi0000644€tŠÔÚ€2›s®0000002420613215007212025777 0ustar jukkaDROPBOX\Domain Users00000000000000import selectors from socket import socket import ssl import sys from typing import Any, Awaitable, Callable, Dict, Generator, List, Optional, Sequence, Tuple, TypeVar, Union, overload from abc import ABCMeta, abstractmethod from asyncio.futures import Future from asyncio.coroutines import coroutine from asyncio.protocols import BaseProtocol from asyncio.tasks import Task from asyncio.transports import BaseTransport __all__: List[str] _T = TypeVar('_T') _Context = Dict[str, Any] _ExceptionHandler = Callable[[AbstractEventLoop, _Context], Any] _ProtocolFactory = Callable[[], BaseProtocol] _SSLContext = Union[bool, None, ssl.SSLContext] _TransProtPair = Tuple[BaseTransport, BaseProtocol] class Handle: _cancelled = False _args = ... # type: List[Any] def __init__(self, callback: Callable[..., Any], args: List[Any], loop: AbstractEventLoop) -> None: ... def __repr__(self) -> str: ... def cancel(self) -> None: ... def _run(self) -> None: ... class TimerHandle(Handle): def __init__(self, when: float, callback: Callable[..., Any], args: List[Any], loop: AbstractEventLoop) -> None: ... def __hash__(self) -> int: ... class AbstractServer: def close(self) -> None: ... @coroutine def wait_closed(self) -> Generator[Any, None, None]: ... class AbstractEventLoop(metaclass=ABCMeta): @abstractmethod def run_forever(self) -> None: ... # Can't use a union, see mypy issue # 1873. @overload @abstractmethod def run_until_complete(self, future: Generator[Any, None, _T]) -> _T: ... @overload @abstractmethod def run_until_complete(self, future: Awaitable[_T]) -> _T: ... @abstractmethod def stop(self) -> None: ... @abstractmethod def is_running(self) -> bool: ... @abstractmethod def is_closed(self) -> bool: ... @abstractmethod def close(self) -> None: ... if sys.version_info >= (3, 6): @abstractmethod @coroutine def shutdown_asyncgens(self) -> Generator[Any, None, None]: ... # Methods scheduling callbacks. All these return Handles. @abstractmethod def call_soon(self, callback: Callable[..., Any], *args: Any) -> Handle: ... @abstractmethod def call_later(self, delay: Union[int, float], callback: Callable[..., Any], *args: Any) -> Handle: ... @abstractmethod def call_at(self, when: float, callback: Callable[..., Any], *args: Any) -> Handle: ... @abstractmethod def time(self) -> float: ... # Future methods if sys.version_info >= (3, 5): @abstractmethod def create_future(self) -> Future[Any]: ... # Tasks methods @abstractmethod def create_task(self, coro: Union[Awaitable[_T], Generator[Any, None, _T]]) -> Task[_T]: ... @abstractmethod def set_task_factory(self, factory: Optional[Callable[[AbstractEventLoop, Generator[Any, None, _T]], Future[_T]]]) -> None: ... @abstractmethod def get_task_factory(self) -> Optional[Callable[[AbstractEventLoop, Generator[Any, None, _T]], Future[_T]]]: ... 
# Methods for interacting with threads @abstractmethod def call_soon_threadsafe(self, callback: Callable[..., Any], *args: Any) -> Handle: ... @abstractmethod @coroutine def run_in_executor(self, executor: Any, callback: Callable[..., _T], *args: Any) -> Generator[Any, None, _T]: ... @abstractmethod def set_default_executor(self, executor: Any) -> None: ... # Network I/O methods returning Futures. @abstractmethod @coroutine # TODO the "Tuple[Any, ...]" should be "Union[Tuple[str, int], Tuple[str, int, int, int]]" but that triggers # https://github.com/python/mypy/issues/2509 def getaddrinfo(self, host: str, port: int, *, family: int = ..., type: int = ..., proto: int = ..., flags: int = ...) -> Generator[Any, None, List[Tuple[int, int, int, str, Tuple[Any, ...]]]]: ... @abstractmethod @coroutine def getnameinfo(self, sockaddr: tuple, flags: int = ...) -> Generator[Any, None, Tuple[str, int]]: ... @abstractmethod @coroutine def create_connection(self, protocol_factory: _ProtocolFactory, host: str = ..., port: int = ..., *, ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: Optional[socket] = ..., local_addr: str = ..., server_hostname: str = ...) -> Generator[Any, None, _TransProtPair]: ... @abstractmethod @coroutine def create_server(self, protocol_factory: _ProtocolFactory, host: Union[str, Sequence[str]] = ..., port: int = ..., *, family: int = ..., flags: int = ..., sock: Optional[socket] = ..., backlog: int = ..., ssl: _SSLContext = ..., reuse_address: Optional[bool] = ..., reuse_port: Optional[bool] = ...) -> Generator[Any, None, AbstractServer]: ... @abstractmethod @coroutine def create_unix_connection(self, protocol_factory: _ProtocolFactory, path: str, *, ssl: _SSLContext = ..., sock: Optional[socket] = ..., server_hostname: str = ...) -> Generator[Any, None, _TransProtPair]: ... @abstractmethod @coroutine def create_unix_server(self, protocol_factory: _ProtocolFactory, path: str, *, sock: Optional[socket] = ..., backlog: int = ..., ssl: _SSLContext = ...) -> Generator[Any, None, AbstractServer]: ... @abstractmethod @coroutine def create_datagram_endpoint(self, protocol_factory: _ProtocolFactory, local_addr: Optional[Tuple[str, int]] = ..., remote_addr: Optional[Tuple[str, int]] = ..., *, family: int = ..., proto: int = ..., flags: int = ..., reuse_address: Optional[bool] = ..., reuse_port: Optional[bool] = ..., allow_broadcast: Optional[bool] = ..., sock: Optional[socket] = ...) -> Generator[Any, None, _TransProtPair]: ... @abstractmethod @coroutine def connect_accepted_socket(self, protocol_factory: _ProtocolFactory, sock: socket, *, ssl: _SSLContext = ...) -> Generator[Any, None, _TransProtPair]: ... # Pipes and subprocesses. @abstractmethod @coroutine def connect_read_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> Generator[Any, None, _TransProtPair]: ... @abstractmethod @coroutine def connect_write_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> Generator[Any, None, _TransProtPair]: ... @abstractmethod @coroutine def subprocess_shell(self, protocol_factory: _ProtocolFactory, cmd: Union[bytes, str], *, stdin: Any = ..., stdout: Any = ..., stderr: Any = ..., **kwargs: Any) -> Generator[Any, None, _TransProtPair]: ... @abstractmethod @coroutine def subprocess_exec(self, protocol_factory: _ProtocolFactory, *args: Any, stdin: Any = ..., stdout: Any = ..., stderr: Any = ..., **kwargs: Any) -> Generator[Any, None, _TransProtPair]: ... 
@abstractmethod def add_reader(self, fd: selectors._FileObject, callback: Callable[..., Any], *args: Any) -> None: ... @abstractmethod def remove_reader(self, fd: selectors._FileObject) -> None: ... @abstractmethod def add_writer(self, fd: selectors._FileObject, callback: Callable[..., Any], *args: Any) -> None: ... @abstractmethod def remove_writer(self, fd: selectors._FileObject) -> None: ... # Completion based I/O methods returning Futures. @abstractmethod @coroutine def sock_recv(self, sock: socket, nbytes: int) -> Generator[Any, None, bytes]: ... @abstractmethod @coroutine def sock_sendall(self, sock: socket, data: bytes) -> Generator[Any, None, None]: ... @abstractmethod @coroutine def sock_connect(self, sock: socket, address: str) -> Generator[Any, None, None]: ... @abstractmethod @coroutine def sock_accept(self, sock: socket) -> Generator[Any, None, Tuple[socket, Any]]: ... # Signal handling. @abstractmethod def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... @abstractmethod def remove_signal_handler(self, sig: int) -> None: ... # Error handlers. @abstractmethod def set_exception_handler(self, handler: _ExceptionHandler) -> None: ... @abstractmethod def get_exception_handler(self) -> _ExceptionHandler: ... @abstractmethod def default_exception_handler(self, context: _Context) -> None: ... @abstractmethod def call_exception_handler(self, context: _Context) -> None: ... # Debug flag management. @abstractmethod def get_debug(self) -> bool: ... @abstractmethod def set_debug(self, enabled: bool) -> None: ... class AbstractEventLoopPolicy(metaclass=ABCMeta): @abstractmethod def get_event_loop(self) -> AbstractEventLoop: ... @abstractmethod def set_event_loop(self, loop: AbstractEventLoop) -> None: ... @abstractmethod def new_event_loop(self) -> AbstractEventLoop: ... # Child processes handling (Unix only). @abstractmethod def get_child_watcher(self) -> Any: ... # TODO: unix_events.AbstractChildWatcher @abstractmethod def set_child_watcher(self, watcher: Any) -> None: ... # TODO: unix_events.AbstractChildWatcher class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy): def __init__(self) -> None: ... def get_event_loop(self) -> AbstractEventLoop: ... def set_event_loop(self, loop: AbstractEventLoop) -> None: ... def new_event_loop(self) -> AbstractEventLoop: ... def get_event_loop_policy() -> AbstractEventLoopPolicy: ... def set_event_loop_policy(policy: AbstractEventLoopPolicy) -> None: ... def get_event_loop() -> AbstractEventLoop: ... def set_event_loop(loop: AbstractEventLoop) -> None: ... def new_event_loop() -> AbstractEventLoop: ... def get_child_watcher() -> Any: ... # TODO: unix_events.AbstractChildWatcher def set_child_watcher(watcher: Any) -> None: ... # TODO: unix_events.AbstractChildWatcher def _set_running_loop(loop: AbstractEventLoop) -> None: ... def _get_running_loop() -> AbstractEventLoop: ... mypy-0.560/typeshed/stdlib/3.4/asyncio/futures.pyi0000644€tŠÔÚ€2›s®0000000353213215007212026167 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Union, Callable, TypeVar, Type, List, Generic, Iterable, Generator, Awaitable from .events import AbstractEventLoop from concurrent.futures import ( CancelledError as CancelledError, TimeoutError as TimeoutError, Future as _ConcurrentFuture, Error, ) __all__: List[str] _T = TypeVar('_T') _S = TypeVar('_S', bound=Future) class InvalidStateError(Error): ... class _TracebackLogger: exc = ... # type: BaseException tb = ... 
# type: List[str] def __init__(self, exc: Any, loop: AbstractEventLoop) -> None: ... def activate(self) -> None: ... def clear(self) -> None: ... def __del__(self) -> None: ... if sys.version_info >= (3, 5): def isfuture(obj: object) -> bool: ... class Future(Iterable[_T], Awaitable[_T], Generic[_T]): _state = ... # type: str _exception = ... # type: BaseException _blocking = False _log_traceback = False _tb_logger = ... # type: Type[_TracebackLogger] def __init__(self, *, loop: AbstractEventLoop = ...) -> None: ... def __repr__(self) -> str: ... def __del__(self) -> None: ... def cancel(self) -> bool: ... def _schedule_callbacks(self) -> None: ... def cancelled(self) -> bool: ... def done(self) -> bool: ... def result(self) -> _T: ... def exception(self) -> BaseException: ... def add_done_callback(self: _S, fn: Callable[[_S], Any]) -> None: ... def remove_done_callback(self: _S, fn: Callable[[_S], Any]) -> int: ... def set_result(self, result: _T) -> None: ... def set_exception(self, exception: Union[type, BaseException]) -> None: ... def _copy_state(self, other: Any) -> None: ... def __iter__(self) -> Generator[Any, None, _T]: ... def __await__(self) -> Generator[Any, None, _T]: ... def wrap_future(f: Union[_ConcurrentFuture[_T], Future[_T]]) -> Future[_T]: ... mypy-0.560/typeshed/stdlib/3.4/asyncio/locks.pyi0000644€tŠÔÚ€2›s®0000000445013215007212025605 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Generator, Iterable, Iterator, List, Type, TypeVar, Union, Optional, Awaitable from .coroutines import coroutine from .events import AbstractEventLoop from .futures import Future from types import TracebackType _T = TypeVar('_T') __all__: List[str] class _ContextManager: def __init__(self, lock: Union[Lock, Semaphore]) -> None: ... def __enter__(self) -> object: ... def __exit__(self, *args: Any) -> None: ... class _ContextManagerMixin(Future[_ContextManager]): # Apparently this exists to *prohibit* use as a context manager. def __enter__(self) -> object: ... def __exit__(self, *args: Any) -> None: ... def __aenter__(self) -> Awaitable[None]: ... def __aexit__(self, exc_type: Optional[Type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType]) -> Awaitable[None]: ... class Lock(_ContextManagerMixin): def __init__(self, *, loop: Optional[AbstractEventLoop] = ...) -> None: ... def locked(self) -> bool: ... @coroutine def acquire(self) -> Generator[Any, None, bool]: ... def release(self) -> None: ... class Event: def __init__(self, *, loop: Optional[AbstractEventLoop] = ...) -> None: ... def is_set(self) -> bool: ... def set(self) -> None: ... def clear(self) -> None: ... @coroutine def wait(self) -> Generator[Any, None, bool]: ... class Condition(_ContextManagerMixin): def __init__(self, lock: Optional[Lock] = ..., *, loop: Optional[AbstractEventLoop] = ...) -> None: ... def locked(self) -> bool: ... @coroutine def acquire(self) -> Generator[Any, None, bool]: ... def release(self) -> None: ... @coroutine def wait(self) -> Generator[Any, None, bool]: ... @coroutine def wait_for(self, predicate: Callable[[], _T]) -> Generator[Any, None, _T]: ... def notify(self, n: int = ...) -> None: ... def notify_all(self) -> None: ... class Semaphore(_ContextManagerMixin): def __init__(self, value: int = ..., *, loop: Optional[AbstractEventLoop] = ...) -> None: ... def locked(self) -> bool: ... @coroutine def acquire(self) -> Generator[Any, None, bool]: ... def release(self) -> None: ... 
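# --- Editor's note: illustrative usage sketch, not part of typeshed. ---
# Minimal 3.4-era use of the Lock declared above; the worker coroutine and
# its body are hypothetical.
import asyncio

@asyncio.coroutine
def worker(lock):
    # Plain `with lock:` is rejected on purpose (see _ContextManagerMixin);
    # the 3.4-era idiom is to yield from the lock, which produces the
    # _ContextManager wrapper that releases the lock on exit.
    with (yield from lock):
        pass  # hypothetical critical section

loop = asyncio.get_event_loop()
loop.run_until_complete(worker(asyncio.Lock(loop=loop)))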
class BoundedSemaphore(Semaphore): def __init__(self, value: int = ..., *, loop: Optional[AbstractEventLoop] = ...) -> None: ... mypy-0.560/typeshed/stdlib/3.4/asyncio/protocols.pyi0000644€tŠÔÚ€2›s®0000000153713215007212026521 0ustar jukkaDROPBOX\Domain Users00000000000000from asyncio import transports from typing import AnyStr, List, Tuple __all__: List[str] class BaseProtocol: def connection_made(self, transport: transports.BaseTransport) -> None: ... def connection_lost(self, exc: Exception) -> None: ... def pause_writing(self) -> None: ... def resume_writing(self) -> None: ... class Protocol(BaseProtocol): def data_received(self, data: bytes) -> None: ... def eof_received(self) -> bool: ... class DatagramProtocol(BaseProtocol): def datagram_received(self, data: AnyStr, addr: Tuple[str, int]) -> None: ... def error_received(self, exc: Exception) -> None: ... class SubprocessProtocol(BaseProtocol): def pipe_data_received(self, fd: int, data: AnyStr) -> None: ... def pipe_connection_lost(self, fd: int, exc: Exception) -> None: ... def process_exited(self) -> None: ... mypy-0.560/typeshed/stdlib/3.4/asyncio/queues.pyi0000644€tŠÔÚ€2›s®0000000273713215007212026007 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from asyncio.events import AbstractEventLoop from .coroutines import coroutine from .futures import Future from typing import Any, Generator, Generic, List, TypeVar __all__: List[str] class QueueEmpty(Exception): ... class QueueFull(Exception): ... _T = TypeVar('_T') class Queue(Generic[_T]): def __init__(self, maxsize: int = ..., *, loop: AbstractEventLoop = ...) -> None: ... def _init(self, maxsize: int) -> None: ... def _get(self) -> _T: ... def _put(self, item: _T) -> None: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... def _format(self) -> str: ... def _consume_done_getters(self) -> None: ... def _consume_done_putters(self) -> None: ... def qsize(self) -> int: ... @property def maxsize(self) -> int: ... def empty(self) -> bool: ... def full(self) -> bool: ... @coroutine def put(self, item: _T) -> Generator[Any, None, None]: ... def put_nowait(self, item: _T) -> None: ... @coroutine def get(self) -> Generator[Any, None, _T]: ... def get_nowait(self) -> _T: ... if sys.version_info >= (3, 4): @coroutine def join(self) -> Generator[Any, None, bool]: ... def task_done(self) -> None: ... class PriorityQueue(Queue[_T]): ... class LifoQueue(Queue[_T]): ... if sys.version_info < (3, 5): class JoinableQueue(Queue[_T]): def task_done(self) -> None: ... @coroutine def join(self) -> Generator[Any, None, bool]: ... mypy-0.560/typeshed/stdlib/3.4/asyncio/streams.pyi0000644€tŠÔÚ€2›s®0000000730713215007212026154 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Awaitable, Callable, Generator, Iterable, List, Optional, Tuple from . import coroutines from . import events from . import protocols from . import transports _ClientConnectedCallback = Callable[[StreamReader, StreamWriter], Optional[Awaitable[None]]] __all__: List[str] class IncompleteReadError(EOFError): expected = ... # type: Optional[int] partial = ... # type: bytes def __init__(self, partial: bytes, expected: Optional[int]) -> None: ... class LimitOverrunError(Exception): consumed = ... # type: int def __init__(self, message: str, consumed: int) -> None: ... @coroutines.coroutine def open_connection( host: str = ..., port: int = ..., *, loop: Optional[events.AbstractEventLoop] = ..., limit: int = ..., **kwds: Any ) -> Generator[Any, None, Tuple[StreamReader, StreamWriter]]: ... 
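# --- Editor's note: illustrative usage sketch, not part of typeshed. ---
# Minimal client built on the open_connection() declared above, written in
# the 3.4-era generator/coroutine style these stubs target. The host, port
# and request bytes are hypothetical placeholders.
import asyncio

@asyncio.coroutine
def fetch_status_line():
    reader, writer = yield from asyncio.open_connection('example.com', 80)
    writer.write(b'GET / HTTP/1.0\r\nHost: example.com\r\n\r\n')
    line = yield from reader.readline()   # StreamReader.readline() -> bytes
    writer.close()
    return line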
@coroutines.coroutine def start_server( client_connected_cb: _ClientConnectedCallback, host: str = ..., port: int = ..., *, loop: Optional[events.AbstractEventLoop] = ..., limit: int = ..., **kwds: Any ) -> Generator[Any, None, events.AbstractServer]: ... if sys.platform != 'win32': @coroutines.coroutine def open_unix_connection( path: str = ..., *, loop: Optional[events.AbstractEventLoop] = ..., limit: int = ..., **kwds: Any ) -> Generator[Any, None, Tuple[StreamReader, StreamWriter]]: ... @coroutines.coroutine def start_unix_server( client_connected_cb: _ClientConnectedCallback, path: str = ..., *, loop: int = ..., limit: int = ..., **kwds: Any) -> Generator[Any, None, events.AbstractServer]: ... class FlowControlMixin(protocols.Protocol): ... class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): def __init__(self, stream_reader: StreamReader, client_connected_cb: _ClientConnectedCallback = ..., loop: Optional[events.AbstractEventLoop] = ...) -> None: ... def connection_made(self, transport: transports.BaseTransport) -> None: ... def connection_lost(self, exc: Exception) -> None: ... def data_received(self, data: bytes) -> None: ... def eof_received(self) -> bool: ... class StreamWriter: def __init__(self, transport: transports.BaseTransport, protocol: protocols.BaseProtocol, reader: StreamReader, loop: events.AbstractEventLoop) -> None: ... @property def transport(self) -> transports.BaseTransport: ... def write(self, data: bytes) -> None: ... def writelines(self, data: Iterable[bytes]) -> None: ... def write_eof(self) -> None: ... def can_write_eof(self) -> bool: ... def close(self) -> None: ... def get_extra_info(self, name: str, default: Any = ...) -> Any: ... @coroutines.coroutine def drain(self) -> Generator[Any, None, None]: ... class StreamReader: def __init__(self, limit: int = ..., loop: Optional[events.AbstractEventLoop] = ...) -> None: ... def exception(self) -> Exception: ... def set_exception(self, exc: Exception) -> None: ... def set_transport(self, transport: transports.BaseTransport) -> None: ... def feed_eof(self) -> None: ... def at_eof(self) -> bool: ... def feed_data(self, data: bytes) -> None: ... @coroutines.coroutine def readline(self) -> Generator[Any, None, bytes]: ... @coroutines.coroutine def readuntil(self, separator: bytes = ...) -> Generator[Any, None, bytes]: ... @coroutines.coroutine def read(self, n: int = ...) -> Generator[Any, None, bytes]: ... @coroutines.coroutine def readexactly(self, n: int) -> Generator[Any, None, bytes]: ... mypy-0.560/typeshed/stdlib/3.4/asyncio/subprocess.pyi0000644€tŠÔÚ€2›s®0000000470113215007212026661 0ustar jukkaDROPBOX\Domain Users00000000000000from asyncio import events from asyncio import protocols from asyncio import streams from asyncio import transports from asyncio.coroutines import coroutine from typing import Any, AnyStr, Generator, List, Optional, Tuple, Union, IO __all__: List[str] PIPE = ... # type: int STDOUT = ... # type: int DEVNULL = ... # type: int class SubprocessStreamProtocol(streams.FlowControlMixin, protocols.SubprocessProtocol): stdin = ... # type: Optional[streams.StreamWriter] stdout = ... # type: Optional[streams.StreamReader] stderr = ... # type: Optional[streams.StreamReader] def __init__(self, limit: int, loop: events.AbstractEventLoop) -> None: ... def connection_made(self, transport: transports.BaseTransport) -> None: ... def pipe_data_received(self, fd: int, data: AnyStr) -> None: ... def pipe_connection_lost(self, fd: int, exc: Exception) -> None: ... 
def process_exited(self) -> None: ... class Process: stdin = ... # type: Optional[streams.StreamWriter] stdout = ... # type: Optional[streams.StreamReader] stderr = ... # type: Optional[streams.StreamReader] pid = ... # type: int def __init__(self, transport: transports.BaseTransport, protocol: protocols.BaseProtocol, loop: events.AbstractEventLoop) -> None: ... @property def returncode(self) -> int: ... @coroutine def wait(self) -> Generator[Any, None, int]: ... def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... @coroutine def communicate(self, input: Optional[bytes] = ...) -> Generator[Any, None, Tuple[bytes, bytes]]: ... @coroutine def create_subprocess_shell( *Args: Union[str, bytes], # Union used instead of AnyStr due to mypy issue #1236 stdin: Union[int, IO[Any], None] = ..., stdout: Union[int, IO[Any], None] = ..., stderr: Union[int, IO[Any], None] = ..., loop: events.AbstractEventLoop = ..., limit: int = ..., **kwds: Any ) -> Generator[Any, None, Process]: ... @coroutine def create_subprocess_exec( program: Union[str, bytes], # Union used instead of AnyStr due to mypy issue #1236 *args: Any, stdin: Union[int, IO[Any], None] = ..., stdout: Union[int, IO[Any], None] = ..., stderr: Union[int, IO[Any], None] = ..., loop: events.AbstractEventLoop = ..., limit: int = ..., **kwds: Any ) -> Generator[Any, None, Process]: ... mypy-0.560/typeshed/stdlib/3.4/asyncio/tasks.pyi0000644€tŠÔÚ€2›s®0000000776613215007212025634 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import (Any, TypeVar, Set, Dict, List, TextIO, Union, Tuple, Generic, Callable, Coroutine, Generator, Iterable, Awaitable, overload, Sequence, Iterator, Optional) import concurrent.futures from .events import AbstractEventLoop from .futures import Future __all__: List[str] _T = TypeVar('_T') _T1 = TypeVar('_T1') _T2 = TypeVar('_T2') _T3 = TypeVar('_T3') _T4 = TypeVar('_T4') _T5 = TypeVar('_T5') _FutureT = Union[Future[_T], Generator[Any, None, _T], Awaitable[_T]] FIRST_EXCEPTION = 'FIRST_EXCEPTION' FIRST_COMPLETED = 'FIRST_COMPLETED' ALL_COMPLETED = 'ALL_COMPLETED' def as_completed(fs: Sequence[_FutureT[_T]], *, loop: AbstractEventLoop = ..., timeout: Optional[float] = ...) -> Iterator[Generator[Any, None, _T]]: ... def ensure_future(coro_or_future: _FutureT[_T], *, loop: AbstractEventLoop = ...) -> Future[_T]: ... async = ensure_future @overload def gather(coro_or_future1: _FutureT[_T1], *, loop: AbstractEventLoop = ..., return_exceptions: bool = ...) -> Future[Tuple[_T1]]: ... @overload def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], *, loop: AbstractEventLoop = ..., return_exceptions: bool = ...) -> Future[Tuple[_T1, _T2]]: ... @overload def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3], *, loop: AbstractEventLoop = ..., return_exceptions: bool = ...) -> Future[Tuple[_T1, _T2, _T3]]: ... @overload def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3], coro_or_future4: _FutureT[_T4], *, loop: AbstractEventLoop = ..., return_exceptions: bool = ...) -> Future[Tuple[_T1, _T2, _T3, _T4]]: ... @overload def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3], coro_or_future4: _FutureT[_T4], coro_or_future5: _FutureT[_T5], *, loop: AbstractEventLoop = ..., return_exceptions: bool = ...) -> Future[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... 
@overload def gather(coro_or_future1: _FutureT[Any], coro_or_future2: _FutureT[Any], coro_or_future3: _FutureT[Any], coro_or_future4: _FutureT[Any], coro_or_future5: _FutureT[Any], coro_or_future6: _FutureT[Any], *coros_or_futures: _FutureT[Any], loop: AbstractEventLoop = ..., return_exceptions: bool = ...) -> Future[Tuple[Any, ...]]: ... def run_coroutine_threadsafe(coro: _FutureT[_T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ... def shield(arg: _FutureT[_T], *, loop: AbstractEventLoop = ...) -> Future[_T]: ... def sleep(delay: float, result: _T = ..., loop: AbstractEventLoop = ...) -> Future[_T]: ... def wait(fs: Iterable[_FutureT[_T]], *, loop: AbstractEventLoop = ..., timeout: float = ..., return_when: str = ...) -> Future[Tuple[Set[Future[_T]], Set[Future[_T]]]]: ... def wait_for(fut: _FutureT[_T], timeout: Optional[float], *, loop: AbstractEventLoop = ...) -> Future[_T]: ... class Task(Future[_T], Generic[_T]): _all_tasks = ... # type: Set[Task] _current_tasks = ... # type: Dict[AbstractEventLoop, Task] @classmethod def current_task(cls, loop: AbstractEventLoop = ...) -> Task: ... @classmethod def all_tasks(cls, loop: AbstractEventLoop = ...) -> Set[Task]: ... # Can't use a union, see mypy issue #1873. @overload def __init__(self, coro: Generator[Any, None, _T], *, loop: AbstractEventLoop = ...) -> None: ... @overload def __init__(self, coro: Awaitable[_T], *, loop: AbstractEventLoop = ...) -> None: ... def __repr__(self) -> str: ... def get_stack(self, *, limit: int = ...) -> List[Any]: ... # return List[stackframe] def print_stack(self, *, limit: int = ..., file: TextIO = ...) -> None: ... def cancel(self) -> bool: ... def _step(self, value: Any = ..., exc: Exception = ...) -> None: ... def _wakeup(self, future: Future[Any]) -> None: ... mypy-0.560/typeshed/stdlib/3.4/asyncio/transports.pyi0000644€tŠÔÚ€2›s®0000000251613215007212026712 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, Any, TypeVar, Mapping, List, Optional, Tuple __all__: List[str] class BaseTransport: def __init__(self, extra: Mapping[Any, Any] = ...) -> None: ... def get_extra_info(self, name: Any, default: Any = ...) -> Any: ... def is_closing(self) -> bool: ... def close(self) -> None: ... class ReadTransport(BaseTransport): def pause_reading(self) -> None: ... def resume_reading(self) -> None: ... class WriteTransport(BaseTransport): def set_write_buffer_limits( self, high: int = ..., low: int = ... ) -> None: ... def get_write_buffer_size(self) -> int: ... def write(self, data: Any) -> None: ... def writelines(self, list_of_data: List[Any]) -> None: ... def write_eof(self) -> None: ... def can_write_eof(self) -> bool: ... def abort(self) -> None: ... class Transport(ReadTransport, WriteTransport): ... class DatagramTransport(BaseTransport): def sendto(self, data: Any, addr: Optional[Tuple[str, int]] = ...) -> None: ... def abort(self) -> None: ... class SubprocessTransport(BaseTransport): def get_pid(self) -> int: ... def get_returncode(self) -> int: ... def get_pipe_transport(self, fd: int) -> BaseTransport: ... def send_signal(self, signal: int) -> int: ... def terminate(self) -> None: ... def kill(self) -> None: ... 
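# --- Editor's note: illustrative usage sketch, not part of typeshed. ---
# Shows why tasks.pyi above declares one gather() overload per arity: each
# argument position keeps its own result type. The two coroutines are
# hypothetical and use the 3.4-era generator style the stubs are written for.
from typing import Any, Generator
import asyncio

@asyncio.coroutine
def answer() -> Generator[Any, None, int]:
    yield from asyncio.sleep(0)
    return 42

@asyncio.coroutine
def greeting() -> Generator[Any, None, str]:
    yield from asyncio.sleep(0)
    return 'hello'

loop = asyncio.get_event_loop()
result = loop.run_until_complete(asyncio.gather(answer(), greeting()))
# Under the two-argument overload, the awaited result is typed Tuple[int, str]
# (the runtime value is actually a list, which the stub papers over).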
mypy-0.560/typeshed/stdlib/3.4/enum.pyi0000644€tŠÔÚ€2›s®0000000466713215007212024003 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import List, Any, TypeVar, Union, Iterable, Iterator, TypeVar, Generic, Type, Sized, Reversible, Container, Mapping from abc import ABCMeta _T = TypeVar('_T') _S = TypeVar('_S', bound=Type[Enum]) # Note: EnumMeta actually subclasses type directly, not ABCMeta. # This is a temporary workaround to allow multiple creation of enums with builtins # such as str as mixins, which due to the handling of ABCs of builtin types, cause # spurious inconsistent metaclass structure. See #1595. class EnumMeta(ABCMeta, Iterable[Enum], Sized, Reversible[Enum], Container[Enum]): def __iter__(self: Type[_T]) -> Iterator[_T]: ... def __reversed__(self: Type[_T]) -> Iterator[_T]: ... def __contains__(self, member: Any) -> bool: ... def __getitem__(self: Type[_T], name: str) -> _T: ... @property def __members__(self: Type[_T]) -> Mapping[str, _T]: ... class Enum(metaclass=EnumMeta): def __new__(cls: Type[_T], value: Any) -> _T: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... def __dir__(self) -> List[str]: ... def __format__(self, format_spec: str) -> str: ... def __hash__(self) -> Any: ... def __reduce_ex__(self, proto: Any) -> Any: ... name = ... # type: str value = ... # type: Any class IntEnum(int, Enum): value = ... # type: int def unique(enumeration: _S) -> _S: ... if sys.version_info >= (3, 6): _auto_null = ... # type: Any # subclassing IntFlag so it picks up all implemented base functions, best modeling behavior of enum.auto() class auto(IntFlag): value = ... # type: Any class Flag(Enum): def __contains__(self: _T, other: _T) -> bool: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... def __bool__(self) -> bool: ... def __or__(self: _T, other: _T) -> _T: ... def __and__(self: _T, other: _T) -> _T: ... def __xor__(self: _T, other: _T) -> _T: ... def __invert__(self: _T) -> _T: ... # The `type: ignore` comment is needed because mypy considers the type # signatures of several methods defined in int and Flag to be incompatible. class IntFlag(int, Flag): # type: ignore def __or__(self: _T, other: Union[int, _T]) -> _T: ... def __and__(self: _T, other: Union[int, _T]) -> _T: ... def __xor__(self: _T, other: Union[int, _T]) -> _T: ... __ror__ = __or__ __rand__ = __and__ __rxor__ = __xor__ mypy-0.560/typeshed/stdlib/3.4/pathlib.pyi0000644€tŠÔÚ€2›s®0000001103213215007212024442 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pathlib (Python 3.4) from typing import Any, Generator, IO, Optional, Sequence, Tuple, Type, TypeVar, Union, List import os import sys _P = TypeVar('_P', bound='PurePath') if sys.version_info >= (3, 6): _PurePathBase = os.PathLike[str] else: _PurePathBase = object class PurePath(_PurePathBase): parts = ... # type: Tuple[str, ...] drive = ... # type: str root = ... # type: str anchor = ... # type: str name = ... # type: str suffix = ... # type: str suffixes = ... # type: List[str] stem = ... # type: str if sys.version_info < (3, 5): def __init__(self, *pathsegments: str) -> None: ... elif sys.version_info < (3, 6): def __new__(cls: Type[_P], *args: Union[str, PurePath]) -> _P: ... else: def __new__(cls: Type[_P], *args: Union[str, os.PathLike[str]]) -> _P: ... def __hash__(self) -> int: ... def __lt__(self, other: PurePath) -> bool: ... def __le__(self, other: PurePath) -> bool: ... def __gt__(self, other: PurePath) -> bool: ... def __ge__(self, other: PurePath) -> bool: ... 
def __truediv__(self: _P, key: Union[str, PurePath]) -> _P: ... def __bytes__(self) -> bytes: ... def as_posix(self) -> str: ... def as_uri(self) -> str: ... def is_absolute(self) -> bool: ... def is_reserved(self) -> bool: ... def match(self, path_pattern: str) -> bool: ... def relative_to(self: _P, *other: Union[str, PurePath]) -> _P: ... def with_name(self: _P, name: str) -> _P: ... def with_suffix(self: _P, suffix: str) -> _P: ... def joinpath(self: _P, *other: Union[str, PurePath]) -> _P: ... @property def parents(self: _P) -> Sequence[_P]: ... @property def parent(self: _P) -> _P: ... class PurePosixPath(PurePath): ... class PureWindowsPath(PurePath): ... class Path(PurePath): @classmethod def cwd(cls: Type[_P]) -> _P: ... def stat(self) -> os.stat_result: ... def chmod(self, mode: int) -> None: ... def exists(self) -> bool: ... def glob(self, pattern: str) -> Generator[Path, None, None]: ... def group(self) -> str: ... def is_dir(self) -> bool: ... def is_file(self) -> bool: ... def is_symlink(self) -> bool: ... def is_socket(self) -> bool: ... def is_fifo(self) -> bool: ... def is_block_device(self) -> bool: ... def is_char_device(self) -> bool: ... def iterdir(self) -> Generator[Path, None, None]: ... def lchmod(self, mode: int) -> None: ... def lstat(self) -> os.stat_result: ... if sys.version_info < (3, 5): def mkdir(self, mode: int = ..., parents: bool = ...) -> None: ... else: def mkdir(self, mode: int = ..., parents: bool = ..., exist_ok: bool = ...) -> None: ... def open(self, mode: str = ..., buffering: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ...) -> IO[Any]: ... def owner(self) -> str: ... def rename(self, target: Union[str, PurePath]) -> None: ... def replace(self, target: Union[str, PurePath]) -> None: ... if sys.version_info < (3, 6): def resolve(self: _P) -> _P: ... else: def resolve(self: _P, strict: bool = ...) -> _P: ... def rglob(self, pattern: str) -> Generator[Path, None, None]: ... def rmdir(self) -> None: ... def symlink_to(self, target: Union[str, Path], target_is_directory: bool = ...) -> None: ... def touch(self, mode: int = ..., exist_ok: bool = ...) -> None: ... def unlink(self) -> None: ... if sys.version_info >= (3, 5): @classmethod def home(cls: Type[_P]) -> _P: ... if sys.version_info < (3, 6): def __new__(cls: Type[_P], *args: Union[str, PurePath], **kwargs: Any) -> _P: ... else: def __new__(cls: Type[_P], *args: Union[str, os.PathLike[str]], **kwargs: Any) -> _P: ... def absolute(self: _P) -> _P: ... def expanduser(self: _P) -> _P: ... def read_bytes(self) -> bytes: ... def read_text(self, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> str: ... def samefile(self, other_path: Union[str, bytes, int, Path]) -> bool: ... def write_bytes(self, data: bytes) -> int: ... def write_text(self, data: str, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> int: ... class PosixPath(Path, PurePosixPath): ... class WindowsPath(Path, PureWindowsPath): ... mypy-0.560/typeshed/stdlib/3.4/selectors.pyi0000644€tŠÔÚ€2›s®0000000720213215007212025026 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for selector # See https://docs.python.org/3/library/selectors.html from typing import Any, List, NamedTuple, Mapping, Tuple, Optional, Union from abc import ABCMeta, abstractmethod import socket # Type aliases added mainly to preserve some context # # See https://github.com/python/typeshed/issues/482 # for details regarding how _FileObject is typed. 
_FileObject = Union[int, socket.socket] _FileDescriptor = int _EventMask = int EVENT_READ = ... # type: _EventMask EVENT_WRITE = ... # type: _EventMask SelectorKey = NamedTuple('SelectorKey', [ ('fileobj', _FileObject), ('fd', _FileDescriptor), ('events', _EventMask), ('data', Any) ]) class BaseSelector(metaclass=ABCMeta): @abstractmethod def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... @abstractmethod def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def modify(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... @abstractmethod def select(self, timeout: Optional[int] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def close(self) -> None: ... def get_key(self, fileobj: _FileObject) -> SelectorKey: ... @abstractmethod def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... def __enter__(self) -> BaseSelector: ... def __exit__(self, *args: Any) -> None: ... class SelectSelector(BaseSelector): def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def select(self, timeout: Optional[int] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... class PollSelector(BaseSelector): def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def select(self, timeout: Optional[int] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... class EpollSelector(BaseSelector): def fileno(self) -> int: ... def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def select(self, timeout: Optional[int] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... class DevpollSelector(BaseSelector): def fileno(self) -> int: ... def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def select(self, timeout: Optional[int] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... class KqueueSelector(BaseSelector): def fileno(self) -> int: ... def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def select(self, timeout: Optional[int] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... class DefaultSelector(BaseSelector): def register(self, fileobj: _FileObject, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: _FileObject) -> SelectorKey: ... def select(self, timeout: Optional[int] = ...) -> List[Tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[_FileObject, SelectorKey]: ... 
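# --- Editor's note: illustrative usage sketch, not part of typeshed. ---
# Registering a non-blocking socket with DefaultSelector as typed above;
# _FileObject accepts either a raw file descriptor or a socket object.
# The loopback address and single non-blocking poll are hypothetical.
import selectors
import socket

sel = selectors.DefaultSelector()
server = socket.socket()
server.bind(('127.0.0.1', 0))
server.listen(5)
server.setblocking(False)
key = sel.register(server, selectors.EVENT_READ, data=None)   # -> SelectorKey
for ready_key, events in sel.select(timeout=0):               # non-blocking poll
    print(ready_key.fileobj, events & selectors.EVENT_READ)
sel.close()
server.close()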
mypy-0.560/typeshed/stdlib/3.4/statistics.pyi0000644€tŠÔÚ€2›s®0000000206213215007212025214 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for statistics from decimal import Decimal from fractions import Fraction import sys from typing import Iterable, Optional, TypeVar # Most functions in this module accept homogeneous collections of one of these types _Number = TypeVar('_Number', float, Decimal, Fraction) class StatisticsError(ValueError): ... def mean(data: Iterable[_Number]) -> _Number: ... if sys.version_info >= (3, 6): def harmonic_mean(data: Iterable[_Number]) -> _Number: ... def median(data: Iterable[_Number]) -> _Number: ... def median_low(data: Iterable[_Number]) -> _Number: ... def median_high(data: Iterable[_Number]) -> _Number: ... def median_grouped(data: Iterable[_Number]) -> _Number: ... def mode(data: Iterable[_Number]) -> _Number: ... def pstdev(data: Iterable[_Number], mu: Optional[_Number] = ...) -> _Number: ... def pvariance(data: Iterable[_Number], mu: Optional[_Number] = ...) -> _Number: ... def stdev(data: Iterable[_Number], xbar: Optional[_Number] = ...) -> _Number: ... def variance(data: Iterable[_Number], xbar: Optional[_Number] = ...) -> _Number: ... mypy-0.560/typeshed/stdlib/3.4/tracemalloc.pyi0000644€tŠÔÚ€2›s®0000000432713215007212025316 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for tracemalloc (Python 3.4+) import sys from typing import Any, List, Optional, Sequence, Tuple, Union def clear_traces() -> None: ... def get_object_traceback(obj: object) -> Optional[Traceback]: ... def get_traceback_limit() -> int: ... def get_traced_memory() -> Tuple[int, int]: ... def get_tracemalloc_memory() -> int: ... def is_tracing() -> bool: ... def start(nframe: int = ...) -> None: ... def stop() -> None: ... def take_snapshot() -> Snapshot: ... if sys.version_info >= (3, 6): class DomainFilter: inclusive = ... # type: bool domain = ... # type: int def __init__(self, inclusive: bool, domain: int) -> None: ... class Filter: if sys.version_info >= (3, 6): domain = ... # type: Optional[int] inclusive = ... # type: bool lineno = ... # type: Optional[int] filename_pattern = ... # type: str all_frames = ... # type: bool def __init__(self, inclusive: bool, filename_pattern: str, lineno: Optional[int] = ..., all_frames: bool = ..., domain: Optional[int] = ...) -> None: ... class Frame: filename = ... # type: str lineno = ... # type: int class Snapshot: def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = ...) -> List[StatisticDiff]: ... def dump(self, filename: str) -> None: ... if sys.version_info >= (3, 6): def filter_traces(self, filters: Sequence[Union[DomainFilter, Filter]]) -> Snapshot: ... else: def filter_traces(self, filters: Sequence[Filter]) -> Snapshot: ... @classmethod def load(cls, filename: str) -> Snapshot: ... def statistics(self, key_type: str, cumulative: bool = ...) -> List[Statistic]: ... traceback_limit = ... # type: int traces = ... # type: Sequence[Trace] class Statistic: count = ... # type: int size = ... # type: int traceback = ... # type: Traceback class StatisticDiff: count = ... # type: int count_diff = ... # type: int size = ... # type: int size_diff = ... # type: int traceback = ... # type: Traceback class Trace: size = ... # type: int traceback = ... # type: Traceback class Traceback(Sequence[Frame]): def format(self, limit: Optional[int] = ...) -> List[str]: ... 
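# --- Editor's note: illustrative usage sketch, not part of typeshed. ---
# The usual start()/take_snapshot()/statistics() flow covered by the
# tracemalloc stub above; the allocation being measured is hypothetical.
import tracemalloc

tracemalloc.start()
data = [str(i) for i in range(1000)]          # something that allocates
snapshot = tracemalloc.take_snapshot()        # -> Snapshot
for stat in snapshot.statistics('lineno')[:3]:
    print(stat.size, stat.count, stat.traceback)
tracemalloc.stop()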
mypy-0.560/typeshed/stdlib/3.5/0000755€tŠÔÚ€2›s®0000000000013215007244022305 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3.5/zipapp.pyi0000644€tŠÔÚ€2›s®0000000055313215007212024331 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for zipapp (Python 3.5+) from pathlib import Path from typing import BinaryIO, Optional, Union _Path = Union[str, Path, BinaryIO] class ZipAppError(Exception): ... def create_archive(source: _Path, target: Optional[_Path] = ..., interpreter: Optional[str] = ..., main: Optional[str] = ...) -> None: ... def get_interpreter(archive: _Path) -> str: ... mypy-0.560/typeshed/stdlib/3.6/0000755€tŠÔÚ€2›s®0000000000013215007244022306 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/stdlib/3.6/secrets.pyi0000644€tŠÔÚ€2›s®0000000074313215007212024500 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for secrets (Python 3.6) from typing import Optional, Sequence, TypeVar from hmac import compare_digest as compare_digest from random import SystemRandom as SystemRandom _T = TypeVar('_T') def randbelow(exclusive_upper_bound: int) -> int: ... def randbits(k: int) -> int: ... def choice(seq: Sequence[_T]) -> _T: ... def token_bytes(nbytes: Optional[int]) -> bytes: ... def token_hex(nbytes: Optional[int]) -> str: ... def token_urlsafe(nbytes: Optional[int]) -> str: ... mypy-0.560/typeshed/tests/0000755€tŠÔÚ€2›s®0000000000013215007244021661 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/tests/mypy_selftest.py0000755€tŠÔÚ€2›s®0000000225113215007212025140 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 """Script to run mypy's test suite against this version of typeshed.""" from pathlib import Path import shutil import subprocess import sys import tempfile if __name__ == '__main__': with tempfile.TemporaryDirectory() as tempdir: dirpath = Path(tempdir) subprocess.run(['python2.7', '-m', 'pip', 'install', '--user', 'typing'], check=True) subprocess.run(['git', 'clone', '--depth', '1', 'git://github.com/python/mypy', str(dirpath / 'mypy')], check=True) subprocess.run([sys.executable, '-m', 'pip', 'install', '-U', '-r', str(dirpath / 'mypy/test-requirements.txt')], check=True) shutil.copytree('stdlib', str(dirpath / 'mypy/typeshed/stdlib')) shutil.copytree('third_party', str(dirpath / 'mypy/typeshed/third_party')) try: subprocess.run(['./runtests.py', '-j12'], cwd=str(dirpath / 'mypy'), check=True) except subprocess.CalledProcessError as e: print('mypy tests failed', file=sys.stderr) sys.exit(e.returncode) else: print('mypy tests succeeded', file=sys.stderr) sys.exit(0) mypy-0.560/typeshed/tests/mypy_test.py0000755€tŠÔÚ€2›s®0000001271313215007212024272 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python3 """Test runner for typeshed. Depends on mypy being installed. Approach: 1. Parse sys.argv 2. Compute appropriate arguments for mypy 3. Stuff those arguments into sys.argv 4. Run mypy.main('') 5. Repeat steps 2-4 for other mypy runs (e.g. --py2) """ import os import re import sys import argparse parser = argparse.ArgumentParser(description="Test runner for typeshed. 
" "Patterns are unanchored regexps on the full path.") parser.add_argument('-v', '--verbose', action='count', default=0, help="More output") parser.add_argument('-n', '--dry-run', action='store_true', help="Don't actually run mypy") parser.add_argument('-x', '--exclude', type=str, nargs='*', help="Exclude pattern") parser.add_argument('-p', '--python-version', type=str, nargs='*', help="These versions only (major[.minor])") parser.add_argument('--no-implicit-optional', action='store_true', help="Run mypy with --no-implicit-optional (causes lots of errors)") parser.add_argument('--warn-unused-ignores', action='store_true', help="Run mypy with --warn-unused-ignores " "(hint: only git rid of warnings that are " "unused for all platforms and Python versions)") parser.add_argument('filter', type=str, nargs='*', help="Include pattern (default all)") def log(args, *varargs): if args.verbose >= 2: print(*varargs) def match(fn, args, blacklist): if blacklist.match(fn): log(args, fn, 'exluded by blacklist') return False if not args.filter and not args.exclude: log(args, fn, 'accept by default') return True if args.exclude: for f in args.exclude: if re.search(f, fn): log(args, fn, 'excluded by pattern', f) return False if args.filter: for f in args.filter: if re.search(f, fn): log(args, fn, 'accepted by pattern', f) return True if args.filter: log(args, fn, 'rejected (no pattern matches)') return False log(args, fn, 'accepted (no exclude pattern matches)') return True def libpath(major, minor): versions = ['%d.%d' % (major, minor) for minor in reversed(range(minor + 1))] versions.append(str(major)) versions.append('2and3') paths = [] for v in versions: for top in ['stdlib', 'third_party']: p = os.path.join(top, v) if os.path.isdir(p): paths.append(p) return paths def main(): args = parser.parse_args() with open(os.path.join(os.path.dirname(__file__), "mypy_blacklist.txt")) as f: blacklist = re.compile("(%s)$" % "|".join( re.findall(r"^\s*([^\s#]+)\s*(?:#.*)?$", f.read(), flags=re.M))) try: from mypy.main import main as mypy_main except ImportError: print("Cannot import mypy. Did you install it?") sys.exit(1) versions = [(3, 6), (3, 5), (3, 4), (3, 3), (2, 7)] if args.python_version: versions = [v for v in versions if any(('%d.%d' % v).startswith(av) for av in args.python_version)] if not versions: print("--- no versions selected ---") sys.exit(1) code = 0 runs = 0 for major, minor in versions: roots = libpath(major, minor) files = [] seen = {'__builtin__', 'builtins', 'typing'} # Always ignore these. 
for root in roots: names = os.listdir(root) for name in names: full = os.path.join(root, name) mod, ext = os.path.splitext(name) if mod in seen or mod.startswith('.'): continue if ext in ['.pyi', '.py']: if match(full, args, blacklist): seen.add(mod) files.append(full) elif (os.path.isfile(os.path.join(full, '__init__.pyi')) or os.path.isfile(os.path.join(full, '__init__.py'))): for r, ds, fs in os.walk(full): ds.sort() fs.sort() for f in fs: m, x = os.path.splitext(f) if x in ['.pyi', '.py']: fn = os.path.join(r, f) if match(fn, args, blacklist): seen.add(mod) files.append(fn) if files: runs += 1 flags = ['--python-version', '%d.%d' % (major, minor)] flags.append('--strict-optional') if args.no_implicit_optional: flags.append('--no-implicit-optional') if args.warn_unused_ignores: flags.append('--warn-unused-ignores') sys.argv = ['mypy'] + flags + files if args.verbose: print("running", ' '.join(sys.argv)) else: print("running mypy", ' '.join(flags), "# with", len(files), "files") try: if not args.dry_run: mypy_main('') except SystemExit as err: code = max(code, err.code) if code: print("--- exit status", code, "---") sys.exit(code) if not runs: print("--- nothing to do; exit 1 ---") sys.exit(1) if __name__ == '__main__': main() mypy-0.560/typeshed/tests/pytype_test.py0000755€tŠÔÚ€2›s®0000001105613215007212024625 0ustar jukkaDROPBOX\Domain Users00000000000000#!/usr/bin/env python """Test runner for typeshed. Depends on mypy and pytype being installed. If pytype is installed: 1. For every pyi, do nothing if it is in pytype_blacklist.txt. 2. If the blacklist line has a "# parse only" comment run "pytd " in a separate process. 3. If the file is not in the blacklist run "pytype --typeshed-location=typeshed_location --module-name=foo \ --convert-to-pickle=tmp_file . Option two will parse the file, mostly syntactical correctness. Option three will load the file and all the builtins, typeshed dependencies. This will also discover incorrect usage of imported modules. 
""" import os import re import sys import argparse import subprocess import collections parser = argparse.ArgumentParser(description="Pytype tests.") parser.add_argument('-n', '--dry-run', action='store_true', help="Don't actually run tests") parser.add_argument('--num-parallel', type=int, default=1, help="Number of test processes to spawn") def main(): args = parser.parse_args() code, runs = pytype_test(args) if code: print('--- exit status %d ---' % code) sys.exit(code) if not runs: print('--- nothing to do; exit 1 ---') sys.exit(1) def load_blacklist(): filename = os.path.join(os.path.dirname(__file__), "pytype_blacklist.txt") skip_re = re.compile(r'^\s*([^\s#]+)\s*(?:#.*)?$') parse_only_re = re.compile(r'^\s*([^\s#]+)\s*#\s*parse only\s*') skip = [] parse_only = [] with open(filename) as f: for line in f: parse_only_match = parse_only_re.match(line) skip_match = skip_re.match(line) if parse_only_match: parse_only.append(parse_only_match.group(1)) elif skip_match: skip.append(skip_match.group(1)) return skip, parse_only class BinaryRun(object): def __init__(self, args, dry_run=False): self.args = args self.dry_run = dry_run self.results = None if dry_run: self.results = (0, '', '') else: self.proc = subprocess.Popen( self.args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) def communicate(self): if self.results: return self.results stdout, stderr = self.proc.communicate() self.results = self.proc.returncode, stdout, stderr return self.results def _get_module_name(filename): """Converts a filename stdblib/m.n/module/foo to module.foo.""" return '.'.join(filename.split(os.path.sep)[2:]).replace( '.pyi', '').replace('.__init__', '') def pytype_test(args): try: BinaryRun(['pytd', '-h']).communicate() except OSError: print('Cannot run pytd. Did you install pytype?') return 0, 0 skip, parse_only = load_blacklist() wanted = re.compile(r'stdlib/.*\.pyi$') skipped = re.compile('(%s)$' % '|'.join(skip)) parse_only = re.compile('(%s)$' % '|'.join(parse_only)) pytype_run = [] pytd_run = [] for root, _, filenames in os.walk('stdlib'): for f in sorted(filenames): f = os.path.join(root, f) if wanted.search(f): if parse_only.search(f): pytd_run.append(f) elif not skipped.search(f): pytype_run.append(f) running_tests = collections.deque() max_code, runs, errors = 0, 0, 0 files = pytype_run + pytd_run while 1: while files and len(running_tests) < args.num_parallel: f = files.pop() if f in pytype_run: test_run = BinaryRun( ['pytype', '--typeshed-location=%s' % os.getcwd(), '--module-name=%s' % _get_module_name(f), '--convert-to-pickle=%s' % os.devnull, f], dry_run=args.dry_run) elif f in pytd_run: test_run = BinaryRun(['pytd', f], dry_run=args.dry_run) else: raise ValueError('Unknown action for file: %s' % f) running_tests.append(test_run) if not running_tests: break test_run = running_tests.popleft() code, stdout, stderr = test_run.communicate() max_code = max(max_code, code) runs += 1 if code: print(stderr) errors += 1 print('Ran pytype with %d pyis, got %d errors.' 
% (runs, errors)) return max_code, runs if __name__ == '__main__': main() mypy-0.560/typeshed/third_party/0000755€tŠÔÚ€2›s®0000000000013215007242023046 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/0000755€tŠÔÚ€2›s®0000000000013215007244023211 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/concurrent/0000755€tŠÔÚ€2›s®0000000000013215007244025373 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/concurrent/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212027636 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/concurrent/futures/0000755€tŠÔÚ€2›s®0000000000013215007244027070 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/concurrent/futures/__init__.pyi0000644€tŠÔÚ€2›s®0000000365413215007212031355 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for concurrent.futures (Python 2) from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Optional, Set, Tuple, Union from types import TracebackType _T = TypeVar('_T') class Error(Exception): ... class CancelledError(Error): ... class TimeoutError(Error): ... class Future(Generic[_T]): def cancel(self) -> bool: ... def cancelled(self) -> bool: ... def running(self) -> bool: ... def done(self) -> bool: ... def result(self, timeout: Optional[float] = ...) -> _T: ... def exception(self, timeout: Optional[float] = ...) -> Any: ... def exception_info(self, timeout: Optional[float] = ...) -> Tuple[Any, Optional[TracebackType]]: ... def add_done_callback(self, fn: Callable[[Future], Any]) -> None: ... def set_running_or_notify_cancel(self) -> bool: ... def set_result(self, result: _T) -> None: ... def set_exception(self, exception: Any) -> None: ... def set_exception_info(self, exception: Any, traceback: TracebackType) -> None: ... class Executor: def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ... def map(self, func: Callable[..., _T], *iterables: Iterable[Any], timeout: Optional[float] = ...) -> Iterator[_T]: ... def shutdown(self, wait: bool = ...) -> None: ... def __enter__(self) -> Executor: ... def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool: ... class ThreadPoolExecutor(Executor): def __init__(self, max_workers: Optional[int] = ...) -> None: ... class ProcessPoolExecutor(Executor): def __init__(self, max_workers: Optional[int] = ...) -> None: ... def wait(fs: Iterable[Future], timeout: Optional[float] = ..., return_when: str = ...) -> Tuple[Set[Future], Set[Future]]: ... FIRST_COMPLETED = ... # type: str FIRST_EXCEPTION = ... # type: str ALL_COMPLETED = ... # type: str def as_completed(fs: Iterable[Future], timeout: Optional[float] = ...) -> Iterator[Future]: ... 
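A minimal sketch of the Python 2 code the concurrent.futures stub above is written to check; the square helper and the worker count are made up for illustration, only the submit/result/Future[_T] relationships come from the stub:

# Hypothetical Python 2 snippet; checked with mypy --py2 against the stub above.
from concurrent.futures import ThreadPoolExecutor

def square(n):
    # type: (int) -> int
    return n * n

with ThreadPoolExecutor(max_workers=4) as pool:
    futures = [pool.submit(square, n) for n in range(10)]  # List[Future[int]]
    results = [f.result() for f in futures]                # List[int], via Future[_T]

Note that as_completed() in this stub returns a bare Iterator[Future], so results obtained through it degrade to Any rather than int.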
mypy-0.560/typeshed/third_party/2/cryptography/0000755€tŠÔÚ€2›s®0000000000013215007244025744 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/cryptography/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030207 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/cryptography/hazmat/0000755€tŠÔÚ€2›s®0000000000013215007244027230 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/cryptography/hazmat/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212031473 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/cryptography/hazmat/primitives/0000755€tŠÔÚ€2›s®0000000000013215007244031423 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/cryptography/hazmat/primitives/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212033666 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/0000755€tŠÔÚ€2›s®0000000000013215007244033600 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212036043 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/dsa.pyi0000644€tŠÔÚ€2›s®0000000017113215007212035064 0ustar jukkaDROPBOX\Domain Users00000000000000# Minimal stub expressing only the classes required by OpenSSL.crypto. class DSAPrivateKey: ... class DSAPublicKey: ... mypy-0.560/typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/rsa.pyi0000644€tŠÔÚ€2›s®0000000017113215007212035102 0ustar jukkaDROPBOX\Domain Users00000000000000# Minimal stub expressing only the classes required by OpenSSL.crypto. class RSAPrivateKey: ... class RSAPublicKey: ... mypy-0.560/typeshed/third_party/2/cryptography/hazmat/primitives/serialization.pyi0000644€tŠÔÚ€2›s®0000000134213215007212035016 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from enum import Enum def load_pem_private_key(data, password, backend): ... def load_pem_public_key(data, backend): ... def load_der_private_key(data, password, backend): ... def load_der_public_key(data, backend): ... def load_ssh_public_key(data, backend): ... class Encoding(Enum): PEM = ... # type: str DER = ... # type: str class PrivateFormat(Enum): PKCS8 = ... # type: str TraditionalOpenSSL = ... # type: str class PublicFormat(Enum): SubjectPublicKeyInfo = ... # type: str PKCS1 = ... # type: str class KeySerializationEncryption: ... class BestAvailableEncryption: password = ... # type: Any def __init__(self, password) -> None: ... class NoEncryption: ... mypy-0.560/typeshed/third_party/2/dateutil/0000755€tŠÔÚ€2›s®0000000000013215007244025024 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/dateutil/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212027267 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/dateutil/parser.pyi0000644€tŠÔÚ€2›s®0000000345013215007212027040 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Tuple, Optional, Callable, Union, IO, Any, Dict, Mapping, Text from datetime import datetime, tzinfo _FileOrStr = Union[bytes, Text, IO[str], IO[Any]] __all__ = ... # type: List[str] class parserinfo(object): JUMP = ... # type: List[str] WEEKDAYS = ... # type: List[Tuple[str, str]] MONTHS = ... # type: List[Tuple[str, str]] HMS = ... 
# type: List[Tuple[str, str, str]] AMPM = ... # type: List[Tuple[str, str]] UTCZONE = ... # type: List[str] PERTAIN = ... # type: List[str] TZOFFSET = ... # type: Dict[str, int] def __init__(self, dayfirst: bool=..., yearfirst: bool=...) -> None: ... def jump(self, name: Text) -> bool: ... def weekday(self, name: Text) -> Optional[int]: ... def month(self, name: Text) -> Optional[int]: ... def hms(self, name: Text) -> Optional[int]: ... def ampm(self, name: Text) -> Optional[int]: ... def pertain(self, name: Text) -> bool: ... def utczone(self, name: Text) -> bool: ... def tzoffset(self, name: Text) -> Optional[int]: ... def convertyear(self, year: int) -> int: ... def validate(self, res: datetime) -> bool: ... class parser(object): def __init__(self, info: Optional[parserinfo] = ...) -> None: ... def parse(self, timestr: _FileOrStr, default: Optional[datetime] = ..., ignoretz: bool = ..., tzinfos: Optional[Mapping[Text, tzinfo]] = ..., **kwargs: Any) -> datetime: ... DEFAULTPARSER = ... # type: parser def parse(timestr: _FileOrStr, parserinfo: Optional[parserinfo] = ..., **kwargs: Any) -> datetime: ... class _tzparser: ... DEFAULTTZPARSER = ... # type: _tzparser class InvalidDatetimeError(ValueError): ... class InvalidDateError(InvalidDatetimeError): ... class InvalidTimeError(InvalidDatetimeError): ... mypy-0.560/typeshed/third_party/2/dateutil/relativedelta.pyi0000644€tŠÔÚ€2›s®0000000614113215007212030371 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import overload, Any, List, Optional, SupportsFloat, TypeVar, Union from datetime import date, datetime, timedelta __all__ = ... # type: List[str] _SelfT = TypeVar('_SelfT', bound=relativedelta) _DateT = TypeVar('_DateT', date, datetime) class weekday(object): def __init__(self, weekday: int, n: Optional[int]=...) -> None: ... def __call__(self, n: int) -> 'weekday': ... def __eq__(self, other) -> bool: ... def __repr__(self) -> str: ... weekday = ... # type: int n = ... # type: int MO = ... # type: weekday TU = ... # type: weekday WE = ... # type: weekday TH = ... # type: weekday FR = ... # type: weekday SA = ... # type: weekday SU = ... # type: weekday class relativedelta(object): def __init__(self, dt1: Optional[date]=..., dt2: Optional[date]=..., years: Optional[int]=..., months: Optional[int]=..., days: Optional[int]=..., leapdays: Optional[int]=..., weeks: Optional[int]=..., hours: Optional[int]=..., minutes: Optional[int]=..., seconds: Optional[int]=..., microseconds: Optional[int]=..., year: Optional[int]=..., month: Optional[int]=..., day: Optional[int]=..., weekday: Optional[Union[int, weekday]]=..., yearday: Optional[int]=..., nlyearday: Optional[int]=..., hour: Optional[int]=..., minute: Optional[int]=..., second: Optional[int]=..., microsecond: Optional[int]=...) -> None: ... @property def weeks(self) -> int: ... @weeks.setter def weeks(self, value: int) -> None: ... def normalized(self: _SelfT) -> _SelfT: ... # TODO: use Union when mypy will handle it properly in overloaded operator # methods (#2129, #1442, #1264 in mypy) @overload def __add__(self: _SelfT, other: relativedelta) -> _SelfT: ... @overload def __add__(self: _SelfT, other: timedelta) -> _SelfT: ... @overload def __add__(self, other: _DateT) -> _DateT: ... @overload def __radd__(self: _SelfT, other: relativedelta) -> _SelfT: ... @overload def __radd__(self: _SelfT, other: timedelta) -> _SelfT: ... @overload def __radd__(self, other: _DateT) -> _DateT: ... @overload def __rsub__(self: _SelfT, other: relativedelta) -> _SelfT: ... 
@overload def __rsub__(self: _SelfT, other: timedelta) -> _SelfT: ... @overload def __rsub__(self, other: _DateT) -> _DateT: ... def __sub__(self: _SelfT, other: relativedelta) -> _SelfT: ... def __neg__(self: _SelfT) -> _SelfT: ... def __bool__(self) -> bool: ... def __nonzero__(self) -> bool: ... def __mul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... def __rmul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... def __eq__(self, other) -> bool: ... def __ne__(self, other: object) -> bool: ... def __div__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... def __truediv__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... def __repr__(self) -> str: ... mypy-0.560/typeshed/third_party/2/dateutil/tz/0000755€tŠÔÚ€2›s®0000000000013215007244025461 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/dateutil/tz/__init__.pyi0000644€tŠÔÚ€2›s®0000000043413215007212027737 0ustar jukkaDROPBOX\Domain Users00000000000000from .tz import ( tzutc as tzutc, tzoffset as tzoffset, tzlocal as tzlocal, tzfile as tzfile, tzrange as tzrange, tzstr as tzstr, tzical as tzical, gettz as gettz, datetime_exists as datetime_exists, datetime_ambiguous as datetime_ambiguous, ) mypy-0.560/typeshed/third_party/2/dateutil/tz/_common.pyi0000644€tŠÔÚ€2›s®0000000151613215007212027631 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from datetime import datetime, tzinfo, timedelta def tzname_in_python2(namefunc): ... def enfold(dt: datetime, fold: int = ...): ... class _DatetimeWithFold(datetime): @property def fold(self): ... class _tzinfo(tzinfo): def is_ambiguous(self, dt: datetime) -> bool: ... def fromutc(self, dt: datetime) -> datetime: ... class tzrangebase(_tzinfo): def __init__(self) -> None: ... def utcoffset(self, dt: Optional[datetime]) -> Optional[timedelta]: ... def dst(self, dt: Optional[datetime]) -> Optional[timedelta]: ... def tzname(self, dt: Optional[datetime]) -> str: ... def fromutc(self, dt: datetime) -> datetime: ... def is_ambiguous(self, dt: datetime) -> bool: ... __hash__ = ... # type: Any def __ne__(self, other): ... __reduce__ = ... # type: Any mypy-0.560/typeshed/third_party/2/dateutil/tz/tz.pyi0000644€tŠÔÚ€2›s®0000000733413215007212026643 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Union, IO, Text, Tuple, List import datetime from ._common import tzname_in_python2 as tzname_in_python2, _tzinfo as _tzinfo from ._common import tzrangebase as tzrangebase, enfold as enfold from ..relativedelta import relativedelta _FileObj = Union[str, Text, IO[str], IO[Text]] ZERO = ... # type: datetime.timedelta EPOCH = ... # type: datetime.datetime EPOCHORDINAL = ... # type: int class tzutc(datetime.tzinfo): def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def tzname(self, dt: Optional[datetime.datetime]) -> str: ... def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ... def __eq__(self, other): ... __hash__ = ... # type: Any def __ne__(self, other): ... __reduce__ = ... # type: Any class tzoffset(datetime.tzinfo): def __init__(self, name, offset) -> None: ... def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ... def tzname(self, dt: Optional[datetime.datetime]) -> str: ... 
def __eq__(self, other): ... __hash__ = ... # type: Any def __ne__(self, other): ... __reduce__ = ... # type: Any class tzlocal(_tzinfo): def __init__(self) -> None: ... def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def tzname(self, dt: Optional[datetime.datetime]) -> str: ... def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ... def __eq__(self, other): ... __hash__ = ... # type: Any def __ne__(self, other): ... __reduce__ = ... # type: Any class _ttinfo: def __init__(self) -> None: ... def __eq__(self, other): ... __hash__ = ... # type: Any def __ne__(self, other): ... class tzfile(_tzinfo): def __init__(self, fileobj: _FileObj, filename: Optional[Text] = ...) -> None: ... def is_ambiguous(self, dt: Optional[datetime.datetime], idx: Optional[int] = ...) -> bool: ... def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def tzname(self, dt: Optional[datetime.datetime]) -> str: ... def __eq__(self, other): ... __hash__ = ... # type: Any def __ne__(self, other): ... def __reduce__(self): ... def __reduce_ex__(self, protocol): ... class tzrange(tzrangebase): hasdst = ... # type: bool def __init__(self, stdabbr: Text, stdoffset: Union[int, datetime.timedelta, None] = ..., dstabbr: Optional[Text] = ..., dstoffset: Union[int, datetime.timedelta, None] = ..., start: Optional[relativedelta] = ..., end: Optional[relativedelta] = ...) -> None: ... def transitions(self, year: int) -> Tuple[datetime.datetime, datetime.datetime]: ... def __eq__(self, other): ... class tzstr(tzrange): hasdst = ... # type: bool def __init__(self, s: Union[bytes, _FileObj], posix_offset: bool = ...) -> None: ... class tzical: def __init__(self, fileobj: _FileObj) -> None: ... def keys(self): ... def get(self, tzid: Optional[Any] = ...): ... TZFILES = ... # type: List[str] TZPATHS = ... # type: List[str] def gettz(name: Optional[Text] = ...) -> Optional[datetime.tzinfo]: ... def datetime_exists(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ... def datetime_ambiguous(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ... mypy-0.560/typeshed/third_party/2/enum.pyi0000644€tŠÔÚ€2›s®0000000251513215007212024676 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Any, TypeVar, Union, Iterable, Iterator, TypeVar, Generic, Type, Sized, Reversible, Container, Mapping from abc import ABCMeta _T = TypeVar('_T', bound=Enum) _S = TypeVar('_S', bound=Type[Enum]) # Note: EnumMeta actually subclasses type directly, not ABCMeta. # This is a temporary workaround to allow multiple creation of enums with builtins # such as str as mixins, which due to the handling of ABCs of builtin types, cause # spurious inconsistent metaclass structure. See #1595. class EnumMeta(ABCMeta, Iterable[Enum], Sized, Reversible[Enum], Container[Enum]): def __iter__(self: Type[_T]) -> Iterator[_T]: ... def __reversed__(self: Type[_T]) -> Iterator[_T]: ... def __contains__(self, member: Any) -> bool: ... def __getitem__(self: Type[_T], name: str) -> _T: ... @property def __members__(self: Type[_T]) -> Mapping[str, _T]: ... class Enum(metaclass=EnumMeta): def __new__(cls: Type[_T], value: Any) -> _T: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... def __dir__(self) -> List[str]: ... def __format__(self, format_spec: str) -> str: ... 
def __hash__(self) -> Any: ... def __reduce_ex__(self, proto: Any) -> Any: ... name = ... # type: str value = ... # type: Any class IntEnum(int, Enum): ... def unique(enumeration: _S) -> _S: ... mypy-0.560/typeshed/third_party/2/fb303/0000755€tŠÔÚ€2›s®0000000000013215007244024026 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/fb303/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212026271 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/fb303/FacebookService.pyi0000644€tŠÔÚ€2›s®0000002200313215007212027573 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from thrift.Thrift import TProcessor fastbinary = ... # type: Any class Iface: def getName(self): ... def getVersion(self): ... def getStatus(self): ... def getStatusDetails(self): ... def getCounters(self): ... def getCounter(self, key): ... def setOption(self, key, value): ... def getOption(self, key): ... def getOptions(self): ... def getCpuProfile(self, profileDurationInSec): ... def aliveSince(self): ... def reinitialize(self): ... def shutdown(self): ... class Client(Iface): def __init__(self, iprot, oprot=...) -> None: ... def getName(self): ... def send_getName(self): ... def recv_getName(self): ... def getVersion(self): ... def send_getVersion(self): ... def recv_getVersion(self): ... def getStatus(self): ... def send_getStatus(self): ... def recv_getStatus(self): ... def getStatusDetails(self): ... def send_getStatusDetails(self): ... def recv_getStatusDetails(self): ... def getCounters(self): ... def send_getCounters(self): ... def recv_getCounters(self): ... def getCounter(self, key): ... def send_getCounter(self, key): ... def recv_getCounter(self): ... def setOption(self, key, value): ... def send_setOption(self, key, value): ... def recv_setOption(self): ... def getOption(self, key): ... def send_getOption(self, key): ... def recv_getOption(self): ... def getOptions(self): ... def send_getOptions(self): ... def recv_getOptions(self): ... def getCpuProfile(self, profileDurationInSec): ... def send_getCpuProfile(self, profileDurationInSec): ... def recv_getCpuProfile(self): ... def aliveSince(self): ... def send_aliveSince(self): ... def recv_aliveSince(self): ... def reinitialize(self): ... def send_reinitialize(self): ... def shutdown(self): ... def send_shutdown(self): ... class Processor(Iface, TProcessor): def __init__(self, handler) -> None: ... def process(self, iprot, oprot): ... def process_getName(self, seqid, iprot, oprot): ... def process_getVersion(self, seqid, iprot, oprot): ... def process_getStatus(self, seqid, iprot, oprot): ... def process_getStatusDetails(self, seqid, iprot, oprot): ... def process_getCounters(self, seqid, iprot, oprot): ... def process_getCounter(self, seqid, iprot, oprot): ... def process_setOption(self, seqid, iprot, oprot): ... def process_getOption(self, seqid, iprot, oprot): ... def process_getOptions(self, seqid, iprot, oprot): ... def process_getCpuProfile(self, seqid, iprot, oprot): ... def process_aliveSince(self, seqid, iprot, oprot): ... def process_reinitialize(self, seqid, iprot, oprot): ... def process_shutdown(self, seqid, iprot, oprot): ... class getName_args: thrift_spec = ... # type: Any def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getName_result: thrift_spec = ... # type: Any success = ... # type: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... 
def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getVersion_args: thrift_spec = ... # type: Any def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getVersion_result: thrift_spec = ... # type: Any success = ... # type: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getStatus_args: thrift_spec = ... # type: Any def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getStatus_result: thrift_spec = ... # type: Any success = ... # type: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getStatusDetails_args: thrift_spec = ... # type: Any def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getStatusDetails_result: thrift_spec = ... # type: Any success = ... # type: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getCounters_args: thrift_spec = ... # type: Any def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getCounters_result: thrift_spec = ... # type: Any success = ... # type: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getCounter_args: thrift_spec = ... # type: Any key = ... # type: Any def __init__(self, key=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getCounter_result: thrift_spec = ... # type: Any success = ... # type: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class setOption_args: thrift_spec = ... # type: Any key = ... # type: Any value = ... # type: Any def __init__(self, key=..., value=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class setOption_result: thrift_spec = ... # type: Any def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getOption_args: thrift_spec = ... # type: Any key = ... # type: Any def __init__(self, key=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getOption_result: thrift_spec = ... # type: Any success = ... # type: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getOptions_args: thrift_spec = ... # type: Any def read(self, iprot): ... def write(self, oprot): ... 
def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getOptions_result: thrift_spec = ... # type: Any success = ... # type: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getCpuProfile_args: thrift_spec = ... # type: Any profileDurationInSec = ... # type: Any def __init__(self, profileDurationInSec=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class getCpuProfile_result: thrift_spec = ... # type: Any success = ... # type: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class aliveSince_args: thrift_spec = ... # type: Any def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class aliveSince_result: thrift_spec = ... # type: Any success = ... # type: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class reinitialize_args: thrift_spec = ... # type: Any def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class shutdown_args: thrift_spec = ... # type: Any def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... mypy-0.560/typeshed/third_party/2/gflags.pyi0000644€tŠÔÚ€2›s®0000002212413215007212025173 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, Iterable, IO, List, Union from types import ModuleType class FlagsError(Exception): ... class DuplicateFlag(FlagsError): ... class CantOpenFlagFileError(FlagsError): ... class DuplicateFlagCannotPropagateNoneToSwig(DuplicateFlag): ... class DuplicateFlagError(DuplicateFlag): def __init__(self, flagname: str, flag_values: FlagValues, other_flag_values: FlagValues = ...) -> None: ... class IllegalFlagValue(FlagsError): ... class UnrecognizedFlag(FlagsError): ... class UnrecognizedFlagError(UnrecognizedFlag): def __init__(self, flagname: str, flagvalue: str = ...) -> None: ... def GetHelpWidth() -> int: ... def CutCommonSpacePrefix(text) -> str: ... def TextWrap(text: str, length: int = ..., indent: str = ..., firstline_indent: str = ..., tabs: str = ...) -> str: ... def DocToHelp(doc: str) -> str: ... class FlagValues: def __init__(self) -> None: ... def UseGnuGetOpt(self, use_gnu_getopt: bool = ...) -> None: ... def IsGnuGetOpt(self) -> bool: ... # TODO dict type def FlagDict(self) -> dict: ... def FlagsByModuleDict(self) -> Dict[str, List[Flag]]: ... def FlagsByModuleIdDict(self) -> Dict[int, List[Flag]]: ... def KeyFlagsByModuleDict(self) -> Dict[str, List[Flag]]: ... def FindModuleDefiningFlag(self, flagname: str, default: str = ...) -> str: ... def FindModuleIdDefiningFlag(self, flagname: str, default: int = ...) -> int: ... def AppendFlagValues(self, flag_values: FlagValues) -> None: ... def RemoveFlagValues(self, flag_values: FlagValues) -> None: ... def __setitem__(self, name: str, flag: Flag) -> None: ... def __getitem__(self, name: str) -> Flag: ... def __getattr__(self, name: str) -> Any: ... 
def __setattr__(self, name: str, value: Any): ... def __delattr__(self, flag_name: str) -> None: ... def SetDefault(self, name: str, value: Any) -> None: ... def __contains__(self, name: str) -> bool: ... has_key = __contains__ def __iter__(self) -> Iterable[str]: ... def __call__(self, argv: List[str]) -> List[str]: ... def Reset(self) -> None: ... def RegisteredFlags(self) -> List[str]: ... def FlagValuesDict(self) -> Dict[str, Any]: ... def __str__(self) -> str: ... def GetHelp(self, prefix: str = ...) -> str: ... def ModuleHelp(self, module: Union[ModuleType, str]) -> str: ... def MainModuleHelp(self) -> str: ... def get(self, name: str, default: Any) -> Any: ... def ShortestUniquePrefixes(self, fl: Dict[str, Flag]) -> Dict[str, str]: ... def ExtractFilename(self, flagfile_str: str) -> str: ... def ReadFlagsFromFiles(self, argv: List[str], force_gnu: bool = ...) -> List[str]: ... def FlagsIntoString(self) -> str: ... def AppendFlagsIntoFile(self, filename: str) -> None: ... def WriteHelpInXMLFormat(self, outfile: IO[str] = ...) -> None: ... # TODO validator: gflags_validators.Validator def AddValidator(self, validator: Any) -> None: ... FLAGS = ... # type: FlagValues class Flag: name = ... # type: str default = ... # type: Any default_as_str = ... # type: str value = ... # type: Any help = ... # type: str short_name = ... # type: str boolean = False present = False parser = ... # type: ArgumentParser serializer = ... # type: ArgumentSerializer allow_override = False def __init__(self, parser: ArgumentParser, serializer: ArgumentSerializer, name: str, default: str, help_string: str, short_name: str = ..., boolean: bool = ..., allow_override: bool = ...) -> None: ... def Parse(self, argument: Any) -> Any: ... def Unparse(self) -> None: ... def Serialize(self) -> str: ... def SetDefault(self, value: Any) -> None: ... def Type(self) -> str: ... def WriteInfoInXMLFormat(self, outfile: IO[str], module_name: str, is_key: bool = ..., indent: str = ...) -> None: ... class ArgumentParser(object): syntactic_help = ... # type: str # TODO what is this def Parse(self, argument: Any) -> Any: ... def Type(self) -> str: ... def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str) -> None: ... class ArgumentSerializer: def Serialize(self, value: Any) -> unicode: ... class ListSerializer(ArgumentSerializer): def __init__(self, list_sep: str) -> None: ... def Serialize(self, value: List[Any]) -> str: ... def RegisterValidator(flag_name: str, checker: Callable[[Any], bool], message: str = ..., flag_values: FlagValues = ...) -> None: ... def MarkFlagAsRequired(flag_name: str, flag_values: FlagValues = ...) -> None: ... def DEFINE(parser: ArgumentParser, name: str, default: Any, help: str, flag_values: FlagValues = ..., serializer: ArgumentSerializer = ..., **args: Any) -> None: ... def DEFINE_flag(flag: Flag, flag_values: FlagValues = ...) -> None: ... def DECLARE_key_flag(flag_name: str, flag_values: FlagValues = ...) -> None: ... def ADOPT_module_key_flags(module: ModuleType, flag_values: FlagValues = ...) -> None: ... def DEFINE_string(name: str, default: str, help: str, flag_values: FlagValues = ..., **args: Any): ... class BooleanParser(ArgumentParser): def Convert(self, argument: Any) -> bool: ... def Parse(self, argument: Any) -> bool: ... def Type(self) -> str: ... class BooleanFlag(Flag): def __init__(self, name: str, default: bool, help: str, short_name=..., **args: Any) -> None: ... 
def DEFINE_boolean(name: str, default: bool, help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... DEFINE_bool = DEFINE_boolean class HelpFlag(BooleanFlag): def __init__(self) -> None: ... def Parse(self, arg: Any) -> None: ... class HelpXMLFlag(BooleanFlag): def __init__(self) -> None: ... def Parse(self, arg: Any) -> None: ... class HelpshortFlag(BooleanFlag): def __init__(self) -> None: ... def Parse(self, arg: Any) -> None: ... class NumericParser(ArgumentParser): def IsOutsideBounds(self, val: float) -> bool: ... def Parse(self, argument: Any) -> float: ... def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str) -> None: ... def Convert(self, argument: Any) -> Any: ... class FloatParser(NumericParser): number_article = ... # type: str number_name = ... # type: str syntactic_help = ... # type: str def __init__(self, lower_bound: float = ..., upper_bound: float = ...) -> None: ... def Convert(self, argument: Any) -> float: ... def Type(self) -> str: ... def DEFINE_float(name: str, default: float, help: str, lower_bound: float = ..., upper_bound: float = ..., flag_values: FlagValues = ..., **args: Any) -> None: ... class IntegerParser(NumericParser): number_article = ... # type: str number_name = ... # type: str syntactic_help = ... # type: str def __init__(self, lower_bound: int = ..., upper_bound: int = ...) -> None: ... def Convert(self, argument: Any) -> int: ... def Type(self) -> str: ... def DEFINE_integer(name: str, default: int, help: str, lower_bound: int = ..., upper_bound: int = ..., flag_values: FlagValues = ..., **args: Any) -> None: ... class EnumParser(ArgumentParser): def __init__(self, enum_values: List[str]) -> None: ... def Parse(self, argument: Any) -> Any: ... def Type(self) -> str: ... class EnumFlag(Flag): def __init__(self, name: str, default: str, help: str, enum_values: List[str], short_name: str, **args: Any) -> None: ... def DEFINE_enum(name: str, default: str, enum_values: List[str], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... class BaseListParser(ArgumentParser): def __init__(self, token: str = ..., name: str = ...) -> None: ... def Parse(self, argument: Any) -> list: ... def Type(self) -> str: ... class ListParser(BaseListParser): def __init__(self) -> None: ... def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str): ... class WhitespaceSeparatedListParser(BaseListParser): def __init__(self) -> None: ... def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str): ... def DEFINE_list(name: str, default: List[str], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... def DEFINE_spaceseplist(name: str, default: List[str], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... class MultiFlag(Flag): def __init__(self, *args: Any, **kwargs: Any) -> None: ... def Parse(self, arguments: Any) -> None: ... def Serialize(self) -> str: ... def Type(self) -> str: ... def DEFINE_multistring(name: str, default: Union[str, List[str]], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... def DEFINE_multi_int(name: str, default: Union[int, List[int]], help: str, lower_bound: int = ..., upper_bound: int = ..., flag_values: FlagValues = ..., **args: Any) -> None: ... def DEFINE_multi_float(name: str, default: Union[float, List[float]], help: str, lower_bound: float = ..., upper_bound: float = ..., flag_values: FlagValues = ..., **args: Any) -> None: ... 
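A small, hypothetical gflags program of the kind the stub above describes; the flag names, defaults, and messages are invented for the example — only the DEFINE_* signatures, FLAGS, and FlagValues.__call__ come from the stub:

# Illustrative sketch, not part of typeshed.
import sys
from typing import List
import gflags

gflags.DEFINE_string('name', 'world', 'Name to greet')
gflags.DEFINE_integer('times', 1, 'Number of greetings', lower_bound=1)
FLAGS = gflags.FLAGS

def main(argv):
    # type: (List[str]) -> None
    argv = FLAGS(argv)            # FlagValues.__call__ parses flags, returns leftover args
    for _ in range(FLAGS.times):  # attribute access goes through __getattr__ -> Any
        print('Hello, %s!' % FLAGS.name)

if __name__ == '__main__':
    main(sys.argv)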
mypy-0.560/typeshed/third_party/2/google/0000755€tŠÔÚ€2›s®0000000000013215007244024465 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/google/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212026730 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/google/protobuf/0000755€tŠÔÚ€2›s®0000000000013215007244026325 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/google/protobuf/__init__.pyi0000644€tŠÔÚ€2›s®0000000003713215007212030602 0ustar jukkaDROPBOX\Domain Users00000000000000__version__ = ... # type: str mypy-0.560/typeshed/third_party/2/google/protobuf/descriptor.pyi0000644€tŠÔÚ€2›s®0000001555113215007212031230 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .message import Message class Error(Exception): ... class TypeTransformationError(Error): ... class DescriptorMetaclass(type): def __instancecheck__(cls, obj): ... class DescriptorBase: __metaclass__ = DescriptorMetaclass has_options = ... # type: Any def __init__(self, options, options_class_name) -> None: ... def GetOptions(self): ... class _NestedDescriptorBase(DescriptorBase): name = ... # type: Any full_name = ... # type: Any file = ... # type: Any containing_type = ... # type: Any def __init__(self, options, options_class_name, name, full_name, file, containing_type, serialized_start=..., serialized_end=...) -> None: ... def GetTopLevelContainingType(self): ... def CopyToProto(self, proto): ... class Descriptor(_NestedDescriptorBase): def __new__(cls, name, full_name, filename, containing_type, fields, nested_types, enum_types, extensions, options=..., is_extendable=..., extension_ranges=..., oneofs=..., file=..., serialized_start=..., serialized_end=..., syntax=...): ... fields = ... # type: Any fields_by_number = ... # type: Any fields_by_name = ... # type: Any nested_types = ... # type: Any nested_types_by_name = ... # type: Any enum_types = ... # type: Any enum_types_by_name = ... # type: Any enum_values_by_name = ... # type: Any extensions = ... # type: Any extensions_by_name = ... # type: Any is_extendable = ... # type: Any extension_ranges = ... # type: Any oneofs = ... # type: Any oneofs_by_name = ... # type: Any syntax = ... # type: Any def __init__(self, name, full_name, filename, containing_type, fields, nested_types, enum_types, extensions, options=..., is_extendable=..., extension_ranges=..., oneofs=..., file=..., serialized_start=..., serialized_end=..., syntax=...) -> None: ... def EnumValueName(self, enum, value): ... def CopyToProto(self, proto): ... class FieldDescriptor(DescriptorBase): TYPE_DOUBLE = ... # type: Any TYPE_FLOAT = ... # type: Any TYPE_INT64 = ... # type: Any TYPE_UINT64 = ... # type: Any TYPE_INT32 = ... # type: Any TYPE_FIXED64 = ... # type: Any TYPE_FIXED32 = ... # type: Any TYPE_BOOL = ... # type: Any TYPE_STRING = ... # type: Any TYPE_GROUP = ... # type: Any TYPE_MESSAGE = ... # type: Any TYPE_BYTES = ... # type: Any TYPE_UINT32 = ... # type: Any TYPE_ENUM = ... # type: Any TYPE_SFIXED32 = ... # type: Any TYPE_SFIXED64 = ... # type: Any TYPE_SINT32 = ... # type: Any TYPE_SINT64 = ... # type: Any MAX_TYPE = ... # type: Any CPPTYPE_INT32 = ... # type: Any CPPTYPE_INT64 = ... # type: Any CPPTYPE_UINT32 = ... # type: Any CPPTYPE_UINT64 = ... # type: Any CPPTYPE_DOUBLE = ... # type: Any CPPTYPE_FLOAT = ... # type: Any CPPTYPE_BOOL = ... # type: Any CPPTYPE_ENUM = ... # type: Any CPPTYPE_STRING = ... # type: Any CPPTYPE_MESSAGE = ... # type: Any MAX_CPPTYPE = ... 
# type: Any LABEL_OPTIONAL = ... # type: Any LABEL_REQUIRED = ... # type: Any LABEL_REPEATED = ... # type: Any MAX_LABEL = ... # type: Any MAX_FIELD_NUMBER = ... # type: Any FIRST_RESERVED_FIELD_NUMBER = ... # type: Any LAST_RESERVED_FIELD_NUMBER = ... # type: Any def __new__(cls, name, full_name, index, number, type, cpp_type, label, default_value, message_type, enum_type, containing_type, is_extension, extension_scope, options=..., has_default_value=..., containing_oneof=...): ... name = ... # type: Any full_name = ... # type: Any index = ... # type: Any number = ... # type: Any type = ... # type: Any cpp_type = ... # type: Any label = ... # type: Any has_default_value = ... # type: Any default_value = ... # type: Any containing_type = ... # type: Any message_type = ... # type: Any enum_type = ... # type: Any is_extension = ... # type: Any extension_scope = ... # type: Any containing_oneof = ... # type: Any def __init__(self, name, full_name, index, number, type, cpp_type, label, default_value, message_type, enum_type, containing_type, is_extension, extension_scope, options=..., has_default_value=..., containing_oneof=...) -> None: ... @staticmethod def ProtoTypeToCppProtoType(proto_type): ... class EnumDescriptor(_NestedDescriptorBase): def __new__(cls, name, full_name, filename, values, containing_type=..., options=..., file=..., serialized_start=..., serialized_end=...): ... values = ... # type: Any values_by_name = ... # type: Any values_by_number = ... # type: Any def __init__(self, name, full_name, filename, values, containing_type=..., options=..., file=..., serialized_start=..., serialized_end=...) -> None: ... def CopyToProto(self, proto): ... class EnumValueDescriptor(DescriptorBase): def __new__(cls, name, index, number, type=..., options=...): ... name = ... # type: Any index = ... # type: Any number = ... # type: Any type = ... # type: Any def __init__(self, name, index, number, type=..., options=...) -> None: ... class OneofDescriptor: def __new__(cls, name, full_name, index, containing_type, fields): ... name = ... # type: Any full_name = ... # type: Any index = ... # type: Any containing_type = ... # type: Any fields = ... # type: Any def __init__(self, name, full_name, index, containing_type, fields) -> None: ... class ServiceDescriptor(_NestedDescriptorBase): index = ... # type: Any methods = ... # type: Any def __init__(self, name, full_name, index, methods, options=..., file=..., serialized_start=..., serialized_end=...) -> None: ... def FindMethodByName(self, name): ... def CopyToProto(self, proto): ... class MethodDescriptor(DescriptorBase): name = ... # type: Any full_name = ... # type: Any index = ... # type: Any containing_service = ... # type: Any input_type = ... # type: Any output_type = ... # type: Any def __init__(self, name, full_name, index, containing_service, input_type, output_type, options=...) -> None: ... class FileDescriptor(DescriptorBase): def __new__(cls, name, package, options=..., serialized_pb=..., dependencies=..., syntax=...): ... _options = ... # type: Any message_types_by_name = ... # type: Any name = ... # type: Any package = ... # type: Any syntax = ... # type: Any serialized_pb = ... # type: Any enum_types_by_name = ... # type: Any extensions_by_name = ... # type: Any dependencies = ... # type: Any def __init__(self, name, package, options=..., serialized_pb=..., dependencies=..., syntax=...) -> None: ... def CopyToProto(self, proto): ... def MakeDescriptor(desc_proto, package=..., build_file_if_cpp=..., syntax=...): ... 
def _ParseOptions(message: Message, string: str) -> Message: ... mypy-0.560/typeshed/third_party/2/google/protobuf/descriptor_pb2.pyi0000644€tŠÔÚ€2›s®0000000007713215007212031770 0ustar jukkaDROPBOX\Domain Users00000000000000class FileOptions(object): ... class FieldOptions(object): ... mypy-0.560/typeshed/third_party/2/google/protobuf/descriptor_pool.pyi0000644€tŠÔÚ€2›s®0000000135013215007212032251 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class DescriptorPool: def __new__(cls, descriptor_db: Optional[Any] = ...): ... def __init__(self, descriptor_db: Optional[Any] = ...) -> None: ... def Add(self, file_desc_proto): ... def AddSerializedFile(self, serialized_file_desc_proto): ... def AddDescriptor(self, desc): ... def AddEnumDescriptor(self, enum_desc): ... def AddFileDescriptor(self, file_desc): ... def FindFileByName(self, file_name): ... def FindFileContainingSymbol(self, symbol): ... def FindMessageTypeByName(self, full_name): ... def FindEnumTypeByName(self, full_name): ... def FindFieldByName(self, full_name): ... def FindExtensionByName(self, full_name): ... def Default(): ... mypy-0.560/typeshed/third_party/2/google/protobuf/internal/0000755€tŠÔÚ€2›s®0000000000013215007244030141 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/google/protobuf/internal/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212032404 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/google/protobuf/internal/decoder.pyi0000644€tŠÔÚ€2›s®0000000205613215007212032267 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def ReadTag(buffer, pos): ... def EnumDecoder(field_number, is_repeated, is_packed, key, new_default): ... Int32Decoder = ... # type: Any Int64Decoder = ... # type: Any UInt32Decoder = ... # type: Any UInt64Decoder = ... # type: Any SInt32Decoder = ... # type: Any SInt64Decoder = ... # type: Any Fixed32Decoder = ... # type: Any Fixed64Decoder = ... # type: Any SFixed32Decoder = ... # type: Any SFixed64Decoder = ... # type: Any FloatDecoder = ... # type: Any DoubleDecoder = ... # type: Any BoolDecoder = ... # type: Any def StringDecoder(field_number, is_repeated, is_packed, key, new_default): ... def BytesDecoder(field_number, is_repeated, is_packed, key, new_default): ... def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): ... def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): ... MESSAGE_SET_ITEM_TAG = ... # type: Any def MessageSetItemDecoder(extensions_by_number): ... def MapDecoder(field_descriptor, new_default, is_message_map): ... SkipField = ... # type: Any mypy-0.560/typeshed/third_party/2/google/protobuf/internal/encoder.pyi0000644€tŠÔÚ€2›s®0000000234713215007212032304 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any Int32Sizer = ... # type: Any UInt32Sizer = ... # type: Any SInt32Sizer = ... # type: Any Fixed32Sizer = ... # type: Any Fixed64Sizer = ... # type: Any BoolSizer = ... # type: Any def StringSizer(field_number, is_repeated, is_packed): ... def BytesSizer(field_number, is_repeated, is_packed): ... def GroupSizer(field_number, is_repeated, is_packed): ... def MessageSizer(field_number, is_repeated, is_packed): ... def MessageSetItemSizer(field_number): ... def MapSizer(field_descriptor): ... def TagBytes(field_number, wire_type): ... Int32Encoder = ... # type: Any UInt32Encoder = ... # type: Any SInt32Encoder = ... # type: Any Fixed32Encoder = ... # type: Any Fixed64Encoder = ... 
# type: Any SFixed32Encoder = ... # type: Any SFixed64Encoder = ... # type: Any FloatEncoder = ... # type: Any DoubleEncoder = ... # type: Any def BoolEncoder(field_number, is_repeated, is_packed): ... def StringEncoder(field_number, is_repeated, is_packed): ... def BytesEncoder(field_number, is_repeated, is_packed): ... def GroupEncoder(field_number, is_repeated, is_packed): ... def MessageEncoder(field_number, is_repeated, is_packed): ... def MessageSetItemEncoder(field_number): ... def MapEncoder(field_descriptor): ... mypy-0.560/typeshed/third_party/2/google/protobuf/internal/enum_type_wrapper.pyi0000644€tŠÔÚ€2›s®0000000053613215007212034430 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List, Tuple class EnumTypeWrapper(object): def __init__(self, enum_type: Any) -> None: ... def Name(self, number: int) -> str: ... def Value(self, name: str) -> int: ... def keys(self) -> List[str]: ... def values(self) -> List[int]: ... @classmethod def items(cls) -> List[Tuple[str, int]]: ... mypy-0.560/typeshed/third_party/2/google/protobuf/internal/wire_format.pyi0000644€tŠÔÚ€2›s®0000000343413215007212033201 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any TAG_TYPE_BITS = ... # type: Any TAG_TYPE_MASK = ... # type: Any WIRETYPE_VARINT = ... # type: Any WIRETYPE_FIXED64 = ... # type: Any WIRETYPE_LENGTH_DELIMITED = ... # type: Any WIRETYPE_START_GROUP = ... # type: Any WIRETYPE_END_GROUP = ... # type: Any WIRETYPE_FIXED32 = ... # type: Any INT32_MAX = ... # type: Any INT32_MIN = ... # type: Any UINT32_MAX = ... # type: Any INT64_MAX = ... # type: Any INT64_MIN = ... # type: Any UINT64_MAX = ... # type: Any FORMAT_UINT32_LITTLE_ENDIAN = ... # type: Any FORMAT_UINT64_LITTLE_ENDIAN = ... # type: Any FORMAT_FLOAT_LITTLE_ENDIAN = ... # type: Any FORMAT_DOUBLE_LITTLE_ENDIAN = ... # type: Any def PackTag(field_number, wire_type): ... def UnpackTag(tag): ... def ZigZagEncode(value): ... def ZigZagDecode(value): ... def Int32ByteSize(field_number, int32): ... def Int32ByteSizeNoTag(int32): ... def Int64ByteSize(field_number, int64): ... def UInt32ByteSize(field_number, uint32): ... def UInt64ByteSize(field_number, uint64): ... def SInt32ByteSize(field_number, int32): ... def SInt64ByteSize(field_number, int64): ... def Fixed32ByteSize(field_number, fixed32): ... def Fixed64ByteSize(field_number, fixed64): ... def SFixed32ByteSize(field_number, sfixed32): ... def SFixed64ByteSize(field_number, sfixed64): ... def FloatByteSize(field_number, flt): ... def DoubleByteSize(field_number, double): ... def BoolByteSize(field_number, b): ... def EnumByteSize(field_number, enum): ... def StringByteSize(field_number, string): ... def BytesByteSize(field_number, b): ... def GroupByteSize(field_number, message): ... def MessageByteSize(field_number, message): ... def MessageSetItemByteSize(field_number, msg): ... def TagByteSize(field_number): ... NON_PACKABLE_TYPES = ... # type: Any def IsTypePackable(field_type): ... mypy-0.560/typeshed/third_party/2/google/protobuf/message.pyi0000644€tŠÔÚ€2›s®0000000242513215007212030472 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Sequence, Optional, Text, Tuple from .descriptor import FieldDescriptor class Error(Exception): ... class DecodeError(Error): ... class EncodeError(Error): ... class Message: DESCRIPTOR = ... # type: Any def __deepcopy__(self, memo=...): ... def __eq__(self, other_msg): ... def __ne__(self, other_msg): ... def MergeFrom(self, other_msg: Message) -> None: ... 
def CopyFrom(self, other_msg: Message) -> None: ... def Clear(self) -> None: ... def SetInParent(self) -> None: ... def IsInitialized(self) -> bool: ... def MergeFromString(self, serialized: Any) -> int: ... # TODO: we need to be able to call buffer() on serialized def ParseFromString(self, serialized: Any) -> None: ... def SerializeToString(self) -> str: ... def SerializePartialToString(self) -> str: ... def ListFields(self) -> Sequence[Tuple[FieldDescriptor, Any]]: ... def HasField(self, field_name: Text) -> bool: ... def ClearField(self, field_name: Text) -> None: ... def WhichOneof(self, oneof_group) -> Optional[str]: ... def HasExtension(self, extension_handle): ... def ClearExtension(self, extension_handle): ... def ByteSize(self) -> int: ... # TODO: check kwargs def __init__(self, **kwargs) -> None: ... mypy-0.560/typeshed/third_party/2/google/protobuf/message_factory.pyi0000644€tŠÔÚ€2›s®0000000101113215007212032207 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Iterable, Optional, Type from .message import Message from .descriptor import Descriptor from .descriptor_pool import DescriptorPool class MessageFactory: pool = ... # type: Any def __init__(self, pool: Optional[DescriptorPool] = ...) -> None: ... def GetPrototype(self, descriptor: Descriptor) -> Type[Message]: ... def GetMessages(self, files: Iterable[str]) -> Dict[str, Type[Message]]: ... def GetMessages(file_protos: Iterable[str]) -> Dict[str, Type[Message]]: ... mypy-0.560/typeshed/third_party/2/google/protobuf/reflection.pyi0000644€tŠÔÚ€2›s®0000000034513215007212031177 0ustar jukkaDROPBOX\Domain Users00000000000000class GeneratedProtocolMessageType(type): def __new__(cls, name, bases, dictionary): ... def __init__(cls, name, bases, dictionary) -> None: ... def ParseMessage(descriptor, byte_str): ... def MakeClass(descriptor): ... mypy-0.560/typeshed/third_party/2/google/protobuf/symbol_database.pyi0000644€tŠÔÚ€2›s®0000000120213215007212032167 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, Iterable, Type from .descriptor import EnumDescriptor, FileDescriptor from .message import Message from .message_factory import MessageFactory class SymbolDatabase(MessageFactory): def RegisterMessage(self, message: Type[Message]) -> Type[Message]: ... def RegisterEnumDescriptor(self, enum_descriptor: Type[EnumDescriptor]) -> EnumDescriptor: ... def RegisterFileDescriptor(self, file_descriptor: Type[FileDescriptor]) -> FileDescriptor: ... def GetSymbol(self, symbol: str) -> Type[Message]: ... def GetMessages(self, files: Iterable[str]) -> Dict[str, Type[Message]]: ... def Default(): ... mypy-0.560/typeshed/third_party/2/itsdangerous.pyi0000644€tŠÔÚ€2›s®0000001646213215007212026447 0ustar jukkaDROPBOX\Domain Users00000000000000from datetime import datetime from itertools import izip from typing import Any, Callable, IO, MutableMapping, Optional, Text, Tuple, Union PY2 = ... # type: bool text_type = unicode int_to_byte = chr number_types = (int, long, float) bytes_like = Union[bytearray, str] class _CompactJSON: def loads(self, payload: Text) -> Any: ... def dumps(self, obj: Any) -> Text: ... compact_json = _CompactJSON EPOCH = ... # type: int def want_bytes(s: str, encoding='', errors='') -> str: ... def is_text_serializer(serializer: Any) -> bool: ... def constant_time_compare(val1: bytes_like, val2: bytes_like) -> bool: ... class BadData(Exception): message = ... # type: str def __init__(self, message: str) -> None: ... class BadPayload(BadData): original_error = ... 
# type: Optional[Exception] def __init__(self, message: str, original_error: Optional[Exception] = ...) -> None: ... class BadSignature(BadData): payload = ... # type: Optional[Any] def __init__(self, message: str, payload: Optional[Any] = ...) -> None: ... class BadTimeSignature(BadSignature): date_signed = ... # type: Optional[int] def __init__(self, message, payload: Optional[Any] = ..., date_signed: Optional[int] = ...) -> None: ... class BadHeader(BadSignature): header = ... # type: Any original_error = ... # type: Any def __init__(self, message, payload=None, header=None, original_error=None) -> None: ... class SignatureExpired(BadTimeSignature): ... def base64_encode(string: bytes_like) -> str: ... def base64_decode(string: bytes_like) -> str: ... def int_to_bytes(num: int) -> str: ... def bytes_to_int(bytestr: bytes_like) -> int: ... class SigningAlgorithm: def get_signature(self, key: bytes_like, value: bytes_like) -> str: ... def verify_signature(self, key: bytes_like, value: bytes_like, sig: bytes_like) -> bool: ... class NoneAlgorithm(SigningAlgorithm): def get_signature(self, key: bytes_like, value: bytes_like) -> str: ... class HMACAlgorithm(SigningAlgorithm): default_digest_method = ... # type: Callable digest_method = ... # type: Callable def __init__(self, digest_method: Optional[Callable] = ...) -> None: ... def get_signature(self, key: bytes_like, value: bytes_like) -> str: ... class Signer: default_digest_method = ... # type: Callable default_key_derivation = ... # type: str secret_key = ... # type: bytes_like sep = ... # type: str salt = ... # type: bytes_like key_derivation = ... # type: str digest_method = ... # type: Callable algorithm = ... # type: SigningAlgorithm def __init__(self, secret_key: bytes_like, salt: Optional[bytes_like] = ..., sep: Optional[str]='', key_derivation: Optional[str] = ..., digest_method: Optional[Callable] = ..., algorithm: Optional[SigningAlgorithm] = ...) -> None: ... def derive_key(self) -> str: ... def get_signature(self, value: bytes_like) -> str: ... def sign(self, value: bytes_like) -> str: ... def verify_signature(self, value: bytes_like, sig: bytes_like) -> bool: ... def unsign(self, signed_value: str) -> str: ... def validate(self, signed_value: str) -> bool: ... class TimestampSigner(Signer): def get_timestamp(self) -> int: ... def timestamp_to_datetime(self, ts: int) -> datetime: ... def sign(self, value: bytes_like) -> str: ... def unsign(self, value: str, max_age: Optional[int] = ..., return_timestamp=False) -> Any: ... def validate(self, signed_value: str, max_age: Optional[int] = ...) -> bool: ... class Serializer: default_serializer = ... # type: Any default_signer = ... # type: Callable[..., Signer] secret_key = ... # type: Any salt = ... # type: bytes_like serializer = ... # type: Any is_text_serializer = ... # type: bool signer = ... # type: Signer signer_kwargs = ... # type: MutableMapping def __init__(self, secret_key: bytes_like, salt: Optional[bytes_like]=b'', serializer=None, signer: Optional[Callable[..., Signer]] = ..., signer_kwargs: Optional[MutableMapping] = ...) -> None: ... def load_payload(self, payload: Any, serializer=None) -> Any: ... def dump_payload(self, *args, **kwargs) -> str: ... def make_signer(self, salt: Optional[bytes_like] = ...) -> Signer: ... def dumps(self, obj: Any, salt: Optional[bytes_like] = ...) -> str: ... def dump(self, obj: Any, f: IO[str], salt: Optional[bytes_like] = ...) -> None: ... def loads(self, s: str, salt: Optional[bytes_like] = ...) -> Any: ... 
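# Illustrative usage sketch (editorial addition, not part of the stub above).
# Signer/TimestampSigner wrap a value with an HMAC signature; unsign() verifies
# it and raises BadSignature (or SignatureExpired for stale timestamps).  The
# key, salt and payload below are placeholders.
def _itsdangerous_example():
    from itsdangerous import TimestampSigner, BadSignature, SignatureExpired
    signer = TimestampSigner('secret-key', salt='activate-account')
    token = signer.sign('user-42')                    # value + timestamp + signature
    try:
        return signer.unsign(token, max_age=3600)     # reject tokens older than 1h
    except SignatureExpired:
        return None                                   # valid signature, but too old
    except BadSignature:
        return None                                   # tampered payload or wrong key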
def load(self, f: IO[str], salt: Optional[bytes_like] = ...): ... def loads_unsafe(self, s, salt: Optional[bytes_like] = ...) -> Tuple[bool, Any]: ... def load_unsafe(self, f: IO[str], *args, **kwargs) -> Tuple[bool, Any]: ... class TimedSerializer(Serializer): default_signer = ... # type: Callable[..., TimestampSigner] def loads(self, s: str, salt: Optional[bytes_like] = ..., max_age: Optional[int] = ..., return_timestamp=False) -> Any: ... def loads_unsafe(self, s: str, salt: Optional[bytes_like] = ..., max_age: Optional[int] = ...) -> Tuple[bool, Any]: ... class JSONWebSignatureSerializer(Serializer): jws_algorithms = ... # type: MutableMapping[str, SigningAlgorithm] default_algorithm = ... # type: str default_serializer = ... # type: Any algorithm_name = ... # type: str algorithm = ... # type: Any def __init__(self, secret_key: bytes_like, salt: Optional[bytes_like] = ..., serializer=None, signer: Optional[Callable[..., Signer]] = ..., signer_kwargs: Optional[MutableMapping] = ..., algorithm_name: Optional[str] = ...) -> None: ... def load_payload(self, payload: Any, return_header=False) -> Any: ... def dump_payload(self, *args, **kwargs) -> str: ... def make_algorithm(self, algorithm_name: str) -> SigningAlgorithm: ... def make_signer(self, salt: Optional[bytes_like] = ..., algorithm_name: Optional[str] = ...) -> Signer: ... def make_header(self, header_fields=Optional[MutableMapping]) -> MutableMapping: ... def dumps(self, obj: Any, salt: Optional[bytes_like] = ..., header_fields=Optional[MutableMapping]) -> str: ... def loads(self, s: str, salt: Optional[bytes_like] = ..., return_header=False) -> Any: ... def loads_unsafe(self, s, salt: Optional[bytes_like] = ..., return_header=False) -> Tuple[bool, Any]: ... class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer): DEFAULT_EXPIRES_IN = ... # type: int expires_in = ... # type: int def __init__(self, secret_key: bytes_like, expires_in: Optional[int] = ..., **kwargs) -> None: ... def make_header(self, header_fields=Optional[MutableMapping]) -> MutableMapping: ... def loads(self, s: str, salt: Optional[bytes_like] = ..., return_header=False) -> Any: ... def get_issue_date(self, header: MutableMapping) -> Optional[datetime]: ... def now(self) -> int: ... class URLSafeSerializerMixin: def load_payload(self, payload: Any, serializer=None, return_header=False, **kwargs) -> Any: ... # FIXME: This is invalid but works around https://github.com/pallets/itsdangerous/issues/74 def dump_payload(self, *args, **kwargs) -> str: ... class URLSafeSerializer(URLSafeSerializerMixin, Serializer): default_serializer = ... # type: Any class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer): default_serializer = ... # type: Any mypy-0.560/typeshed/third_party/2/kazoo/0000755€tŠÔÚ€2›s®0000000000013215007244024334 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/kazoo/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212026577 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/kazoo/client.pyi0000644€tŠÔÚ€2›s®0000000730713215007212026337 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any string_types = ... # type: Any bytes_types = ... # type: Any LOST_STATES = ... # type: Any ENVI_VERSION = ... # type: Any ENVI_VERSION_KEY = ... # type: Any log = ... # type: Any class KazooClient: logger = ... # type: Any handler = ... # type: Any auth_data = ... # type: Any default_acl = ... # type: Any randomize_hosts = ... # type: Any hosts = ... # type: Any chroot = ... 
# type: Any state = ... # type: Any state_listeners = ... # type: Any read_only = ... # type: Any retry = ... # type: Any Barrier = ... # type: Any Counter = ... # type: Any DoubleBarrier = ... # type: Any ChildrenWatch = ... # type: Any DataWatch = ... # type: Any Election = ... # type: Any NonBlockingLease = ... # type: Any MultiNonBlockingLease = ... # type: Any Lock = ... # type: Any Party = ... # type: Any Queue = ... # type: Any LockingQueue = ... # type: Any SetPartitioner = ... # type: Any Semaphore = ... # type: Any ShallowParty = ... # type: Any def __init__(self, hosts=..., timeout=..., client_id=..., handler=..., default_acl=..., auth_data=..., read_only=..., randomize_hosts=..., connection_retry=..., command_retry=..., logger=..., **kwargs) -> None: ... @property def client_state(self): ... @property def client_id(self): ... @property def connected(self): ... def set_hosts(self, hosts, randomize_hosts=...): ... def add_listener(self, listener): ... def remove_listener(self, listener): ... def start(self, timeout=...): ... def start_async(self): ... def stop(self): ... def restart(self): ... def close(self): ... def command(self, cmd=...): ... def server_version(self, retries=...): ... def add_auth(self, scheme, credential): ... def add_auth_async(self, scheme, credential): ... def unchroot(self, path): ... def sync_async(self, path): ... def sync(self, path): ... def create(self, path, value=..., acl=..., ephemeral=..., sequence=..., makepath=...): ... def create_async(self, path, value=..., acl=..., ephemeral=..., sequence=..., makepath=...): ... def ensure_path(self, path, acl=...): ... def ensure_path_async(self, path, acl=...): ... def exists(self, path, watch=...): ... def exists_async(self, path, watch=...): ... def get(self, path, watch=...): ... def get_async(self, path, watch=...): ... def get_children(self, path, watch=..., include_data=...): ... def get_children_async(self, path, watch=..., include_data=...): ... def get_acls(self, path): ... def get_acls_async(self, path): ... def set_acls(self, path, acls, version=...): ... def set_acls_async(self, path, acls, version=...): ... def set(self, path, value, version=...): ... def set_async(self, path, value, version=...): ... def transaction(self): ... def delete(self, path, version=..., recursive=...): ... def delete_async(self, path, version=...): ... def reconfig(self, joining, leaving, new_members, from_config=...): ... def reconfig_async(self, joining, leaving, new_members, from_config): ... class TransactionRequest: client = ... # type: Any operations = ... # type: Any committed = ... # type: Any def __init__(self, client) -> None: ... def create(self, path, value=..., acl=..., ephemeral=..., sequence=...): ... def delete(self, path, version=...): ... def set_data(self, path, value, version=...): ... def check(self, path, version): ... def commit_async(self): ... def commit(self): ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, exc_tb): ... class KazooState: ... mypy-0.560/typeshed/third_party/2/kazoo/exceptions.pyi0000644€tŠÔÚ€2›s®0000000443613215007212027242 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class KazooException(Exception): ... class ZookeeperError(KazooException): ... class CancelledError(KazooException): ... class ConfigurationError(KazooException): ... class ZookeeperStoppedError(KazooException): ... class ConnectionDropped(KazooException): ... class LockTimeout(KazooException): ... class WriterNotClosedException(KazooException): ... EXCEPTIONS = ... 
# type: Any class RolledBackError(ZookeeperError): ... class SystemZookeeperError(ZookeeperError): ... class RuntimeInconsistency(ZookeeperError): ... class DataInconsistency(ZookeeperError): ... class ConnectionLoss(ZookeeperError): ... class MarshallingError(ZookeeperError): ... class UnimplementedError(ZookeeperError): ... class OperationTimeoutError(ZookeeperError): ... class BadArgumentsError(ZookeeperError): ... class NewConfigNoQuorumError(ZookeeperError): ... class ReconfigInProcessError(ZookeeperError): ... class APIError(ZookeeperError): ... class NoNodeError(ZookeeperError): ... class NoAuthError(ZookeeperError): ... class BadVersionError(ZookeeperError): ... class NoChildrenForEphemeralsError(ZookeeperError): ... class NodeExistsError(ZookeeperError): ... class NotEmptyError(ZookeeperError): ... class SessionExpiredError(ZookeeperError): ... class InvalidCallbackError(ZookeeperError): ... class InvalidACLError(ZookeeperError): ... class AuthFailedError(ZookeeperError): ... class SessionMovedError(ZookeeperError): ... class NotReadOnlyCallError(ZookeeperError): ... class ConnectionClosedError(SessionExpiredError): ... ConnectionLossException = ... # type: Any MarshallingErrorException = ... # type: Any SystemErrorException = ... # type: Any RuntimeInconsistencyException = ... # type: Any DataInconsistencyException = ... # type: Any UnimplementedException = ... # type: Any OperationTimeoutException = ... # type: Any BadArgumentsException = ... # type: Any ApiErrorException = ... # type: Any NoNodeException = ... # type: Any NoAuthException = ... # type: Any BadVersionException = ... # type: Any NoChildrenForEphemeralsException = ... # type: Any NodeExistsException = ... # type: Any InvalidACLException = ... # type: Any AuthFailedException = ... # type: Any NotEmptyException = ... # type: Any SessionExpiredException = ... # type: Any InvalidCallbackException = ... # type: Any mypy-0.560/typeshed/third_party/2/kazoo/recipe/0000755€tŠÔÚ€2›s®0000000000013215007244025603 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/kazoo/recipe/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030046 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/kazoo/recipe/watchers.pyi0000644€tŠÔÚ€2›s®0000000121113215007212030134 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any log = ... # type: Any class DataWatch: def __init__(self, client, path, func=..., *args, **kwargs) -> None: ... def __call__(self, func): ... class ChildrenWatch: def __init__(self, client, path, func=..., allow_session_lost=..., send_event=...) -> None: ... def __call__(self, func): ... class PatientChildrenWatch: client = ... # type: Any path = ... # type: Any children = ... # type: Any time_boundary = ... # type: Any children_changed = ... # type: Any def __init__(self, client, path, time_boundary=...) -> None: ... asy = ... # type: Any def start(self): ... 
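# Illustrative usage sketch (editorial addition, not part of the stubs above).
# A typical KazooClient session against ZooKeeper, matching the signatures in
# client.pyi; the server address and paths are placeholders.
def _kazoo_example():
    from kazoo.client import KazooClient
    zk = KazooClient(hosts='127.0.0.1:2181')
    zk.start()                                        # blocks until connected
    zk.ensure_path('/app/config')
    zk.create('/app/config/worker', b'ready', ephemeral=True)
    data, stat = zk.get('/app/config/worker')
    children = zk.get_children('/app/config')
    zk.stop()
    return data, children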
mypy-0.560/typeshed/third_party/2/OpenSSL/0000755€tŠÔÚ€2›s®0000000000013215007244024474 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/OpenSSL/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212026737 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/OpenSSL/crypto.pyi0000644€tŠÔÚ€2›s®0000001752113215007212026540 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for OpenSSL.crypto (Python 2) from typing import Any, Callable, Iterable, List, Optional, Set, Text, Tuple, Union from cryptography.hazmat.primitives.asymmetric import dsa, rsa FILETYPE_PEM = ... # type: int FILETYPE_ASN1 = ... # type: int FILETYPE_TEXT = ... # type: int TYPE_RSA = ... # type: int TYPE_DSA = ... # type: int class Error(Exception): ... class PKey: def __init__(self) -> None: ... def to_cryptography_key(self) -> Union[rsa.RSAPublicKey, rsa.RSAPrivateKey, dsa.DSAPublicKey, dsa.DSAPrivateKey]: ... @classmethod def from_cryptography_key(cls, crypto_key: Union[rsa.RSAPublicKey, rsa.RSAPrivateKey, dsa.DSAPublicKey, dsa.DSAPrivateKey]): ... def generate_key(self, type: int, bits: int) -> None: ... def check(self) -> bool: ... def type(self) -> int: ... def bits(self) -> int: ... class _EllipticCurve: name = ... # type: Text def get_elliptic_curves() -> Set[_EllipticCurve]: ... def get_elliptic_curve(name: str) -> _EllipticCurve: ... class X509Name: def __init__(self, name: X509Name) -> None: ... countryName = ... # type: Union[str, unicode] stateOrProvinceName = ... # type: Union[str, unicode] localityName = ... # type: Union[str, unicode] organizationName = ... # type: Union[str, unicode] organizationalUnitName = ... # type: Union[str, unicode] commonName = ... # type: Union[str, unicode] emailAddress = ... # type: Union[str, unicode] C = ... # type: Union[str, unicode] ST = ... # type: Union[str, unicode] L = ... # type: Union[str, unicode] O = ... # type: Union[str, unicode] OU = ... # type: Union[str, unicode] CN = ... # type: Union[str, unicode] def hash(self) -> int: ... def der(self) -> bytes: ... def get_components(self) -> List[Tuple[str, str]]: ... class X509Extension: def __init__(self, type_name: bytes, critical: bool, value: bytes, subject: Optional[X509] = ..., issuer: Optional[X509] = ...) -> None: ... def get_critical(self) -> bool: ... def get_short_name(self) -> str: ... def get_data(self) -> str: ... class X509Req: def __init__(self) -> None: ... def set_pubkey(self, pkey: PKey) -> None: ... def get_pubkey(self) -> PKey: ... def set_version(self, version: int) -> None: ... def get_version(self) -> int: ... def get_subject(self) -> X509Name: ... def add_extensions(self, extensions: Iterable[X509Extension]) -> None: ... def get_extensions(self) -> List[X509Extension]: ... def sign(self, pkey: PKey, digest: str) -> None: ... def verify(self, pkey: PKey) -> bool: ... class X509: def __init__(self) -> None: ... def set_version(self, version: int) -> None: ... def get_version(self) -> int: ... def get_pubkey(self) -> PKey: ... def set_pubkey(self, pkey: PKey) -> None: ... def sign(self, pkey: PKey, digest: str) -> None: ... def get_signature_algorithm(self) -> str: ... def digest(self, digest_name: str) -> str: ... def subject_name_hash(self) -> str: ... def set_serial_number(self, serial: int) -> None: ... def get_serial_number(self) -> int: ... def gmtime_adj_notAfter(self, amount: int) -> None: ... def gmtime_adj_notBefore(self, amount: int) -> None: ... def has_expired(self) -> bool: ... def get_notBefore(self) -> str: ... 
def set_notBefore(self, when: str) -> None: ... def get_notAfter(self) -> str: ... def set_notAfter(self, when: str) -> None: ... def get_issuer(self) -> X509Name: ... def set_issuer(self, issuer: X509Name) -> None: ... def get_subject(self) -> X509Name: ... def set_subject(self, subject: X509Name) -> None: ... def get_extension_count(self) -> int: ... def add_extensions(self, extensions: Iterable[X509Extension]) -> None: ... def get_extension(self, index: int) -> X509Extension: ... class X509StoreFlags: CRL_CHECK = ... # type: int CRL_CHECK_ALL = ... # type: int IGNORE_CRITICAL = ... # type: int X509_STRICT = ... # type: int ALLOW_PROXY_CERTS = ... # type: int POLICY_CHECK = ... # type: int EXPLICIT_POLICY = ... # type: int INHIBIT_MAP = ... # type: int NOTIFY_POLICY = ... # type: int CHECK_SS_SIGNATURE = ... # type: int CB_ISSUER_CHECK = ... # type: int class X509Store: def __init__(self) -> None: ... def add_cert(self, cert: X509) -> None: ... def add_crl(self, crl: CRL) -> None: ... def set_flags(self, flags: int) -> None: ... class X509StoreContextError(Exception): certificate = ... # type: X509 def __init__(self, message: str, certificate: X509) -> None: ... class X509StoreContext: def __init__(self, store: X509Store, certificate: X509) -> None: ... def set_store(self, store: X509Store) -> None: ... def verify_certificate(self) -> None: ... def load_certificate(type: int, buffer: Union[str, unicode]) -> X509: ... def dump_certificate(type: int, cert: X509) -> bytes: ... def dump_publickey(type: int, pkey: PKey) -> bytes: ... def dump_privatekey(type: int, pkey: PKey, cipher: Optional[str] = ..., passphrase: Optional[Union[str, Callable[[int], int]]] = ...) -> bytes: ... class Revoked: def __init__(self) -> None: ... def set_serial(self, hex_str: str) -> None: ... def get_serial(self) -> str: ... def set_reason(self, reason: str) -> None: ... def get_reason(self) -> str: ... def all_reasons(self) -> List[str]: ... def set_rev_date(self, when: str) -> None: ... def get_rev_date(self) -> str: ... class CRL: def __init__(self) -> None: ... def get_revoked(self) -> Tuple[Revoked, ...]: ... def add_revoked(self, revoked: Revoked) -> None: ... def get_issuer(self) -> X509Name: ... def set_version(self, version: int) -> None: ... def set_lastUpdate(self, when: str) -> None: ... def set_nextUpdate(self, when: str) -> None: ... def sign(self, issuer_cert: X509, issuer_key: PKey, digest: str) -> None: ... def export(self, cert: X509, key: PKey, type: int = ..., days: int = ..., digest: str = ...) -> bytes: ... class PKCS7: def type_is_signed(self) -> bool: ... def type_is_enveloped(self) -> bool: ... def type_is_signedAndEnveloped(self) -> bool: ... def type_is_data(self) -> bool: ... def get_type_name(self) -> str: ... class PKCS12: def __init__(self) -> None: ... def get_certificate(self) -> X509: ... def set_certificate(self, cert: X509) -> None: ... def get_privatekey(self) -> PKey: ... def set_privatekey(self, pkey: PKey) -> None: ... def get_ca_certificates(self) -> Tuple[X509, ...]: ... def set_ca_certificates(self, cacerts: Iterable[X509]) -> None: ... def set_friendlyname(self, name: bytes) -> None: ... def get_friendlyname(self) -> bytes: ... def export(self, passphrase: Optional[str] = ..., iter: int = ..., maciter: int = ...): ... class NetscapeSPKI: def __init__(self) -> None: ... def sign(self, pkey: PKey, digest: str) -> None: ... def verify(self, key: PKey) -> bool: ... def b64_encode(self) -> str: ... def get_pubkey(self) -> PKey: ... 
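# Illustrative usage sketch (editorial addition, not part of the stub above).
# Building a self-signed certificate with the classes and helpers typed here;
# the subject name, serial number and lifetime are placeholders.
def _self_signed_cert_example():
    from OpenSSL import crypto
    key = crypto.PKey()
    key.generate_key(crypto.TYPE_RSA, 2048)
    cert = crypto.X509()
    cert.get_subject().CN = 'example.test'
    cert.set_serial_number(1000)
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(365 * 24 * 60 * 60)      # valid for one year
    cert.set_issuer(cert.get_subject())               # issuer == subject: self-signed
    cert.set_pubkey(key)
    cert.sign(key, 'sha256')
    pem_cert = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
    pem_key = crypto.dump_privatekey(crypto.FILETYPE_PEM, key)
    return pem_cert, pem_key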
def set_pubkey(self, pkey: PKey) -> None: ... def load_publickey(type: int, buffer: Union[str, unicode]) -> PKey: ... def load_privatekey(type: int, buffer: bytes, passphrase: Optional[Union[str, Callable[[int], int]]] = ...): ... def dump_certificate_request(type: int, req: X509Req): ... def load_certificate_request(type, buffer: Union[str, unicode]) -> X509Req: ... def sign(pkey: PKey, data: Union[str, unicode], digest: str) -> bytes: ... def verify(cert: X509, signature: bytes, data: Union[str, unicode], digest: str) -> None: ... def dump_crl(type: int, crl: CRL) -> bytes: ... def load_crl(type: int, buffer: Union[str, unicode]) -> CRL: ... def load_pkcs7_data(type: int, buffer: Union[str, unicode]) -> PKCS7: ... def load_pkcs12(buffer: Union[str, unicode], passphrase: Optional[Union[str, Callable[[int], int]]] = ...) -> PKCS12: ... mypy-0.560/typeshed/third_party/2/pycurl.pyi0000644€tŠÔÚ€2›s®0000005012413215007212025247 0ustar jukkaDROPBOX\Domain Users00000000000000# TODO(MichalPokorny): more precise types from typing import Any, Tuple GLOBAL_ACK_EINTR = ... # type: int GLOBAL_ALL = ... # type: int GLOBAL_DEFAULT = ... # type: int GLOBAL_NOTHING = ... # type: int GLOBAL_SSL = ... # type: int GLOBAL_WIN32 = ... # type: int def global_init(option: int) -> None: ... def global_cleanup() -> None: ... version = ... # type: str def version_info() -> Tuple[int, str, int, str, int, str, int, str, tuple, Any, int, Any]: ... class error(Exception): pass class Curl(object): def close(self) -> None: ... def setopt(self, option: int, value: Any) -> None: ... def perform(self) -> None: ... def getinfo(self, info: Any) -> Any: ... def reset(self) -> None: ... def unsetopt(self, option: int) -> Any: ... def pause(self, bitmask: Any) -> Any: ... def errstr(self) -> str: ... # TODO(MichalPokorny): wat? USERPWD = ... # type: int class CurlMulti(object): def close(self) -> None: ... def add_handle(self, obj: Curl) -> None: ... def remove_handle(self, obj: Curl) -> None: ... def perform(self) -> Tuple[Any, int]: ... def fdset(self) -> tuple: ... def select(self, timeout: float = ...) -> int: ... def info_read(self, max_objects: int = ...) -> tuple: ... class CurlShare(object): def close(self) -> None: ... def setopt(self, option: int, value: Any) -> Any: ... ACCEPTTIMEOUT_MS = ... # type: int ACCEPT_ENCODING = ... # type: int ADDRESS_SCOPE = ... # type: int APPCONNECT_TIME = ... # type: int APPEND = ... # type: int AUTOREFERER = ... # type: int BUFFERSIZE = ... # type: int CAINFO = ... # type: int CAPATH = ... # type: int CLOSESOCKETFUNCTION = ... # type: int COMPILE_DATE = ... # type: str COMPILE_LIBCURL_VERSION_NUM = ... # type: int COMPILE_PY_VERSION_HEX = ... # type: int CONDITION_UNMET = ... # type: int CONNECTTIMEOUT = ... # type: int CONNECTTIMEOUT_MS = ... # type: int CONNECT_ONLY = ... # type: int CONNECT_TIME = ... # type: int CONTENT_LENGTH_DOWNLOAD = ... # type: int CONTENT_LENGTH_UPLOAD = ... # type: int CONTENT_TYPE = ... # type: int COOKIE = ... # type: int COOKIEFILE = ... # type: int COOKIEJAR = ... # type: int COOKIELIST = ... # type: int COOKIESESSION = ... # type: int COPYPOSTFIELDS = ... # type: int CRLF = ... # type: int CRLFILE = ... # type: int CSELECT_ERR = ... # type: int CSELECT_IN = ... # type: int CSELECT_OUT = ... # type: int CURL_HTTP_VERSION_1_0 = ... # type: int CURL_HTTP_VERSION_1_1 = ... # type: int CURL_HTTP_VERSION_2 = ... # type: int CURL_HTTP_VERSION_2_0 = ... # type: int CURL_HTTP_VERSION_LAST = ... # type: int CURL_HTTP_VERSION_NONE = ... 
# type: int CUSTOMREQUEST = ... # type: int DEBUGFUNCTION = ... # type: int DIRLISTONLY = ... # type: int DNS_CACHE_TIMEOUT = ... # type: int DNS_SERVERS = ... # type: int DNS_USE_GLOBAL_CACHE = ... # type: int EFFECTIVE_URL = ... # type: int EGDSOCKET = ... # type: int ENCODING = ... # type: int EXPECT_100_TIMEOUT_MS = ... # type: int FAILONERROR = ... # type: int FILE = ... # type: int FOLLOWLOCATION = ... # type: int FORBID_REUSE = ... # type: int FORM_BUFFER = ... # type: int FORM_BUFFERPTR = ... # type: int FORM_CONTENTS = ... # type: int FORM_CONTENTTYPE = ... # type: int FORM_FILE = ... # type: int FORM_FILENAME = ... # type: int FRESH_CONNECT = ... # type: int FTPAPPEND = ... # type: int FTPAUTH_DEFAULT = ... # type: int FTPAUTH_SSL = ... # type: int FTPAUTH_TLS = ... # type: int FTPLISTONLY = ... # type: int FTPMETHOD_DEFAULT = ... # type: int FTPMETHOD_MULTICWD = ... # type: int FTPMETHOD_NOCWD = ... # type: int FTPMETHOD_SINGLECWD = ... # type: int FTPPORT = ... # type: int FTPSSLAUTH = ... # type: int FTPSSL_ALL = ... # type: int FTPSSL_CONTROL = ... # type: int FTPSSL_NONE = ... # type: int FTPSSL_TRY = ... # type: int FTP_ACCOUNT = ... # type: int FTP_ALTERNATIVE_TO_USER = ... # type: int FTP_CREATE_MISSING_DIRS = ... # type: int FTP_ENTRY_PATH = ... # type: int FTP_FILEMETHOD = ... # type: int FTP_RESPONSE_TIMEOUT = ... # type: int FTP_SKIP_PASV_IP = ... # type: int FTP_SSL = ... # type: int FTP_SSL_CCC = ... # type: int FTP_USE_EPRT = ... # type: int FTP_USE_EPSV = ... # type: int FTP_USE_PRET = ... # type: int GSSAPI_DELEGATION = ... # type: int GSSAPI_DELEGATION_FLAG = ... # type: int GSSAPI_DELEGATION_NONE = ... # type: int GSSAPI_DELEGATION_POLICY_FLAG = ... # type: int HEADER = ... # type: int HEADERFUNCTION = ... # type: int HEADEROPT = ... # type: int HEADER_SEPARATE = ... # type: int HEADER_SIZE = ... # type: int HEADER_UNIFIED = ... # type: int HTTP200ALIASES = ... # type: int HTTPAUTH = ... # type: int HTTPAUTH_ANY = ... # type: int HTTPAUTH_ANYSAFE = ... # type: int HTTPAUTH_AVAIL = ... # type: int HTTPAUTH_BASIC = ... # type: int HTTPAUTH_DIGEST = ... # type: int HTTPAUTH_DIGEST_IE = ... # type: int HTTPAUTH_GSSNEGOTIATE = ... # type: int HTTPAUTH_NEGOTIATE = ... # type: int HTTPAUTH_NONE = ... # type: int HTTPAUTH_NTLM = ... # type: int HTTPAUTH_NTLM_WB = ... # type: int HTTPAUTH_ONLY = ... # type: int HTTPGET = ... # type: int HTTPHEADER = ... # type: int HTTPPOST = ... # type: int HTTPPROXYTUNNEL = ... # type: int HTTP_CODE = ... # type: int HTTP_CONNECTCODE = ... # type: int HTTP_CONTENT_DECODING = ... # type: int HTTP_TRANSFER_DECODING = ... # type: int HTTP_VERSION = ... # type: int IGNORE_CONTENT_LENGTH = ... # type: int INFILE = ... # type: int INFILESIZE = ... # type: int INFILESIZE_LARGE = ... # type: int INFOTYPE_DATA_IN = ... # type: int INFOTYPE_DATA_OUT = ... # type: int INFOTYPE_HEADER_IN = ... # type: int INFOTYPE_HEADER_OUT = ... # type: int INFOTYPE_SSL_DATA_IN = ... # type: int INFOTYPE_SSL_DATA_OUT = ... # type: int INFOTYPE_TEXT = ... # type: int INFO_CERTINFO = ... # type: int INFO_COOKIELIST = ... # type: int INFO_FILETIME = ... # type: int INFO_RTSP_CLIENT_CSEQ = ... # type: int INFO_RTSP_CSEQ_RECV = ... # type: int INFO_RTSP_SERVER_CSEQ = ... # type: int INFO_RTSP_SESSION_ID = ... # type: int INTERFACE = ... # type: int IOCMD_NOP = ... # type: int IOCMD_RESTARTREAD = ... # type: int IOCTLDATA = ... # type: int IOCTLFUNCTION = ... # type: int IOE_FAILRESTART = ... # type: int IOE_OK = ... # type: int IOE_UNKNOWNCMD = ... 
# type: int IPRESOLVE = ... # type: int IPRESOLVE_V4 = ... # type: int IPRESOLVE_V6 = ... # type: int IPRESOLVE_WHATEVER = ... # type: int ISSUERCERT = ... # type: int KEYPASSWD = ... # type: int KHMATCH_MISMATCH = ... # type: int KHMATCH_MISSING = ... # type: int KHMATCH_OK = ... # type: int KHSTAT_DEFER = ... # type: int KHSTAT_FINE = ... # type: int KHSTAT_FINE_ADD_TO_FILE = ... # type: int KHSTAT_REJECT = ... # type: int KHTYPE_DSS = ... # type: int KHTYPE_RSA = ... # type: int KHTYPE_RSA1 = ... # type: int KHTYPE_UNKNOWN = ... # type: int KRB4LEVEL = ... # type: int KRBLEVEL = ... # type: int LASTSOCKET = ... # type: int LOCALPORT = ... # type: int LOCALPORTRANGE = ... # type: int LOCAL_IP = ... # type: int LOCAL_PORT = ... # type: int LOCK_DATA_COOKIE = ... # type: int LOCK_DATA_DNS = ... # type: int LOCK_DATA_SSL_SESSION = ... # type: int LOGIN_OPTIONS = ... # type: int LOW_SPEED_LIMIT = ... # type: int LOW_SPEED_TIME = ... # type: int MAIL_AUTH = ... # type: int MAIL_FROM = ... # type: int MAIL_RCPT = ... # type: int MAXCONNECTS = ... # type: int MAXFILESIZE = ... # type: int MAXFILESIZE_LARGE = ... # type: int MAXREDIRS = ... # type: int MAX_RECV_SPEED_LARGE = ... # type: int MAX_SEND_SPEED_LARGE = ... # type: int M_CHUNK_LENGTH_PENALTY_SIZE = ... # type: int M_CONTENT_LENGTH_PENALTY_SIZE = ... # type: int M_MAXCONNECTS = ... # type: int M_MAX_HOST_CONNECTIONS = ... # type: int M_MAX_PIPELINE_LENGTH = ... # type: int M_MAX_TOTAL_CONNECTIONS = ... # type: int M_PIPELINING = ... # type: int M_PIPELINING_SERVER_BL = ... # type: int M_PIPELINING_SITE_BL = ... # type: int M_SOCKETFUNCTION = ... # type: int M_TIMERFUNCTION = ... # type: int NAMELOOKUP_TIME = ... # type: int NETRC = ... # type: int NETRC_FILE = ... # type: int NETRC_IGNORED = ... # type: int NETRC_OPTIONAL = ... # type: int NETRC_REQUIRED = ... # type: int NEW_DIRECTORY_PERMS = ... # type: int NEW_FILE_PERMS = ... # type: int NOBODY = ... # type: int NOPROGRESS = ... # type: int NOPROXY = ... # type: int NOSIGNAL = ... # type: int NUM_CONNECTS = ... # type: int OPENSOCKETFUNCTION = ... # type: int OPT_CERTINFO = ... # type: int OPT_FILETIME = ... # type: int OS_ERRNO = ... # type: int PASSWORD = ... # type: int PATH_AS_IS = ... # type: int PAUSE_ALL = ... # type: int PAUSE_CONT = ... # type: int PAUSE_RECV = ... # type: int PAUSE_SEND = ... # type: int PINNEDPUBLICKEY = ... # type: int PIPEWAIT = ... # type: int PIPE_HTTP1 = ... # type: int PIPE_MULTIPLEX = ... # type: int PIPE_NOTHING = ... # type: int POLL_IN = ... # type: int POLL_INOUT = ... # type: int POLL_NONE = ... # type: int POLL_OUT = ... # type: int POLL_REMOVE = ... # type: int PORT = ... # type: int POST = ... # type: int POST301 = ... # type: int POSTFIELDS = ... # type: int POSTFIELDSIZE = ... # type: int POSTFIELDSIZE_LARGE = ... # type: int POSTQUOTE = ... # type: int POSTREDIR = ... # type: int PREQUOTE = ... # type: int PRETRANSFER_TIME = ... # type: int PRIMARY_IP = ... # type: int PRIMARY_PORT = ... # type: int PROGRESSFUNCTION = ... # type: int PROTOCOLS = ... # type: int PROTO_ALL = ... # type: int PROTO_DICT = ... # type: int PROTO_FILE = ... # type: int PROTO_FTP = ... # type: int PROTO_FTPS = ... # type: int PROTO_GOPHER = ... # type: int PROTO_HTTP = ... # type: int PROTO_HTTPS = ... # type: int PROTO_IMAP = ... # type: int PROTO_IMAPS = ... # type: int PROTO_LDAP = ... # type: int PROTO_LDAPS = ... # type: int PROTO_POP3 = ... # type: int PROTO_POP3S = ... # type: int PROTO_RTMP = ... # type: int PROTO_RTMPE = ... 
# type: int PROTO_RTMPS = ... # type: int PROTO_RTMPT = ... # type: int PROTO_RTMPTE = ... # type: int PROTO_RTMPTS = ... # type: int PROTO_RTSP = ... # type: int PROTO_SCP = ... # type: int PROTO_SFTP = ... # type: int PROTO_SMB = ... # type: int PROTO_SMBS = ... # type: int PROTO_SMTP = ... # type: int PROTO_SMTPS = ... # type: int PROTO_TELNET = ... # type: int PROTO_TFTP = ... # type: int PROXY = ... # type: int PROXYAUTH = ... # type: int PROXYAUTH_AVAIL = ... # type: int PROXYHEADER = ... # type: int PROXYPASSWORD = ... # type: int PROXYPORT = ... # type: int PROXYTYPE = ... # type: int PROXYTYPE_HTTP = ... # type: int PROXYTYPE_HTTP_1_0 = ... # type: int PROXYTYPE_SOCKS4 = ... # type: int PROXYTYPE_SOCKS4A = ... # type: int PROXYTYPE_SOCKS5 = ... # type: int PROXYTYPE_SOCKS5_HOSTNAME = ... # type: int PROXYUSERNAME = ... # type: int PROXYUSERPWD = ... # type: int PROXY_SERVICE_NAME = ... # type: int PROXY_TRANSFER_MODE = ... # type: int PUT = ... # type: int QUOTE = ... # type: int RANDOM_FILE = ... # type: int RANGE = ... # type: int READDATA = ... # type: int READFUNCTION = ... # type: int READFUNC_ABORT = ... # type: int READFUNC_PAUSE = ... # type: int REDIRECT_COUNT = ... # type: int REDIRECT_TIME = ... # type: int REDIRECT_URL = ... # type: int REDIR_POST_301 = ... # type: int REDIR_POST_302 = ... # type: int REDIR_POST_303 = ... # type: int REDIR_POST_ALL = ... # type: int REDIR_PROTOCOLS = ... # type: int REFERER = ... # type: int REQUEST_SIZE = ... # type: int RESOLVE = ... # type: int RESPONSE_CODE = ... # type: int RESUME_FROM = ... # type: int RESUME_FROM_LARGE = ... # type: int SASL_IR = ... # type: int SEEKFUNCTION = ... # type: int SEEKFUNC_CANTSEEK = ... # type: int SEEKFUNC_FAIL = ... # type: int SEEKFUNC_OK = ... # type: int SERVICE_NAME = ... # type: int SHARE = ... # type: int SH_SHARE = ... # type: int SH_UNSHARE = ... # type: int SIZE_DOWNLOAD = ... # type: int SIZE_UPLOAD = ... # type: int SOCKET_TIMEOUT = ... # type: int SOCKOPTFUNCTION = ... # type: int SOCKOPT_ALREADY_CONNECTED = ... # type: int SOCKOPT_ERROR = ... # type: int SOCKOPT_OK = ... # type: int SOCKS5_GSSAPI_NEC = ... # type: int SOCKS5_GSSAPI_SERVICE = ... # type: int SOCKTYPE_ACCEPT = ... # type: int SOCKTYPE_IPCXN = ... # type: int SPEED_DOWNLOAD = ... # type: int SPEED_UPLOAD = ... # type: int SSH_AUTH_ANY = ... # type: int SSH_AUTH_DEFAULT = ... # type: int SSH_AUTH_HOST = ... # type: int SSH_AUTH_KEYBOARD = ... # type: int SSH_AUTH_NONE = ... # type: int SSH_AUTH_PASSWORD = ... # type: int SSH_AUTH_PUBLICKEY = ... # type: int SSH_AUTH_TYPES = ... # type: int SSH_HOST_PUBLIC_KEY_MD5 = ... # type: int SSH_KEYFUNCTION = ... # type: int SSH_KNOWNHOSTS = ... # type: int SSH_PRIVATE_KEYFILE = ... # type: int SSH_PUBLIC_KEYFILE = ... # type: int SSLCERT = ... # type: int SSLCERTPASSWD = ... # type: int SSLCERTTYPE = ... # type: int SSLENGINE = ... # type: int SSLENGINE_DEFAULT = ... # type: int SSLKEY = ... # type: int SSLKEYPASSWD = ... # type: int SSLKEYTYPE = ... # type: int SSLOPT_ALLOW_BEAST = ... # type: int SSLVERSION = ... # type: int SSLVERSION_DEFAULT = ... # type: int SSLVERSION_SSLv2 = ... # type: int SSLVERSION_SSLv3 = ... # type: int SSLVERSION_TLSv1 = ... # type: int SSLVERSION_TLSv1_0 = ... # type: int SSLVERSION_TLSv1_1 = ... # type: int SSLVERSION_TLSv1_2 = ... # type: int SSL_CIPHER_LIST = ... # type: int SSL_ENABLE_ALPN = ... # type: int SSL_ENABLE_NPN = ... # type: int SSL_ENGINES = ... # type: int SSL_FALSESTART = ... # type: int SSL_OPTIONS = ... 
# type: int SSL_SESSIONID_CACHE = ... # type: int SSL_VERIFYHOST = ... # type: int SSL_VERIFYPEER = ... # type: int SSL_VERIFYRESULT = ... # type: int SSL_VERIFYSTATUS = ... # type: int STARTTRANSFER_TIME = ... # type: int STDERR = ... # type: int TCP_KEEPALIVE = ... # type: int TCP_KEEPIDLE = ... # type: int TCP_KEEPINTVL = ... # type: int TCP_NODELAY = ... # type: int TELNETOPTIONS = ... # type: int TFTP_BLKSIZE = ... # type: int TIMECONDITION = ... # type: int TIMECONDITION_IFMODSINCE = ... # type: int TIMECONDITION_IFUNMODSINCE = ... # type: int TIMECONDITION_LASTMOD = ... # type: int TIMECONDITION_NONE = ... # type: int TIMEOUT = ... # type: int TIMEOUT_MS = ... # type: int TIMEVALUE = ... # type: int TLSAUTH_PASSWORD = ... # type: int TLSAUTH_TYPE = ... # type: int TLSAUTH_USERNAME = ... # type: int TOTAL_TIME = ... # type: int TRANSFERTEXT = ... # type: int TRANSFER_ENCODING = ... # type: int UNIX_SOCKET_PATH = ... # type: int UNRESTRICTED_AUTH = ... # type: int UPLOAD = ... # type: int URL = ... # type: int USERAGENT = ... # type: int USERNAME = ... # type: int USERPWD = ... # type: int USESSL_ALL = ... # type: int USESSL_CONTROL = ... # type: int USESSL_NONE = ... # type: int USESSL_TRY = ... # type: int USE_SSL = ... # type: int VERBOSE = ... # type: int VERSION_ASYNCHDNS = ... # type: int VERSION_CONV = ... # type: int VERSION_CURLDEBUG = ... # type: int VERSION_DEBUG = ... # type: int VERSION_GSSAPI = ... # type: int VERSION_GSSNEGOTIATE = ... # type: int VERSION_HTTP2 = ... # type: int VERSION_IDN = ... # type: int VERSION_IPV6 = ... # type: int VERSION_KERBEROS4 = ... # type: int VERSION_KERBEROS5 = ... # type: int VERSION_LARGEFILE = ... # type: int VERSION_LIBZ = ... # type: int VERSION_NTLM = ... # type: int VERSION_NTLM_WB = ... # type: int VERSION_SPNEGO = ... # type: int VERSION_SSL = ... # type: int VERSION_SSPI = ... # type: int VERSION_TLSAUTH_SRP = ... # type: int VERSION_UNIX_SOCKETS = ... # type: int WILDCARDMATCH = ... # type: int WRITEDATA = ... # type: int WRITEFUNCTION = ... # type: int WRITEFUNC_PAUSE = ... # type: int WRITEHEADER = ... # type: int XFERINFOFUNCTION = ... # type: int XOAUTH2_BEARER = ... # type: int E_ABORTED_BY_CALLBACK = ... # type: int E_AGAIN = ... # type: int E_ALREADY_COMPLETE = ... # type: int E_BAD_CALLING_ORDER = ... # type: int E_BAD_CONTENT_ENCODING = ... # type: int E_BAD_DOWNLOAD_RESUME = ... # type: int E_BAD_FUNCTION_ARGUMENT = ... # type: int E_BAD_PASSWORD_ENTERED = ... # type: int E_CALL_MULTI_PERFORM = ... # type: int E_CHUNK_FAILED = ... # type: int E_CONV_FAILED = ... # type: int E_CONV_REQD = ... # type: int E_COULDNT_CONNECT = ... # type: int E_COULDNT_RESOLVE_HOST = ... # type: int E_COULDNT_RESOLVE_PROXY = ... # type: int E_FAILED_INIT = ... # type: int E_FILESIZE_EXCEEDED = ... # type: int E_FILE_COULDNT_READ_FILE = ... # type: int E_FTP_ACCEPT_FAILED = ... # type: int E_FTP_ACCEPT_TIMEOUT = ... # type: int E_FTP_ACCESS_DENIED = ... # type: int E_FTP_BAD_DOWNLOAD_RESUME = ... # type: int E_FTP_BAD_FILE_LIST = ... # type: int E_FTP_CANT_GET_HOST = ... # type: int E_FTP_CANT_RECONNECT = ... # type: int E_FTP_COULDNT_GET_SIZE = ... # type: int E_FTP_COULDNT_RETR_FILE = ... # type: int E_FTP_COULDNT_SET_ASCII = ... # type: int E_FTP_COULDNT_SET_BINARY = ... # type: int E_FTP_COULDNT_SET_TYPE = ... # type: int E_FTP_COULDNT_STOR_FILE = ... # type: int E_FTP_COULDNT_USE_REST = ... # type: int E_FTP_PARTIAL_FILE = ... # type: int E_FTP_PORT_FAILED = ... # type: int E_FTP_PRET_FAILED = ... # type: int E_FTP_QUOTE_ERROR = ... 
# type: int E_FTP_SSL_FAILED = ... # type: int E_FTP_USER_PASSWORD_INCORRECT = ... # type: int E_FTP_WEIRD_227_FORMAT = ... # type: int E_FTP_WEIRD_PASS_REPLY = ... # type: int E_FTP_WEIRD_PASV_REPLY = ... # type: int E_FTP_WEIRD_SERVER_REPLY = ... # type: int E_FTP_WEIRD_USER_REPLY = ... # type: int E_FTP_WRITE_ERROR = ... # type: int E_FUNCTION_NOT_FOUND = ... # type: int E_GOT_NOTHING = ... # type: int E_HTTP2 = ... # type: int E_HTTP_NOT_FOUND = ... # type: int E_HTTP_PORT_FAILED = ... # type: int E_HTTP_POST_ERROR = ... # type: int E_HTTP_RANGE_ERROR = ... # type: int E_HTTP_RETURNED_ERROR = ... # type: int E_INTERFACE_FAILED = ... # type: int E_LDAP_CANNOT_BIND = ... # type: int E_LDAP_INVALID_URL = ... # type: int E_LDAP_SEARCH_FAILED = ... # type: int E_LIBRARY_NOT_FOUND = ... # type: int E_LOGIN_DENIED = ... # type: int E_MALFORMAT_USER = ... # type: int E_MULTI_ADDED_ALREADY = ... # type: int E_MULTI_BAD_EASY_HANDLE = ... # type: int E_MULTI_BAD_HANDLE = ... # type: int E_MULTI_BAD_SOCKET = ... # type: int E_MULTI_CALL_MULTI_PERFORM = ... # type: int E_MULTI_CALL_MULTI_SOCKET = ... # type: int E_MULTI_INTERNAL_ERROR = ... # type: int E_MULTI_OK = ... # type: int E_MULTI_OUT_OF_MEMORY = ... # type: int E_MULTI_UNKNOWN_OPTION = ... # type: int E_NOT_BUILT_IN = ... # type: int E_NO_CONNECTION_AVAILABLE = ... # type: int E_OK = ... # type: int E_OPERATION_TIMEDOUT = ... # type: int E_OPERATION_TIMEOUTED = ... # type: int E_OUT_OF_MEMORY = ... # type: int E_PARTIAL_FILE = ... # type: int E_PEER_FAILED_VERIFICATION = ... # type: int E_QUOTE_ERROR = ... # type: int E_RANGE_ERROR = ... # type: int E_READ_ERROR = ... # type: int E_RECV_ERROR = ... # type: int E_REMOTE_ACCESS_DENIED = ... # type: int E_REMOTE_DISK_FULL = ... # type: int E_REMOTE_FILE_EXISTS = ... # type: int E_REMOTE_FILE_NOT_FOUND = ... # type: int E_RTSP_CSEQ_ERROR = ... # type: int E_RTSP_SESSION_ERROR = ... # type: int E_SEND_ERROR = ... # type: int E_SEND_FAIL_REWIND = ... # type: int E_SHARE_IN_USE = ... # type: int E_SSH = ... # type: int E_SSL_CACERT = ... # type: int E_SSL_CACERT_BADFILE = ... # type: int E_SSL_CERTPROBLEM = ... # type: int E_SSL_CIPHER = ... # type: int E_SSL_CONNECT_ERROR = ... # type: int E_SSL_CRL_BADFILE = ... # type: int E_SSL_ENGINE_INITFAILED = ... # type: int E_SSL_ENGINE_NOTFOUND = ... # type: int E_SSL_ENGINE_SETFAILED = ... # type: int E_SSL_INVALIDCERTSTATUS = ... # type: int E_SSL_ISSUER_ERROR = ... # type: int E_SSL_PEER_CERTIFICATE = ... # type: int E_SSL_PINNEDPUBKEYNOTMATCH = ... # type: int E_SSL_SHUTDOWN_FAILED = ... # type: int E_TELNET_OPTION_SYNTAX = ... # type: int E_TFTP_DISKFULL = ... # type: int E_TFTP_EXISTS = ... # type: int E_TFTP_ILLEGAL = ... # type: int E_TFTP_NOSUCHUSER = ... # type: int E_TFTP_NOTFOUND = ... # type: int E_TFTP_PERM = ... # type: int E_TFTP_UNKNOWNID = ... # type: int E_TOO_MANY_REDIRECTS = ... # type: int E_UNKNOWN_OPTION = ... # type: int E_UNKNOWN_TELNET_OPTION = ... # type: int E_UNSUPPORTED_PROTOCOL = ... # type: int E_UPLOAD_FAILED = ... # type: int E_URL_MALFORMAT = ... # type: int E_URL_MALFORMAT_USER = ... # type: int E_USE_SSL_FAILED = ... # type: int E_WRITE_ERROR = ... 
# type: int mypy-0.560/typeshed/third_party/2/pymssql.pyi0000644€tŠÔÚ€2›s®0000000352613215007212025445 0ustar jukkaDROPBOX\Domain Users00000000000000from datetime import datetime, date, time from typing import Any, Dict, Tuple, Iterable, List, Optional, Union, Sequence Scalar = Union[int, float, str, datetime, date, time] Result = Union[Tuple[Scalar, ...], Dict[str, Scalar]] class Connection(object): def __init__(self, user, password, host, database, timeout, login_timeout, charset, as_dict) -> None: ... def autocommit(self, status: bool) -> None: ... def close(self) -> None: ... def commit(self) -> None: ... def cursor(self) -> 'Cursor': ... def rollback(self) -> None: ... class Cursor(object): def __init__(self) -> None: ... def __iter__(self): ... def __next__(self) -> Any: ... def callproc(self, procname: str, **kwargs) -> None: ... def close(self) -> None: ... def execute(self, stmt: str, params: Optional[Union[Scalar, Tuple[Scalar, ...], Dict[str, Scalar]]]) -> None: ... def executemany(self, stmt: str, params: Optional[Sequence[Tuple[Scalar, ...]]]) -> None: ... def fetchall(self) -> List[Result]: ... def fetchmany(self, size: Optional[int]) -> List[Result]: ... def fetchone(self) -> Result: ... def connect(server: Optional[str], user: Optional[str], password: Optional[str], database: Optional[str], timeout: Optional[int], login_timeout: Optional[int], charset: Optional[str], as_dict: Optional[bool], host: Optional[str], appname: Optional[str], port: Optional[str], conn_properties: Optional[Union[str, Sequence[str]]], autocommit: Optional[bool], tds_version: Optional[str]) -> Connection: ... def get_max_connections() -> int: ... def set_max_connections(n: int) -> None: ... mypy-0.560/typeshed/third_party/2/redis/0000755€tŠÔÚ€2›s®0000000000013215007244024317 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/redis/__init__.pyi0000644€tŠÔÚ€2›s®0000000154413215007212026600 0ustar jukkaDROPBOX\Domain Users00000000000000from . import client from . import connection from . import utils from . import exceptions Redis = client.Redis StrictRedis = client.StrictRedis BlockingConnectionPool = connection.BlockingConnectionPool ConnectionPool = connection.ConnectionPool Connection = connection.Connection SSLConnection = connection.SSLConnection UnixDomainSocketConnection = connection.UnixDomainSocketConnection from_url = utils.from_url AuthenticationError = exceptions.AuthenticationError BusyLoadingError = exceptions.BusyLoadingError ConnectionError = exceptions.ConnectionError DataError = exceptions.DataError InvalidResponse = exceptions.InvalidResponse PubSubError = exceptions.PubSubError ReadOnlyError = exceptions.ReadOnlyError RedisError = exceptions.RedisError ResponseError = exceptions.ResponseError TimeoutError = exceptions.TimeoutError WatchError = exceptions.WatchError mypy-0.560/typeshed/third_party/2/redis/client.pyi0000644€tŠÔÚ€2›s®0000003006713215007212026321 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any SYM_EMPTY = ... # type: Any def list_or_args(keys, args): ... def timestamp_to_datetime(response): ... def string_keys_to_dict(key_string, callback): ... def dict_merge(*dicts): ... def parse_debug_object(response): ... def parse_object(response, infotype): ... def parse_info(response): ... SENTINEL_STATE_TYPES = ... # type: Any def parse_sentinel_state(item): ... def parse_sentinel_master(response): ... def parse_sentinel_masters(response): ... def parse_sentinel_slaves_and_sentinels(response): ... 
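# Illustrative usage sketch (editorial addition, not part of the stubs above).
# DB-API style use of the pymssql Connection/Cursor typed earlier on this line;
# the connection parameters and query are placeholders.
def _pymssql_example():
    import pymssql
    conn = pymssql.connect(server='db.example.test', user='app',
                           password='secret', database='inventory')
    cur = conn.cursor()
    cur.execute('SELECT id, name FROM items WHERE id = %s', (42,))
    row = cur.fetchone()
    conn.commit()
    conn.close()
    return row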
def parse_sentinel_get_master(response): ... def pairs_to_dict(response): ... def pairs_to_dict_typed(response, type_info): ... def zset_score_pairs(response, **options): ... def sort_return_tuples(response, **options): ... def int_or_none(response): ... def float_or_none(response): ... def bool_ok(response): ... def parse_client_list(response, **options): ... def parse_config_get(response, **options): ... def parse_scan(response, **options): ... def parse_hscan(response, **options): ... def parse_zscan(response, **options): ... def parse_slowlog_get(response, **options): ... class StrictRedis: RESPONSE_CALLBACKS = ... # type: Any @classmethod def from_url(cls, url, db=..., **kwargs): ... connection_pool = ... # type: Any response_callbacks = ... # type: Any def __init__(self, host=..., port=..., db=..., password=..., socket_timeout=..., socket_connect_timeout=..., socket_keepalive=..., socket_keepalive_options=..., connection_pool=..., unix_socket_path=..., encoding=..., encoding_errors=..., charset=..., errors=..., decode_responses=..., retry_on_timeout=..., ssl=..., ssl_keyfile=..., ssl_certfile=..., ssl_cert_reqs=..., ssl_ca_certs=...) -> None: ... def set_response_callback(self, command, callback): ... def pipeline(self, transaction=..., shard_hint=...): ... def transaction(self, func, *watches, **kwargs): ... def lock(self, name, timeout=..., sleep=..., blocking_timeout=..., lock_class=..., thread_local=...): ... def pubsub(self, **kwargs): ... def execute_command(self, *args, **options): ... def parse_response(self, connection, command_name, **options): ... def bgrewriteaof(self): ... def bgsave(self): ... def client_kill(self, address): ... def client_list(self): ... def client_getname(self): ... def client_setname(self, name): ... def config_get(self, pattern=...): ... def config_set(self, name, value): ... def config_resetstat(self): ... def config_rewrite(self): ... def dbsize(self): ... def debug_object(self, key): ... def echo(self, value): ... def flushall(self): ... def flushdb(self): ... def info(self, section=...): ... def lastsave(self): ... def object(self, infotype, key): ... def ping(self): ... def save(self): ... def sentinel(self, *args): ... def sentinel_get_master_addr_by_name(self, service_name): ... def sentinel_master(self, service_name): ... def sentinel_masters(self): ... def sentinel_monitor(self, name, ip, port, quorum): ... def sentinel_remove(self, name): ... def sentinel_sentinels(self, service_name): ... def sentinel_set(self, name, option, value): ... def sentinel_slaves(self, service_name): ... def shutdown(self): ... def slaveof(self, host=..., port=...): ... def slowlog_get(self, num=...): ... def slowlog_len(self): ... def slowlog_reset(self): ... def time(self): ... def append(self, key, value): ... def bitcount(self, key, start=..., end=...): ... def bitop(self, operation, dest, *keys): ... def bitpos(self, key, bit, start=..., end=...): ... def decr(self, name, amount=...): ... def delete(self, *names): ... def __delitem__(self, name): ... def dump(self, name): ... def exists(self, name): ... __contains__ = ... # type: Any def expire(self, name, time): ... def expireat(self, name, when): ... def get(self, name): ... def __getitem__(self, name): ... def getbit(self, name, offset): ... def getrange(self, key, start, end): ... def getset(self, name, value): ... def incr(self, name, amount=...): ... def incrby(self, name, amount=...): ... def incrbyfloat(self, name, amount=...): ... def keys(self, pattern=...): ... def mget(self, keys, *args): ... 
def mset(self, *args, **kwargs): ... def msetnx(self, *args, **kwargs): ... def move(self, name, db): ... def persist(self, name): ... def pexpire(self, name, time): ... def pexpireat(self, name, when): ... def psetex(self, name, time_ms, value): ... def pttl(self, name): ... def randomkey(self): ... def rename(self, src, dst): ... def renamenx(self, src, dst): ... def restore(self, name, ttl, value): ... def set(self, name, value, ex=..., px=..., nx=..., xx=...): ... def __setitem__(self, name, value): ... def setbit(self, name, offset, value): ... def setex(self, name, time, value): ... def setnx(self, name, value): ... def setrange(self, name, offset, value): ... def strlen(self, name): ... def substr(self, name, start, end=...): ... def ttl(self, name): ... def type(self, name): ... def watch(self, *names): ... def unwatch(self): ... def blpop(self, keys, timeout=...): ... def brpop(self, keys, timeout=...): ... def brpoplpush(self, src, dst, timeout=...): ... def lindex(self, name, index): ... def linsert(self, name, where, refvalue, value): ... def llen(self, name): ... def lpop(self, name): ... def lpush(self, name, *values): ... def lpushx(self, name, value): ... def lrange(self, name, start, end): ... def lrem(self, name, count, value): ... def lset(self, name, index, value): ... def ltrim(self, name, start, end): ... def rpop(self, name): ... def rpoplpush(self, src, dst): ... def rpush(self, name, *values): ... def rpushx(self, name, value): ... def sort(self, name, start=..., num=..., by=..., get=..., desc=..., alpha=..., store=..., groups=...): ... def scan(self, cursor=..., match=..., count=...): ... def scan_iter(self, match=..., count=...): ... def sscan(self, name, cursor=..., match=..., count=...): ... def sscan_iter(self, name, match=..., count=...): ... def hscan(self, name, cursor=..., match=..., count=...): ... def hscan_iter(self, name, match=..., count=...): ... def zscan(self, name, cursor=..., match=..., count=..., score_cast_func=...): ... def zscan_iter(self, name, match=..., count=..., score_cast_func=...): ... def sadd(self, name, *values): ... def scard(self, name): ... def sdiff(self, keys, *args): ... def sdiffstore(self, dest, keys, *args): ... def sinter(self, keys, *args): ... def sinterstore(self, dest, keys, *args): ... def sismember(self, name, value): ... def smembers(self, name): ... def smove(self, src, dst, value): ... def spop(self, name): ... def srandmember(self, name, number=...): ... def srem(self, name, *values): ... def sunion(self, keys, *args): ... def sunionstore(self, dest, keys, *args): ... def zadd(self, name, *args, **kwargs): ... def zcard(self, name): ... def zcount(self, name, min, max): ... def zincrby(self, name, value, amount=...): ... def zinterstore(self, dest, keys, aggregate=...): ... def zlexcount(self, name, min, max): ... def zrange(self, name, start, end, desc=..., withscores=..., score_cast_func=...): ... def zrangebylex(self, name, min, max, start=..., num=...): ... def zrangebyscore(self, name, min, max, start=..., num=..., withscores=..., score_cast_func=...): ... def zrank(self, name, value): ... def zrem(self, name, *values): ... def zremrangebylex(self, name, min, max): ... def zremrangebyrank(self, name, min, max): ... def zremrangebyscore(self, name, min, max): ... def zrevrange(self, name, start, end, withscores=..., score_cast_func=...): ... def zrevrangebyscore(self, name, max, min, start=..., num=..., withscores=..., score_cast_func=...): ... def zrevrank(self, name, value): ... 
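# Illustrative usage sketch (editorial addition, not part of the stub above).
# Basic use of the StrictRedis client, plus a pipeline to batch several
# commands into one round trip; host/port and key names are placeholders.
def _redis_example():
    import redis
    r = redis.StrictRedis(host='localhost', port=6379, db=0)
    r.set('greeting', 'hello', ex=60)        # key expires after 60 seconds
    value = r.get('greeting')
    pipe = r.pipeline()
    pipe.incr('hits')
    pipe.lpush('recent', 'item-1')
    pipe.expire('recent', 300)
    results = pipe.execute()                 # all three commands, one round trip
    return value, results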
def zscore(self, name, value): ... def zunionstore(self, dest, keys, aggregate=...): ... def pfadd(self, name, *values): ... def pfcount(self, name): ... def pfmerge(self, dest, *sources): ... def hdel(self, name, *keys): ... def hexists(self, name, key): ... def hget(self, name, key): ... def hgetall(self, name): ... def hincrby(self, name, key, amount=...): ... def hincrbyfloat(self, name, key, amount=...): ... def hkeys(self, name): ... def hlen(self, name): ... def hset(self, name, key, value): ... def hsetnx(self, name, key, value): ... def hmset(self, name, mapping): ... def hmget(self, name, keys, *args): ... def hvals(self, name): ... def publish(self, channel, message): ... def eval(self, script, numkeys, *keys_and_args): ... def evalsha(self, sha, numkeys, *keys_and_args): ... def script_exists(self, *args): ... def script_flush(self): ... def script_kill(self): ... def script_load(self, script): ... def register_script(self, script): ... class Redis(StrictRedis): RESPONSE_CALLBACKS = ... # type: Any def pipeline(self, transaction=..., shard_hint=...): ... def setex(self, name, value, time): ... def lrem(self, name, value, num=...): ... def zadd(self, name, *args, **kwargs): ... class PubSub: PUBLISH_MESSAGE_TYPES = ... # type: Any UNSUBSCRIBE_MESSAGE_TYPES = ... # type: Any connection_pool = ... # type: Any shard_hint = ... # type: Any ignore_subscribe_messages = ... # type: Any connection = ... # type: Any encoding = ... # type: Any encoding_errors = ... # type: Any decode_responses = ... # type: Any def __init__(self, connection_pool, shard_hint=..., ignore_subscribe_messages=...) -> None: ... def __del__(self): ... channels = ... # type: Any patterns = ... # type: Any def reset(self): ... def close(self): ... def on_connect(self, connection): ... def encode(self, value): ... @property def subscribed(self): ... def execute_command(self, *args, **kwargs): ... def parse_response(self, block=...): ... def psubscribe(self, *args, **kwargs): ... def punsubscribe(self, *args): ... def subscribe(self, *args, **kwargs): ... def unsubscribe(self, *args): ... def listen(self): ... def get_message(self, ignore_subscribe_messages=...): ... def handle_message(self, response, ignore_subscribe_messages=...): ... def run_in_thread(self, sleep_time=...): ... class BasePipeline: UNWATCH_COMMANDS = ... # type: Any connection_pool = ... # type: Any connection = ... # type: Any response_callbacks = ... # type: Any transaction = ... # type: Any shard_hint = ... # type: Any watching = ... # type: Any def __init__(self, connection_pool, response_callbacks, transaction, shard_hint) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, traceback): ... def __del__(self): ... def __len__(self): ... command_stack = ... # type: Any scripts = ... # type: Any explicit_transaction = ... # type: Any def reset(self): ... def multi(self): ... def execute_command(self, *args, **kwargs): ... def immediate_execute_command(self, *args, **options): ... def pipeline_execute_command(self, *args, **options): ... def raise_first_error(self, commands, response): ... def annotate_exception(self, exception, number, command): ... def parse_response(self, connection, command_name, **options): ... def load_scripts(self): ... def execute(self, raise_on_error=...): ... def watch(self, *names): ... def unwatch(self): ... def script_load_for_pipeline(self, script): ... class StrictPipeline(BasePipeline, StrictRedis): ... class Pipeline(BasePipeline, Redis): ... class Script: registered_client = ... 
# type: Any script = ... # type: Any sha = ... # type: Any def __init__(self, registered_client, script) -> None: ... def __call__(self, keys=..., args=..., client=...): ... mypy-0.560/typeshed/third_party/2/redis/connection.pyi0000644€tŠÔÚ€2›s®0000001135313215007212027177 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any ssl_available = ... # type: Any hiredis_version = ... # type: Any HIREDIS_SUPPORTS_CALLABLE_ERRORS = ... # type: Any HIREDIS_SUPPORTS_BYTE_BUFFER = ... # type: Any msg = ... # type: Any HIREDIS_USE_BYTE_BUFFER = ... # type: Any SYM_STAR = ... # type: Any SYM_DOLLAR = ... # type: Any SYM_CRLF = ... # type: Any SYM_EMPTY = ... # type: Any SERVER_CLOSED_CONNECTION_ERROR = ... # type: Any class Token: value = ... # type: Any def __init__(self, value) -> None: ... class BaseParser: EXCEPTION_CLASSES = ... # type: Any def parse_error(self, response): ... class SocketBuffer: socket_read_size = ... # type: Any bytes_written = ... # type: Any bytes_read = ... # type: Any def __init__(self, socket, socket_read_size) -> None: ... @property def length(self): ... def read(self, length): ... def readline(self): ... def purge(self): ... def close(self): ... class PythonParser(BaseParser): encoding = ... # type: Any socket_read_size = ... # type: Any def __init__(self, socket_read_size) -> None: ... def __del__(self): ... def on_connect(self, connection): ... def on_disconnect(self): ... def can_read(self): ... def read_response(self): ... class HiredisParser(BaseParser): socket_read_size = ... # type: Any def __init__(self, socket_read_size) -> None: ... def __del__(self): ... def on_connect(self, connection): ... def on_disconnect(self): ... def can_read(self): ... def read_response(self): ... DefaultParser = ... # type: Any class Connection: description_format = ... # type: Any pid = ... # type: Any host = ... # type: Any port = ... # type: Any db = ... # type: Any password = ... # type: Any socket_timeout = ... # type: Any socket_connect_timeout = ... # type: Any socket_keepalive = ... # type: Any socket_keepalive_options = ... # type: Any retry_on_timeout = ... # type: Any encoding = ... # type: Any encoding_errors = ... # type: Any decode_responses = ... # type: Any def __init__(self, host=..., port=..., db=..., password=..., socket_timeout=..., socket_connect_timeout=..., socket_keepalive=..., socket_keepalive_options=..., retry_on_timeout=..., encoding=..., encoding_errors=..., decode_responses=..., parser_class=..., socket_read_size=...) -> None: ... def __del__(self): ... def register_connect_callback(self, callback): ... def clear_connect_callbacks(self): ... def connect(self): ... def on_connect(self): ... def disconnect(self): ... def send_packed_command(self, command): ... def send_command(self, *args): ... def can_read(self): ... def read_response(self): ... def encode(self, value): ... def pack_command(self, *args): ... def pack_commands(self, commands): ... class SSLConnection(Connection): description_format = ... # type: Any keyfile = ... # type: Any certfile = ... # type: Any cert_reqs = ... # type: Any ca_certs = ... # type: Any def __init__(self, ssl_keyfile=..., ssl_certfile=..., ssl_cert_reqs=..., ssl_ca_certs=..., **kwargs) -> None: ... class UnixDomainSocketConnection(Connection): description_format = ... # type: Any pid = ... # type: Any path = ... # type: Any db = ... # type: Any password = ... # type: Any socket_timeout = ... # type: Any retry_on_timeout = ... # type: Any encoding = ... # type: Any encoding_errors = ... 
# type: Any decode_responses = ... # type: Any def __init__(self, path=..., db=..., password=..., socket_timeout=..., encoding=..., encoding_errors=..., decode_responses=..., retry_on_timeout=..., parser_class=..., socket_read_size=...) -> None: ... class ConnectionPool: @classmethod def from_url(cls, url, db=..., **kwargs): ... connection_class = ... # type: Any connection_kwargs = ... # type: Any max_connections = ... # type: Any def __init__(self, connection_class=..., max_connections=..., **connection_kwargs) -> None: ... pid = ... # type: Any def reset(self): ... def get_connection(self, command_name, *keys, **options): ... def make_connection(self): ... def release(self, connection): ... def disconnect(self): ... class BlockingConnectionPool(ConnectionPool): queue_class = ... # type: Any timeout = ... # type: Any def __init__(self, max_connections=..., timeout=..., connection_class=..., queue_class=..., **connection_kwargs) -> None: ... pid = ... # type: Any pool = ... # type: Any def reset(self): ... def make_connection(self): ... def get_connection(self, command_name, *keys, **options): ... def release(self, connection): ... def disconnect(self): ... mypy-0.560/typeshed/third_party/2/redis/exceptions.pyi0000644€tŠÔÚ€2›s®0000000107113215007212027215 0ustar jukkaDROPBOX\Domain Users00000000000000class RedisError(Exception): ... def __unicode__(self): ... class AuthenticationError(RedisError): ... class ConnectionError(RedisError): ... class TimeoutError(RedisError): ... class BusyLoadingError(ConnectionError): ... class InvalidResponse(RedisError): ... class ResponseError(RedisError): ... class DataError(RedisError): ... class PubSubError(RedisError): ... class WatchError(RedisError): ... class NoScriptError(ResponseError): ... class ExecAbortError(ResponseError): ... class ReadOnlyError(ResponseError): ... class LockError(RedisError, ValueError): ... mypy-0.560/typeshed/third_party/2/redis/utils.pyi0000644€tŠÔÚ€2›s®0000000022613215007212026175 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any HIREDIS_AVAILABLE = ... # type: Any def from_url(url, db=..., **kwargs): ... def pipeline(redis_obj): ... class dummy: ... mypy-0.560/typeshed/third_party/2/routes/0000755€tŠÔÚ€2›s®0000000000013215007244024532 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/routes/__init__.pyi0000644€tŠÔÚ€2›s®0000000057113215007212027012 0ustar jukkaDROPBOX\Domain Users00000000000000from . import mapper from . import util class _RequestConfig: def __getattr__(self, name): ... def __setattr__(self, name, value): ... def __delattr__(self, name): ... def load_wsgi_environ(self, environ): ... def request_config(original=...): ... Mapper = mapper.Mapper redirect_to = util.redirect_to url_for = util.url_for URLGenerator = util.URLGenerator mypy-0.560/typeshed/third_party/2/routes/mapper.pyi0000644€tŠÔÚ€2›s®0000000520713215007212026540 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any COLLECTION_ACTIONS = ... # type: Any MEMBER_ACTIONS = ... # type: Any def strip_slashes(name): ... class SubMapperParent: def submapper(self, **kargs): ... def collection(self, collection_name, resource_name, path_prefix=..., member_prefix=..., controller=..., collection_actions=..., member_actions=..., member_options=..., **kwargs): ... class SubMapper(SubMapperParent): kwargs = ... # type: Any obj = ... # type: Any collection_name = ... # type: Any member = ... # type: Any resource_name = ... # type: Any formatted = ... 
# type: Any def __init__(self, obj, resource_name=..., collection_name=..., actions=..., formatted=..., **kwargs) -> None: ... def connect(self, *args, **kwargs): ... def link(self, rel=..., name=..., action=..., method=..., formatted=..., **kwargs): ... def new(self, **kwargs): ... def edit(self, **kwargs): ... def action(self, name=..., action=..., method=..., formatted=..., **kwargs): ... def index(self, name=..., **kwargs): ... def show(self, name=..., **kwargs): ... def create(self, **kwargs): ... def update(self, **kwargs): ... def delete(self, **kwargs): ... def add_actions(self, actions): ... def __enter__(self): ... def __exit__(self, type, value, tb): ... class Mapper(SubMapperParent): matchlist = ... # type: Any maxkeys = ... # type: Any minkeys = ... # type: Any urlcache = ... # type: Any prefix = ... # type: Any req_data = ... # type: Any directory = ... # type: Any always_scan = ... # type: Any controller_scan = ... # type: Any debug = ... # type: Any append_slash = ... # type: Any sub_domains = ... # type: Any sub_domains_ignore = ... # type: Any domain_match = ... # type: Any explicit = ... # type: Any encoding = ... # type: Any decode_errors = ... # type: Any hardcode_names = ... # type: Any minimization = ... # type: Any create_regs_lock = ... # type: Any def __init__(self, controller_scan=..., directory=..., always_scan=..., register=..., explicit=...) -> None: ... environ = ... # type: Any def extend(self, routes, path_prefix=...): ... def make_route(self, *args, **kargs): ... def connect(self, *args, **kargs): ... def create_regs(self, *args, **kwargs): ... def match(self, url=..., environ=...): ... def routematch(self, url=..., environ=...): ... obj = ... # type: Any def generate(self, *args, **kargs): ... def resource(self, member_name, collection_name, **kwargs): ... def redirect(self, match_path, destination_path, *args, **kwargs): ... mypy-0.560/typeshed/third_party/2/routes/util.pyi0000644€tŠÔÚ€2›s®0000000113413215007212026224 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class RoutesException(Exception): ... class MatchException(RoutesException): ... class GenerationException(RoutesException): ... def url_for(*args, **kargs): ... class URLGenerator: mapper = ... # type: Any environ = ... # type: Any def __init__(self, mapper, environ) -> None: ... def __call__(self, *args, **kargs): ... def current(self, *args, **kwargs): ... def redirect_to(*args, **kargs): ... def cache_hostinfo(environ): ... def controller_scan(directory=...): ... def as_unicode(value, encoding, errors=...): ... def ascii_characters(string): ... mypy-0.560/typeshed/third_party/2/scribe/0000755€tŠÔÚ€2›s®0000000000013215007244024460 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/scribe/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212026723 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/scribe/scribe.pyi0000644€tŠÔÚ€2›s®0000000225513215007212026451 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any import fb303.FacebookService from .ttypes import * # noqa: F403 from thrift.Thrift import TProcessor class Iface(fb303.FacebookService.Iface): def Log(self, messages): ... class Client(fb303.FacebookService.Client, Iface): def __init__(self, iprot, oprot=...) -> None: ... def Log(self, messages): ... def send_Log(self, messages): ... def recv_Log(self): ... class Processor(fb303.FacebookService.Processor, Iface, TProcessor): def __init__(self, handler) -> None: ... 
def process(self, iprot, oprot): ... def process_Log(self, seqid, iprot, oprot): ... class Log_args: thrift_spec = ... # type: Any messages = ... # type: Any def __init__(self, messages=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class Log_result: thrift_spec = ... # type: Any success = ... # type: Any def __init__(self, success=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... mypy-0.560/typeshed/third_party/2/scribe/ttypes.pyi0000644€tŠÔÚ€2›s®0000000072313215007212026530 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any fastbinary = ... # type: Any class ResultCode: OK = ... # type: Any TRY_LATER = ... # type: Any class LogEntry: thrift_spec = ... # type: Any category = ... # type: Any message = ... # type: Any def __init__(self, category=..., message=...) -> None: ... def read(self, iprot): ... def write(self, oprot): ... def validate(self): ... def __eq__(self, other): ... def __ne__(self, other): ... mypy-0.560/typeshed/third_party/2/selenium/0000755€tŠÔÚ€2›s®0000000000013215007242025030 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/selenium/webdriver/0000755€tŠÔÚ€2›s®0000000000013215007242027021 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/selenium/webdriver/remote/0000755€tŠÔÚ€2›s®0000000000013215007244030316 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/selenium/webdriver/remote/webdriver.pyi0000644€tŠÔÚ€2›s®0000001126013215007212033025 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, List, Optional from .mobile import Mobile as Mobile from selenium.webdriver.remote.webelement import WebElement from selenium.webdriver.remote.errorhandler import ErrorHandler from selenium.webdriver.remote.file_detector import FileDetector Capabilities = Dict[basestring, Any] ExecuteResult = Dict[basestring, Any] # containing 'success', 'value', 'sessionId' class WebDriver: command_executor = ... # type: basestring session_id = ... # type: Any capabilities = ... # type: Capabilities error_handler = ... # type: ErrorHandler file_detector = ... # type: FileDetector def __init__(self, command_executor: basestring='', desired_capabilities: Capabilities=None, browser_profile=None, proxy=None, keep_alive: bool = ... ) -> None: ... @property def mobile(self) -> Mobile: ... @property def name(self) -> basestring: ... def start_client(self): ... def stop_client(self): ... w3c = ... # type: Any def start_session(self, desired_capabilities, browser_profile=None): ... def create_web_element(self, element_id: basestring) -> WebElement: ... def execute(self, driver_command: basestring, params: Optional[Dict[basestring, Any]] = ...) -> ExecuteResult: ... def get(self, url: basestring) -> None: ... @property def title(self) -> basestring: ... def find_element_by_id(self, id_: basestring) -> WebElement: ... def find_elements_by_id(self, id_: basestring) -> List[WebElement]: ... def find_element_by_xpath(self, xpath: basestring) -> WebElement: ... def find_elements_by_xpath(self, xpath: basestring) -> List[WebElement]: ... def find_element_by_link_text(self, link_text: basestring) -> WebElement: ... def find_elements_by_link_text(self, text: basestring) -> List[WebElement]: ... 
def find_element_by_partial_link_text(self, link_text: basestring) -> WebElement: ... def find_elements_by_partial_link_text(self, link_text: basestring) -> List[WebElement]: ... def find_element_by_name(self, name: basestring) -> WebElement: ... def find_elements_by_name(self, name: basestring) -> List[WebElement]: ... def find_element_by_tag_name(self, name: basestring) -> WebElement: ... def find_elements_by_tag_name(self, name: basestring) -> List[WebElement]: ... def find_element_by_class_name(self, name: basestring) -> WebElement: ... def find_elements_by_class_name(self, name: basestring) -> List[WebElement]: ... def find_element_by_css_selector(self, css_selector: basestring) -> WebElement: ... def find_elements_by_css_selector(self, css_selector: basestring) -> List[WebElement]: ... def execute_script(self, script, *args): ... def execute_async_script(self, script, *args): ... @property def current_url(self) -> basestring: ... @property def page_source(self): ... def close(self): ... def quit(self): ... @property def current_window_handle(self): ... @property def window_handles(self): ... def maximize_window(self): ... @property def switch_to(self): ... def switch_to_active_element(self): ... def switch_to_window(self, window_name): ... def switch_to_frame(self, frame_reference): ... def switch_to_default_content(self): ... def switch_to_alert(self): ... def back(self): ... def forward(self): ... def refresh(self): ... def get_cookies(self): ... def get_cookie(self, name): ... def delete_cookie(self, name): ... def delete_all_cookies(self): ... def add_cookie(self, cookie_dict): ... def implicitly_wait(self, time_to_wait): ... def set_script_timeout(self, time_to_wait): ... def set_page_load_timeout(self, time_to_wait): ... def find_element(self, by=..., value=None): ... def find_elements(self, by=..., value=None): ... @property def desired_capabilities(self): ... def get_screenshot_as_file(self, filename): ... save_screenshot = ... # type: Any def get_screenshot_as_png(self): ... def get_screenshot_as_base64(self): ... def set_window_size(self, width, height, windowHandle=''): ... def get_window_size(self, windowHandle=''): ... def set_window_position(self, x, y, windowHandle=''): ... def get_window_position(self, windowHandle=''): ... @property def file_detector(self): ... @file_detector.setter def file_detector(self, detector): ... @property def orientation(self): ... @orientation.setter def orientation(self, value): ... @property def application_cache(self): ... @property def log_types(self): ... def get_log(self, log_type): ... mypy-0.560/typeshed/third_party/2/selenium/webdriver/remote/webelement.pyi0000644€tŠÔÚ€2›s®0000000563413215007212033173 0ustar jukkaDROPBOX\Domain Users00000000000000from selenium.webdriver.remote.webdriver import WebDriver from typing import Any, Optional, Dict, List SizeDict = Dict[str, int] # containing "height", "width" PointDict = Dict[str, int] # containing "x", "y" class WebElement: def __init__(self, parent: WebDriver, id_: Optional[basestring], w3c: bool = ...) -> None: ... @property def tag_name(self) -> basestring: ... @property def text(self) -> Optional[basestring]: ... def click(self) -> None: ... def submit(self) -> None: ... def clear(self) -> None: ... def get_attribute(self, name: basestring) -> Optional[basestring]: ... def is_selected(self) -> bool: ... def is_enabled(self) -> bool: ... def find_element_by_id(self, id_: basestring) -> WebElement: ... def find_elements_by_id(self, id_: basestring) -> List[WebElement]: ... 
def find_element_by_name(self, name: basestring) -> WebElement: ... def find_elements_by_name(self, name: basestring) -> List[WebElement]: ... def find_element_by_link_text(self, link_text: basestring) -> WebElement: ... def find_elements_by_link_text(self, link_text: basestring) -> List[WebElement]: ... def find_element_by_partial_link_text(self, link_text: basestring) -> WebElement: ... def find_elements_by_partial_link_text(self, link_text: basestring) -> List[WebElement]: ... def find_element_by_tag_name(self, name: basestring) -> WebElement: ... def find_elements_by_tag_name(self, name: basestring) -> List[WebElement]: ... def find_element_by_xpath(self, xpath: basestring) -> WebElement: ... def find_elements_by_xpath(self, xpath: basestring) -> List[WebElement]: ... def find_element_by_class_name(self, name: basestring) -> WebElement: ... def find_elements_by_class_name(self, name: basestring) -> List[WebElement]: ... def find_element_by_css_selector(self, css_selector: basestring) -> WebElement: ... def find_elements_by_css_selector(self, css_selector: basestring) -> List[WebElement]: ... def send_keys(self, *value: basestring) -> None: ... def is_displayed(self) -> bool: ... @property def location_once_scrolled_into_view(self): ... @property def size(self) -> SizeDict: ... def value_of_css_property(self, property_name): ... @property def location(self) -> PointDict: ... @property def rect(self): ... @property def screenshot_as_base64(self): ... @property def screenshot_as_png(self): ... def screenshot(self, filename: basestring): ... @property def parent(self) -> WebDriver: ... @property def id(self) -> Optional[basestring]: ... def __eq__(self, element: object) -> bool: ... def __ne__(self, element: object) -> bool: ... def find_element(self, by: basestring=..., value: basestring=None) -> WebElement: ... def find_elements(self, by: basestring=..., value: basestring=None) -> List[WebElement]: ... def __hash__(self) -> int: ... mypy-0.560/typeshed/third_party/2/simplejson/0000755€tŠÔÚ€2›s®0000000000013215007244025374 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/simplejson/__init__.pyi0000644€tŠÔÚ€2›s®0000000071113215007212027650 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO from simplejson.scanner import JSONDecodeError as JSONDecodeError from simplejson.decoder import JSONDecoder as JSONDecoder from simplejson.encoder import JSONEncoder as JSONEncoder, JSONEncoderForHTML as JSONEncoderForHTML def dumps(obj: Any, *args: Any, **kwds: Any) -> str: ... def dump(obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ... def loads(s: str, **kwds: Any) -> Any: ... def load(fp: IO[str]) -> Any: ... mypy-0.560/typeshed/third_party/2/simplejson/decoder.pyi0000644€tŠÔÚ€2›s®0000000035213215007212027517 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Match class JSONDecoder(object): def __init__(self, **kwargs): ... def decode(self, s: str, _w: Match[str], _PY3: bool): ... def raw_decode(self, s: str, idx: int, _w: Match[str], _PY3: bool): ... mypy-0.560/typeshed/third_party/2/simplejson/encoder.pyi0000644€tŠÔÚ€2›s®0000000041413215007212027530 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO class JSONEncoder(object): def __init__(self, *args, **kwargs): ... def encode(self, o: Any): ... def default(self, o: Any): ... def iterencode(self, o: Any, _one_shot: bool): ... class JSONEncoderForHTML(JSONEncoder): ... 
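Illustrative usage sketch (not part of the mypy-0.560 archive): how Python 2 code type-checked against the simplejson stubs above might look. The stubs type the decoded value as Any, so the narrowing below is the caller's own assumption; the function name and path handling are invented for the example.
import simplejson

def load_settings(path):
    # type: (str) -> dict
    # Hypothetical helper, not from the archive: the stub types load() as Any,
    # so we narrow the result ourselves before returning it.
    with open(path) as fp:
        data = simplejson.load(fp)        # stubbed as returning Any
    if not isinstance(data, dict):        # explicit narrowing of the Any
        raise ValueError('expected a JSON object in %s' % path)
    return data

serialized = simplejson.dumps({'release': '0.560'})  # stubbed as returning str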
mypy-0.560/typeshed/third_party/2/simplejson/scanner.pyi0000644€tŠÔÚ€2›s®0000000041513215007212027543 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO class JSONDecodeError(ValueError): def dumps(self, obj: Any) -> str: ... def dump(self, obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ... def loads(self, s: str) -> Any: ... def load(self, fp: IO[str]) -> Any: ... mypy-0.560/typeshed/third_party/2/six/0000755€tŠÔÚ€2›s®0000000000013215007244024014 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/six/__init__.pyi0000644€tŠÔÚ€2›s®0000000653713215007212026304 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six (Python 2.7) from __future__ import print_function import types from typing import ( Any, AnyStr, Callable, Dict, Iterable, Mapping, Optional, Pattern, Tuple, Type, TypeVar, Union, overload, ValuesView, KeysView, ItemsView ) import typing import unittest from mypy_extensions import NoReturn # Exports from __builtin__ import unichr as unichr from StringIO import StringIO as StringIO, StringIO as BytesIO from functools import wraps as wraps from . import moves _T = TypeVar('_T') _K = TypeVar('_K') _V = TypeVar('_V') # TODO make constant, then move this stub to 2and3 # https://github.com/python/typeshed/issues/17 PY2 = True PY3 = False PY34 = False string_types = (str, unicode) integer_types = (int, long) class_types = (type, types.ClassType) text_type = unicode binary_type = str MAXSIZE = ... # type: int # def add_move # def remove_move def advance_iterator(it: typing.Iterator[_T]) -> _T: ... next = advance_iterator def callable(obj: object) -> bool: ... def get_unbound_function(unbound: types.MethodType) -> types.FunctionType: ... def create_bound_method(func: types.FunctionType, obj: object) -> types.MethodType: ... def create_unbound_method(func: types.FunctionType, cls: Union[type, types.ClassType]) -> types.MethodType: ... class Iterator: def next(self) -> Any: ... def get_method_function(meth: types.MethodType) -> types.FunctionType: ... def get_method_self(meth: types.MethodType) -> Optional[object]: ... def get_function_closure(fun: types.FunctionType) -> Optional[Tuple[types._Cell, ...]]: ... def get_function_code(fun: types.FunctionType) -> types.CodeType: ... def get_function_defaults(fun: types.FunctionType) -> Optional[Tuple[Any, ...]]: ... def get_function_globals(fun: types.FunctionType) -> Dict[str, Any]: ... def iterkeys(d: Mapping[_K, _V]) -> typing.Iterator[_K]: ... def itervalues(d: Mapping[_K, _V]) -> typing.Iterator[_V]: ... def iteritems(d: Mapping[_K, _V]) -> typing.Iterator[Tuple[_K, _V]]: ... # def iterlists def viewkeys(d: Mapping[_K, _V]) -> KeysView[_K]: ... def viewvalues(d: Mapping[_K, _V]) -> ValuesView[_V]: ... def viewitems(d: Mapping[_K, _V]) -> ItemsView[_K, _V]: ... def b(s: str) -> binary_type: ... def u(s: str) -> text_type: ... int2byte = chr def byte2int(bs: binary_type) -> int: ... def indexbytes(buf: binary_type, i: int) -> int: ... def iterbytes(buf: binary_type) -> typing.Iterator[int]: ... def assertCountEqual(self: unittest.TestCase, first: Iterable[_T], second: Iterable[_T], msg: str = ...) -> None: ... @overload def assertRaisesRegex(self: unittest.TestCase, msg: str = ...) -> Any: ... @overload def assertRaisesRegex(self: unittest.TestCase, callable_obj: Callable[..., Any], *args: Any, **kwargs: Any) -> Any: ... def assertRegex(self: unittest.TestCase, text: AnyStr, expected_regex: Union[AnyStr, Pattern[AnyStr]], msg: str = ...) -> None: ... 
def reraise(tp: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[types.TracebackType] = ...) -> NoReturn: ... def exec_(_code_: Union[unicode, types.CodeType], _globs_: Dict[str, Any] = ..., _locs_: Dict[str, Any] = ...): ... def raise_from(value: BaseException, from_value: Optional[BaseException]) -> NoReturn: ... print_ = print def with_metaclass(meta: type, *bases: type) -> type: ... def add_metaclass(metaclass: type) -> Callable[[_T], _T]: ... def python_2_unicode_compatible(klass: _T) -> _T: ... mypy-0.560/typeshed/third_party/2/six/moves/0000755€tŠÔÚ€2›s®0000000000013215007244025145 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/six/moves/__init__.pyi0000644€tŠÔÚ€2›s®0000000501113215007212027417 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves # # Note: Commented out items means they weren't implemented at the time. # Uncomment them when the modules have been added to the typeshed. from cStringIO import StringIO as cStringIO from itertools import ifilter as filter from itertools import ifilterfalse as filterfalse from __builtin__ import raw_input as input from __builtin__ import intern as intern from itertools import imap as map from os import getcwdu as getcwd from os import getcwd as getcwdb from __builtin__ import xrange as range from __builtin__ import reload as reload_module from __builtin__ import reduce as reduce from pipes import quote as shlex_quote from StringIO import StringIO as StringIO from UserDict import UserDict as UserDict from UserList import UserList as UserList from UserString import UserString as UserString from __builtin__ import xrange as xrange from itertools import izip as zip from itertools import izip_longest as zip_longest import __builtin__ as builtins import ConfigParser as configparser # import copy_reg as copyreg # import gdbm as dbm_gnu # import dummy_thread as _dummy_thread import cookielib as http_cookiejar import Cookie as http_cookies import htmlentitydefs as html_entities import HTMLParser as html_parser import httplib as http_client # import email.MIMEMultipart as email_mime_multipart # import email.MIMENonMultipart as email_mime_nonmultipart import email.MIMEText as email_mime_text # import email.MIMEBase as email_mime_base import BaseHTTPServer as BaseHTTPServer # import CGIHTTPServer as CGIHTTPServer # import SimpleHTTPServer as SimpleHTTPServer import cPickle as cPickle import Queue as queue import repr as reprlib import SocketServer as socketserver import thread as _thread # import Tkinter as tkinter # import Dialog as tkinter_dialog # import FileDialog as tkinter_filedialog # import ScrolledText as tkinter_scrolledtext # import SimpleDialog as tkinter_simpledialog # import Tix as tkinter_tix # import ttk as tkinter_ttk # import Tkconstants as tkinter_constants # import Tkdnd as tkinter_dnd # import tkColorChooser as tkinter_colorchooser # import tkCommonDialog as tkinter_commondialog # import tkFileDialog as tkinter_tkfiledialog # import tkFont as tkinter_font # import tkMessageBox as tkinter_messagebox # import tkSimpleDialog as tkinter_tksimpledialog import six.moves.urllib.parse as urllib_parse import six.moves.urllib.error as urllib_error import six.moves.urllib as urllib import robotparser as urllib_robotparser # import xmlrpclib as xmlrpc_client # import SimpleXMLRPCServer as xmlrpc_server mypy-0.560/typeshed/third_party/2/six/moves/urllib/0000755€tŠÔÚ€2›s®0000000000013215007244026436 5ustar jukkaDROPBOX\Domain 
Users00000000000000mypy-0.560/typeshed/third_party/2/six/moves/urllib/__init__.pyi0000644€tŠÔÚ€2›s®0000000033113215007212030710 0ustar jukkaDROPBOX\Domain Users00000000000000import six.moves.urllib.error as error import six.moves.urllib.parse as parse import six.moves.urllib.request as request import six.moves.urllib.response as response import six.moves.urllib.robotparser as robotparser mypy-0.560/typeshed/third_party/2/six/moves/urllib/error.pyi0000644€tŠÔÚ€2›s®0000000022413215007212030303 0ustar jukkaDROPBOX\Domain Users00000000000000from urllib2 import URLError as URLError from urllib2 import HTTPError as HTTPError from urllib import ContentTooShortError as ContentTooShortError mypy-0.560/typeshed/third_party/2/six/moves/urllib/parse.pyi0000644€tŠÔÚ€2›s®0000000203713215007212030270 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves.urllib.parse from urlparse import ParseResult as ParseResult from urlparse import SplitResult as SplitResult from urlparse import parse_qs as parse_qs from urlparse import parse_qsl as parse_qsl from urlparse import urldefrag as urldefrag from urlparse import urljoin as urljoin from urlparse import urlparse as urlparse from urlparse import urlsplit as urlsplit from urlparse import urlunparse as urlunparse from urlparse import urlunsplit as urlunsplit from urllib import quote as quote from urllib import quote_plus as quote_plus from urllib import unquote as unquote from urllib import unquote_plus as unquote_plus from urllib import urlencode as urlencode from urllib import splitquery as splitquery from urllib import splittag as splittag from urllib import splituser as splituser from urlparse import uses_fragment as uses_fragment from urlparse import uses_netloc as uses_netloc from urlparse import uses_params as uses_params from urlparse import uses_query as uses_query from urlparse import uses_relative as uses_relative mypy-0.560/typeshed/third_party/2/six/moves/urllib/request.pyi0000644€tŠÔÚ€2›s®0000000347213215007212030652 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves.urllib.request from urllib2 import urlopen as urlopen from urllib2 import install_opener as install_opener from urllib2 import build_opener as build_opener from urllib import pathname2url as pathname2url from urllib import url2pathname as url2pathname from urllib import getproxies as getproxies from urllib2 import Request as Request from urllib2 import OpenerDirector as OpenerDirector from urllib2 import HTTPDefaultErrorHandler as HTTPDefaultErrorHandler from urllib2 import HTTPRedirectHandler as HTTPRedirectHandler from urllib2 import HTTPCookieProcessor as HTTPCookieProcessor from urllib2 import ProxyHandler as ProxyHandler from urllib2 import BaseHandler as BaseHandler from urllib2 import HTTPPasswordMgr as HTTPPasswordMgr from urllib2 import HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm from urllib2 import AbstractBasicAuthHandler as AbstractBasicAuthHandler from urllib2 import HTTPBasicAuthHandler as HTTPBasicAuthHandler from urllib2 import ProxyBasicAuthHandler as ProxyBasicAuthHandler from urllib2 import AbstractDigestAuthHandler as AbstractDigestAuthHandler from urllib2 import HTTPDigestAuthHandler as HTTPDigestAuthHandler from urllib2 import ProxyDigestAuthHandler as ProxyDigestAuthHandler from urllib2 import HTTPHandler as HTTPHandler from urllib2 import HTTPSHandler as HTTPSHandler from urllib2 import FileHandler as FileHandler from urllib2 import FTPHandler as FTPHandler from urllib2 import CacheFTPHandler as 
CacheFTPHandler from urllib2 import UnknownHandler as UnknownHandler from urllib2 import HTTPErrorProcessor as HTTPErrorProcessor from urllib import urlretrieve as urlretrieve from urllib import urlcleanup as urlcleanup from urllib import URLopener as URLopener from urllib import FancyURLopener as FancyURLopener from urllib import proxy_bypass as proxy_bypass mypy-0.560/typeshed/third_party/2/six/moves/urllib/response.pyi0000644€tŠÔÚ€2›s®0000000031613215007212031012 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves.urllib.response from urllib import addbase as addbase from urllib import addclosehook as addclosehook from urllib import addinfo as addinfo from urllib import addinfourl as addinfourl mypy-0.560/typeshed/third_party/2/six/moves/urllib/robotparser.pyi0000644€tŠÔÚ€2›s®0000000007313215007212031516 0ustar jukkaDROPBOX\Domain Users00000000000000from robotparser import RobotFileParser as RobotFileParser mypy-0.560/typeshed/third_party/2/tornado/0000755€tŠÔÚ€2›s®0000000000013215007244024657 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/tornado/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212027122 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/tornado/concurrent.pyi0000644€tŠÔÚ€2›s®0000000211413215007212027555 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any futures = ... # type: Any class ReturnValueIgnoredError(Exception): ... class _TracebackLogger: exc_info = ... # type: Any formatted_tb = ... # type: Any def __init__(self, exc_info) -> None: ... def activate(self): ... def clear(self): ... def __del__(self): ... class Future: def __init__(self) -> None: ... def cancel(self): ... def cancelled(self): ... def running(self): ... def done(self): ... def result(self, timeout=...): ... def exception(self, timeout=...): ... def add_done_callback(self, fn): ... def set_result(self, result): ... def set_exception(self, exception): ... def exc_info(self): ... def set_exc_info(self, exc_info): ... def __del__(self): ... TracebackFuture = ... # type: Any FUTURES = ... # type: Any def is_future(x): ... class DummyExecutor: def submit(self, fn, *args, **kwargs): ... def shutdown(self, wait=...): ... dummy_executor = ... # type: Any def run_on_executor(*args, **kwargs): ... def return_future(f): ... def chain_future(a, b): ... mypy-0.560/typeshed/third_party/2/tornado/gen.pyi0000644€tŠÔÚ€2›s®0000000616513215007212026156 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from collections import namedtuple singledispatch = ... # type: Any class KeyReuseError(Exception): ... class UnknownKeyError(Exception): ... class LeakedCallbackError(Exception): ... class BadYieldError(Exception): ... class ReturnValueIgnoredError(Exception): ... class TimeoutError(Exception): ... def engine(func): ... def coroutine(func, replace_callback=...): ... class Return(Exception): value = ... # type: Any def __init__(self, value=...) -> None: ... class WaitIterator: current_index = ... # type: Any def __init__(self, *args, **kwargs) -> None: ... def done(self): ... def next(self): ... class YieldPoint: def start(self, runner): ... def is_ready(self): ... def get_result(self): ... class Callback(YieldPoint): key = ... # type: Any def __init__(self, key) -> None: ... runner = ... # type: Any def start(self, runner): ... def is_ready(self): ... def get_result(self): ... class Wait(YieldPoint): key = ... # type: Any def __init__(self, key) -> None: ... runner = ... 
# type: Any def start(self, runner): ... def is_ready(self): ... def get_result(self): ... class WaitAll(YieldPoint): keys = ... # type: Any def __init__(self, keys) -> None: ... runner = ... # type: Any def start(self, runner): ... def is_ready(self): ... def get_result(self): ... def Task(func, *args, **kwargs): ... class YieldFuture(YieldPoint): future = ... # type: Any io_loop = ... # type: Any def __init__(self, future, io_loop=...) -> None: ... runner = ... # type: Any key = ... # type: Any result_fn = ... # type: Any def start(self, runner): ... def is_ready(self): ... def get_result(self): ... class Multi(YieldPoint): keys = ... # type: Any children = ... # type: Any unfinished_children = ... # type: Any quiet_exceptions = ... # type: Any def __init__(self, children, quiet_exceptions=...) -> None: ... def start(self, runner): ... def is_ready(self): ... def get_result(self): ... def multi_future(children, quiet_exceptions=...): ... def maybe_future(x): ... def with_timeout(timeout, future, io_loop=..., quiet_exceptions=...): ... def sleep(duration): ... moment = ... # type: Any class Runner: gen = ... # type: Any result_future = ... # type: Any future = ... # type: Any yield_point = ... # type: Any pending_callbacks = ... # type: Any results = ... # type: Any running = ... # type: Any finished = ... # type: Any had_exception = ... # type: Any io_loop = ... # type: Any stack_context_deactivate = ... # type: Any def __init__(self, gen, result_future, first_yielded) -> None: ... def register_callback(self, key): ... def is_ready(self, key): ... def set_result(self, key, result): ... def pop_result(self, key): ... def run(self): ... def handle_yield(self, yielded): ... def result_callback(self, key): ... def handle_exception(self, typ, value, tb): ... Arguments = namedtuple('Arguments', ['args', 'kwargs']) def convert_yielded(yielded): ... mypy-0.560/typeshed/third_party/2/tornado/httpclient.pyi0000644€tŠÔÚ€2›s®0000001002513215007212027551 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from tornado.util import Configurable class HTTPClient: def __init__(self, async_client_class=..., **kwargs) -> None: ... def __del__(self): ... def close(self): ... def fetch(self, request, **kwargs): ... class AsyncHTTPClient(Configurable): @classmethod def configurable_base(cls): ... @classmethod def configurable_default(cls): ... def __new__(cls, io_loop=..., force_instance=..., **kwargs): ... io_loop = ... # type: Any defaults = ... # type: Any def initialize(self, io_loop, defaults=...): ... def close(self): ... def fetch(self, request, callback=..., raise_error=..., **kwargs): ... def fetch_impl(self, request, callback): ... @classmethod def configure(cls, impl, **kwargs): ... class HTTPRequest: headers = ... # type: Any proxy_host = ... # type: Any proxy_port = ... # type: Any proxy_username = ... # type: Any proxy_password = ... # type: Any url = ... # type: Any method = ... # type: Any body = ... # type: Any body_producer = ... # type: Any auth_username = ... # type: Any auth_password = ... # type: Any auth_mode = ... # type: Any connect_timeout = ... # type: Any request_timeout = ... # type: Any follow_redirects = ... # type: Any max_redirects = ... # type: Any user_agent = ... # type: Any decompress_response = ... # type: Any network_interface = ... # type: Any streaming_callback = ... # type: Any header_callback = ... # type: Any prepare_curl_callback = ... # type: Any allow_nonstandard_methods = ... # type: Any validate_cert = ... # type: Any ca_certs = ... 
# type: Any allow_ipv6 = ... # type: Any client_key = ... # type: Any client_cert = ... # type: Any ssl_options = ... # type: Any expect_100_continue = ... # type: Any start_time = ... # type: Any def __init__(self, url, method=..., headers=..., body=..., auth_username=..., auth_password=..., auth_mode=..., connect_timeout=..., request_timeout=..., if_modified_since=..., follow_redirects=..., max_redirects=..., user_agent=..., use_gzip=..., network_interface=..., streaming_callback=..., header_callback=..., prepare_curl_callback=..., proxy_host=..., proxy_port=..., proxy_username=..., proxy_password=..., allow_nonstandard_methods=..., validate_cert=..., ca_certs=..., allow_ipv6=..., client_key=..., client_cert=..., body_producer=..., expect_100_continue=..., decompress_response=..., ssl_options=...) -> None: ... @property def headers(self): ... @headers.setter def headers(self, value): ... @property def body(self): ... @body.setter def body(self, value): ... @property def body_producer(self): ... @body_producer.setter def body_producer(self, value): ... @property def streaming_callback(self): ... @streaming_callback.setter def streaming_callback(self, value): ... @property def header_callback(self): ... @header_callback.setter def header_callback(self, value): ... @property def prepare_curl_callback(self): ... @prepare_curl_callback.setter def prepare_curl_callback(self, value): ... class HTTPResponse: request = ... # type: Any code = ... # type: Any reason = ... # type: Any headers = ... # type: Any buffer = ... # type: Any effective_url = ... # type: Any error = ... # type: Any request_time = ... # type: Any time_info = ... # type: Any def __init__(self, request, code, headers=..., buffer=..., effective_url=..., error=..., request_time=..., time_info=..., reason=...) -> None: ... body = ... # type: Any def rethrow(self): ... class HTTPError(Exception): code = ... # type: Any response = ... # type: Any def __init__(self, code, message=..., response=...) -> None: ... class _RequestProxy: request = ... # type: Any defaults = ... # type: Any def __init__(self, request, defaults) -> None: ... def __getattr__(self, name): ... def main(): ... mypy-0.560/typeshed/third_party/2/tornado/httpserver.pyi0000644€tŠÔÚ€2›s®0000000323513215007212027606 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from tornado import httputil from tornado.tcpserver import TCPServer from tornado.util import Configurable class HTTPServer(TCPServer, Configurable, httputil.HTTPServerConnectionDelegate): def __init__(self, *args, **kwargs) -> None: ... request_callback = ... # type: Any no_keep_alive = ... # type: Any xheaders = ... # type: Any protocol = ... # type: Any conn_params = ... # type: Any def initialize(self, request_callback, no_keep_alive=..., io_loop=..., xheaders=..., ssl_options=..., protocol=..., decompress_request=..., chunk_size=..., max_header_size=..., idle_connection_timeout=..., body_timeout=..., max_body_size=..., max_buffer_size=...): ... @classmethod def configurable_base(cls): ... @classmethod def configurable_default(cls): ... def close_all_connections(self): ... def handle_stream(self, stream, address): ... def start_request(self, server_conn, request_conn): ... def on_close(self, server_conn): ... class _HTTPRequestContext: address = ... # type: Any protocol = ... # type: Any address_family = ... # type: Any remote_ip = ... # type: Any def __init__(self, stream, address, protocol) -> None: ... class _ServerRequestAdapter(httputil.HTTPMessageDelegate): server = ... 
# type: Any connection = ... # type: Any request = ... # type: Any delegate = ... # type: Any def __init__(self, server, server_conn, request_conn) -> None: ... def headers_received(self, start_line, headers): ... def data_received(self, chunk): ... def finish(self): ... def on_connection_close(self): ... HTTPRequest = ... # type: Any mypy-0.560/typeshed/third_party/2/tornado/httputil.pyi0000644€tŠÔÚ€2›s®0000000550113215007212027253 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from tornado.util import ObjectDict from collections import namedtuple class SSLError(Exception): ... class _NormalizedHeaderCache(dict): size = ... # type: Any queue = ... # type: Any def __init__(self, size) -> None: ... def __missing__(self, key): ... class HTTPHeaders(dict): def __init__(self, *args, **kwargs) -> None: ... def add(self, name, value): ... def get_list(self, name): ... def get_all(self): ... def parse_line(self, line): ... @classmethod def parse(cls, headers): ... def __setitem__(self, name, value): ... def __getitem__(self, name): ... def __delitem__(self, name): ... def __contains__(self, name): ... def get(self, name, default=...): ... def update(self, *args, **kwargs): ... def copy(self): ... __copy__ = ... # type: Any def __deepcopy__(self, memo_dict): ... class HTTPServerRequest: method = ... # type: Any uri = ... # type: Any version = ... # type: Any headers = ... # type: Any body = ... # type: Any remote_ip = ... # type: Any protocol = ... # type: Any host = ... # type: Any files = ... # type: Any connection = ... # type: Any arguments = ... # type: Any query_arguments = ... # type: Any body_arguments = ... # type: Any def __init__(self, method=..., uri=..., version=..., headers=..., body=..., host=..., files=..., connection=..., start_line=...) -> None: ... def supports_http_1_1(self): ... @property def cookies(self): ... def write(self, chunk, callback=...): ... def finish(self): ... def full_url(self): ... def request_time(self): ... def get_ssl_certificate(self, binary_form=...): ... class HTTPInputError(Exception): ... class HTTPOutputError(Exception): ... class HTTPServerConnectionDelegate: def start_request(self, server_conn, request_conn): ... def on_close(self, server_conn): ... class HTTPMessageDelegate: def headers_received(self, start_line, headers): ... def data_received(self, chunk): ... def finish(self): ... def on_connection_close(self): ... class HTTPConnection: def write_headers(self, start_line, headers, chunk=..., callback=...): ... def write(self, chunk, callback=...): ... def finish(self): ... def url_concat(url, args): ... class HTTPFile(ObjectDict): ... def parse_body_arguments(content_type, body, arguments, files, headers=...): ... def parse_multipart_form_data(boundary, data, arguments, files): ... def format_timestamp(ts): ... RequestStartLine = namedtuple('RequestStartLine', ['method', 'path', 'version']) def parse_request_start_line(line): ... ResponseStartLine = namedtuple('ResponseStartLine', ['version', 'code', 'reason']) def parse_response_start_line(line): ... def doctests(): ... def split_host_and_port(netloc): ... mypy-0.560/typeshed/third_party/2/tornado/ioloop.pyi0000644€tŠÔÚ€2›s®0000000562513215007212026706 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from tornado.util import Configurable signal = ... # type: Any class TimeoutError(Exception): ... class IOLoop(Configurable): NONE = ... # type: Any READ = ... # type: Any WRITE = ... # type: Any ERROR = ... # type: Any @staticmethod def instance(): ... 
@staticmethod def initialized(): ... def install(self): ... @staticmethod def clear_instance(): ... @staticmethod def current(instance=...): ... def make_current(self): ... @staticmethod def clear_current(): ... @classmethod def configurable_base(cls): ... @classmethod def configurable_default(cls): ... def initialize(self, make_current=...): ... def close(self, all_fds=...): ... def add_handler(self, fd, handler, events): ... def update_handler(self, fd, events): ... def remove_handler(self, fd): ... def set_blocking_signal_threshold(self, seconds, action): ... def set_blocking_log_threshold(self, seconds): ... def log_stack(self, signal, frame): ... def start(self): ... def stop(self): ... def run_sync(self, func, timeout=...): ... def time(self): ... def add_timeout(self, deadline, callback, *args, **kwargs): ... def call_later(self, delay, callback, *args, **kwargs): ... def call_at(self, when, callback, *args, **kwargs): ... def remove_timeout(self, timeout): ... def add_callback(self, callback, *args, **kwargs): ... def add_callback_from_signal(self, callback, *args, **kwargs): ... def spawn_callback(self, callback, *args, **kwargs): ... def add_future(self, future, callback): ... def handle_callback_exception(self, callback): ... def split_fd(self, fd): ... def close_fd(self, fd): ... class PollIOLoop(IOLoop): time_func = ... # type: Any def initialize(self, impl, time_func=..., **kwargs): ... def close(self, all_fds=...): ... def add_handler(self, fd, handler, events): ... def update_handler(self, fd, events): ... def remove_handler(self, fd): ... def set_blocking_signal_threshold(self, seconds, action): ... def start(self): ... def stop(self): ... def time(self): ... def call_at(self, deadline, callback, *args, **kwargs): ... def remove_timeout(self, timeout): ... def add_callback(self, callback, *args, **kwargs): ... def add_callback_from_signal(self, callback, *args, **kwargs): ... class _Timeout: deadline = ... # type: Any callback = ... # type: Any tiebreaker = ... # type: Any def __init__(self, deadline, callback, io_loop) -> None: ... def __lt__(self, other): ... def __le__(self, other): ... class PeriodicCallback: callback = ... # type: Any callback_time = ... # type: Any io_loop = ... # type: Any def __init__(self, callback, callback_time, io_loop=...) -> None: ... def start(self): ... def stop(self): ... def is_running(self): ... mypy-0.560/typeshed/third_party/2/tornado/locks.pyi0000644€tŠÔÚ€2›s®0000000230413215007212026507 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class _TimeoutGarbageCollector: def __init__(self): ... class Condition(_TimeoutGarbageCollector): io_loop = ... # type: Any def __init__(self): ... def wait(self, timeout=None): ... def notify(self, n=1): ... def notify_all(self): ... class Event: def __init__(self): ... def is_set(self): ... def set(self): ... def clear(self): ... def wait(self, timeout=None): ... class _ReleasingContextManager: def __init__(self, obj): ... def __enter__(self): ... def __exit__(self, exc_type, exc_val, exc_tb): ... class Semaphore(_TimeoutGarbageCollector): def __init__(self, value=1): ... def release(self): ... def acquire(self, timeout=None): ... def __enter__(self): ... __exit__ = ... # type: Any def __aenter__(self): ... def __aexit__(self, typ, value, tb): ... class BoundedSemaphore(Semaphore): def __init__(self, value=1): ... def release(self): ... class Lock: def __init__(self): ... def acquire(self, timeout=None): ... def release(self): ... def __enter__(self): ... __exit__ = ... 
# type: Any def __aenter__(self): ... def __aexit__(self, typ, value, tb): ... mypy-0.560/typeshed/third_party/2/tornado/netutil.pyi0000644€tŠÔÚ€2›s®0000000272113215007212027063 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from tornado.util import Configurable ssl = ... # type: Any certifi = ... # type: Any xrange = ... # type: Any ssl_match_hostname = ... # type: Any SSLCertificateError = ... # type: Any def bind_sockets(port, address=..., family=..., backlog=..., flags=...): ... def bind_unix_socket(file, mode=..., backlog=...): ... def add_accept_handler(sock, callback, io_loop=...): ... def is_valid_ip(ip): ... class Resolver(Configurable): @classmethod def configurable_base(cls): ... @classmethod def configurable_default(cls): ... def resolve(self, host, port, family=..., callback=...): ... def close(self): ... class ExecutorResolver(Resolver): io_loop = ... # type: Any executor = ... # type: Any close_executor = ... # type: Any def initialize(self, io_loop=..., executor=..., close_executor=...): ... def close(self): ... def resolve(self, host, port, family=...): ... class BlockingResolver(ExecutorResolver): def initialize(self, io_loop=...): ... class ThreadedResolver(ExecutorResolver): def initialize(self, io_loop=..., num_threads=...): ... class OverrideResolver(Resolver): resolver = ... # type: Any mapping = ... # type: Any def initialize(self, resolver, mapping): ... def close(self): ... def resolve(self, host, port, *args, **kwargs): ... def ssl_options_to_context(ssl_options): ... def ssl_wrap_socket(socket, ssl_options, server_hostname=..., **kwargs): ... mypy-0.560/typeshed/third_party/2/tornado/tcpserver.pyi0000644€tŠÔÚ€2›s®0000000116213215007212027412 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any ssl = ... # type: Any class TCPServer: io_loop = ... # type: Any ssl_options = ... # type: Any max_buffer_size = ... # type: Any read_chunk_size = ... # type: Any def __init__(self, io_loop=..., ssl_options=..., max_buffer_size=..., read_chunk_size=...) -> None: ... def listen(self, port, address=...): ... def add_sockets(self, sockets): ... def add_socket(self, socket): ... def bind(self, port, address=..., family=..., backlog=...): ... def start(self, num_processes=...): ... def stop(self): ... def handle_stream(self, stream, address): ... mypy-0.560/typeshed/third_party/2/tornado/testing.pyi0000644€tŠÔÚ€2›s®0000000325413215007212027056 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any import unittest import logging AsyncHTTPClient = ... # type: Any gen = ... # type: Any HTTPServer = ... # type: Any IOLoop = ... # type: Any netutil = ... # type: Any SimpleAsyncHTTPClient = ... # type: Any def get_unused_port(): ... def bind_unused_port(): ... class AsyncTestCase(unittest.TestCase): def __init__(self, *args, **kwargs): ... io_loop = ... # type: Any def setUp(self): ... def tearDown(self): ... def get_new_ioloop(self): ... def run(self, result=None): ... def stop(self, _arg=None, **kwargs): ... def wait(self, condition=None, timeout=5): ... class AsyncHTTPTestCase(AsyncTestCase): http_client = ... # type: Any http_server = ... # type: Any def setUp(self): ... def get_http_client(self): ... def get_http_server(self): ... def get_app(self): ... def fetch(self, path, **kwargs): ... def get_httpserver_options(self): ... def get_http_port(self): ... def get_protocol(self): ... def get_url(self, path): ... def tearDown(self): ... class AsyncHTTPSTestCase(AsyncHTTPTestCase): def get_http_client(self): ... 
def get_httpserver_options(self): ... def get_ssl_options(self): ... def get_protocol(self): ... def gen_test(f): ... class LogTrapTestCase(unittest.TestCase): def run(self, result=None): ... class ExpectLog(logging.Filter): logger = ... # type: Any regex = ... # type: Any required = ... # type: Any matched = ... # type: Any def __init__(self, logger, regex, required=True): ... def filter(self, record): ... def __enter__(self): ... def __exit__(self, typ, value, tb): ... def main(**kwargs): ... mypy-0.560/typeshed/third_party/2/tornado/util.pyi0000644€tŠÔÚ€2›s®0000000220213215007212026346 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any xrange = ... # type: Any class ObjectDict(dict): def __getattr__(self, name): ... def __setattr__(self, name, value): ... class GzipDecompressor: decompressobj = ... # type: Any def __init__(self) -> None: ... def decompress(self, value, max_length=...): ... @property def unconsumed_tail(self): ... def flush(self): ... unicode_type = ... # type: Any basestring_type = ... # type: Any def import_object(name): ... bytes_type = ... # type: Any def errno_from_exception(e): ... class Configurable: def __new__(cls, *args, **kwargs): ... @classmethod def configurable_base(cls): ... @classmethod def configurable_default(cls): ... def initialize(self): ... @classmethod def configure(cls, impl, **kwargs): ... @classmethod def configured_class(cls): ... class ArgReplacer: name = ... # type: Any arg_pos = ... # type: Any def __init__(self, func, name) -> None: ... def get_old_value(self, args, kwargs, default=...): ... def replace(self, new_value, args, kwargs): ... def timedelta_to_seconds(td): ... def doctests(): ... mypy-0.560/typeshed/third_party/2/tornado/web.pyi0000644€tŠÔÚ€2›s®0000002233513215007212026157 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from tornado import httputil MIN_SUPPORTED_SIGNED_VALUE_VERSION = ... # type: Any MAX_SUPPORTED_SIGNED_VALUE_VERSION = ... # type: Any DEFAULT_SIGNED_VALUE_VERSION = ... # type: Any DEFAULT_SIGNED_VALUE_MIN_VERSION = ... # type: Any class RequestHandler: SUPPORTED_METHODS = ... # type: Any application = ... # type: Any request = ... # type: Any path_args = ... # type: Any path_kwargs = ... # type: Any ui = ... # type: Any def __init__(self, application, request, **kwargs) -> None: ... def initialize(self): ... @property def settings(self): ... def head(self, *args, **kwargs): ... def get(self, *args, **kwargs): ... def post(self, *args, **kwargs): ... def delete(self, *args, **kwargs): ... def patch(self, *args, **kwargs): ... def put(self, *args, **kwargs): ... def options(self, *args, **kwargs): ... def prepare(self): ... def on_finish(self): ... def on_connection_close(self): ... def clear(self): ... def set_default_headers(self): ... def set_status(self, status_code, reason=...): ... def get_status(self): ... def set_header(self, name, value): ... def add_header(self, name, value): ... def clear_header(self, name): ... def get_argument(self, name, default=..., strip=...): ... def get_arguments(self, name, strip=...): ... def get_body_argument(self, name, default=..., strip=...): ... def get_body_arguments(self, name, strip=...): ... def get_query_argument(self, name, default=..., strip=...): ... def get_query_arguments(self, name, strip=...): ... def decode_argument(self, value, name=...): ... @property def cookies(self): ... def get_cookie(self, name, default=...): ... def set_cookie(self, name, value, domain=..., expires=..., path=..., expires_days=..., **kwargs): ... 
def clear_cookie(self, name, path=..., domain=...): ... def clear_all_cookies(self, path=..., domain=...): ... def set_secure_cookie(self, name, value, expires_days=..., version=..., **kwargs): ... def create_signed_value(self, name, value, version=...): ... def get_secure_cookie(self, name, value=..., max_age_days=..., min_version=...): ... def get_secure_cookie_key_version(self, name, value=...): ... def redirect(self, url, permanent=..., status=...): ... def write(self, chunk): ... def render(self, template_name, **kwargs): ... def render_string(self, template_name, **kwargs): ... def get_template_namespace(self): ... def create_template_loader(self, template_path): ... def flush(self, include_footers=..., callback=...): ... def finish(self, chunk=...): ... def send_error(self, status_code=..., **kwargs): ... def write_error(self, status_code, **kwargs): ... @property def locale(self): ... @locale.setter def locale(self, value): ... def get_user_locale(self): ... def get_browser_locale(self, default=...): ... @property def current_user(self): ... @current_user.setter def current_user(self, value): ... def get_current_user(self): ... def get_login_url(self): ... def get_template_path(self): ... @property def xsrf_token(self): ... def check_xsrf_cookie(self): ... def xsrf_form_html(self): ... def static_url(self, path, include_host=..., **kwargs): ... def require_setting(self, name, feature=...): ... def reverse_url(self, name, *args): ... def compute_etag(self): ... def set_etag_header(self): ... def check_etag_header(self): ... def data_received(self, chunk): ... def log_exception(self, typ, value, tb): ... def asynchronous(method): ... def stream_request_body(cls): ... def removeslash(method): ... def addslash(method): ... class Application(httputil.HTTPServerConnectionDelegate): transforms = ... # type: Any handlers = ... # type: Any named_handlers = ... # type: Any default_host = ... # type: Any settings = ... # type: Any ui_modules = ... # type: Any ui_methods = ... # type: Any def __init__(self, handlers=..., default_host=..., transforms=..., **settings) -> None: ... def listen(self, port, address=..., **kwargs): ... def add_handlers(self, host_pattern, host_handlers): ... def add_transform(self, transform_class): ... def start_request(self, server_conn, request_conn): ... def __call__(self, request): ... def reverse_url(self, name, *args): ... def log_request(self, handler): ... class _RequestDispatcher(httputil.HTTPMessageDelegate): application = ... # type: Any connection = ... # type: Any request = ... # type: Any chunks = ... # type: Any handler_class = ... # type: Any handler_kwargs = ... # type: Any path_args = ... # type: Any path_kwargs = ... # type: Any def __init__(self, application, connection) -> None: ... def headers_received(self, start_line, headers): ... stream_request_body = ... # type: Any def set_request(self, request): ... def data_received(self, data): ... def finish(self): ... def on_connection_close(self): ... handler = ... # type: Any def execute(self): ... class HTTPError(Exception): status_code = ... # type: Any log_message = ... # type: Any args = ... # type: Any reason = ... # type: Any def __init__(self, status_code, log_message=..., *args, **kwargs) -> None: ... class Finish(Exception): ... class MissingArgumentError(HTTPError): arg_name = ... # type: Any def __init__(self, arg_name) -> None: ... class ErrorHandler(RequestHandler): def initialize(self, status_code): ... def prepare(self): ... def check_xsrf_cookie(self): ... 
class RedirectHandler(RequestHandler): def initialize(self, url, permanent=...): ... def get(self): ... class StaticFileHandler(RequestHandler): CACHE_MAX_AGE = ... # type: Any root = ... # type: Any default_filename = ... # type: Any def initialize(self, path, default_filename=...): ... @classmethod def reset(cls): ... def head(self, path): ... path = ... # type: Any absolute_path = ... # type: Any modified = ... # type: Any def get(self, path, include_body=...): ... def compute_etag(self): ... def set_headers(self): ... def should_return_304(self): ... @classmethod def get_absolute_path(cls, root, path): ... def validate_absolute_path(self, root, absolute_path): ... @classmethod def get_content(cls, abspath, start=..., end=...): ... @classmethod def get_content_version(cls, abspath): ... def get_content_size(self): ... def get_modified_time(self): ... def get_content_type(self): ... def set_extra_headers(self, path): ... def get_cache_time(self, path, modified, mime_type): ... @classmethod def make_static_url(cls, settings, path, include_version=...): ... def parse_url_path(self, url_path): ... @classmethod def get_version(cls, settings, path): ... class FallbackHandler(RequestHandler): fallback = ... # type: Any def initialize(self, fallback): ... def prepare(self): ... class OutputTransform: def __init__(self, request) -> None: ... def transform_first_chunk(self, status_code, headers, chunk, finishing): ... def transform_chunk(self, chunk, finishing): ... class GZipContentEncoding(OutputTransform): CONTENT_TYPES = ... # type: Any MIN_LENGTH = ... # type: Any def __init__(self, request) -> None: ... def transform_first_chunk(self, status_code, headers, chunk, finishing): ... def transform_chunk(self, chunk, finishing): ... def authenticated(method): ... class UIModule: handler = ... # type: Any request = ... # type: Any ui = ... # type: Any locale = ... # type: Any def __init__(self, handler) -> None: ... @property def current_user(self): ... def render(self, *args, **kwargs): ... def embedded_javascript(self): ... def javascript_files(self): ... def embedded_css(self): ... def css_files(self): ... def html_head(self): ... def html_body(self): ... def render_string(self, path, **kwargs): ... class _linkify(UIModule): def render(self, text, **kwargs): ... class _xsrf_form_html(UIModule): def render(self): ... class TemplateModule(UIModule): def __init__(self, handler) -> None: ... def render(self, path, **kwargs): ... def embedded_javascript(self): ... def javascript_files(self): ... def embedded_css(self): ... def css_files(self): ... def html_head(self): ... def html_body(self): ... class _UIModuleNamespace: handler = ... # type: Any ui_modules = ... # type: Any def __init__(self, handler, ui_modules) -> None: ... def __getitem__(self, key): ... def __getattr__(self, key): ... class URLSpec: regex = ... # type: Any handler_class = ... # type: Any kwargs = ... # type: Any name = ... # type: Any def __init__(self, pattern, handler, kwargs=..., name=...) -> None: ... def reverse(self, *args): ... url = ... # type: Any def create_signed_value(secret, name, value, version=..., clock=..., key_version=...): ... def decode_signed_value(secret, name, value, max_age_days=..., clock=..., min_version=...): ... def get_signature_key_version(value): ... 
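Illustrative usage sketch (not part of the mypy-0.560 archive): a minimal Tornado application of the kind the Python 2 stubs above describe. The handler name and port are made up for the example, and since most stubbed signatures are Any, mypy checks little here beyond attribute existence.
import tornado.ioloop
import tornado.web

class HelloHandler(tornado.web.RequestHandler):
    # Hypothetical handler for illustration only.
    def get(self):
        # type: () -> None
        self.write('hello from a stubbed handler')

application = tornado.web.Application([('/', HelloHandler)])

if __name__ == '__main__':
    application.listen(8888)                     # arbitrary example port
    tornado.ioloop.IOLoop.instance().start()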
mypy-0.560/typeshed/third_party/2/werkzeug/0000755€tŠÔÚ€2›s®0000000000013215007244025054 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/werkzeug/__init__.pyi0000644€tŠÔÚ€2›s®0000001266213215007212027340 0ustar jukkaDROPBOX\Domain Users00000000000000from types import ModuleType from typing import Any from werkzeug import _internal from werkzeug import datastructures from werkzeug import debug from werkzeug import exceptions from werkzeug import formparser from werkzeug import http from werkzeug import local from werkzeug import security from werkzeug import serving from werkzeug import test from werkzeug import testapp from werkzeug import urls from werkzeug import useragents from werkzeug import utils from werkzeug import wrappers from werkzeug import wsgi class module(ModuleType): def __getattr__(self, name): ... def __dir__(self): ... __version__ = ... # type: Any run_simple = serving.run_simple test_app = testapp.test_app UserAgent = useragents.UserAgent _easteregg = _internal._easteregg DebuggedApplication = debug.DebuggedApplication MultiDict = datastructures.MultiDict CombinedMultiDict = datastructures.CombinedMultiDict Headers = datastructures.Headers EnvironHeaders = datastructures.EnvironHeaders ImmutableList = datastructures.ImmutableList ImmutableDict = datastructures.ImmutableDict ImmutableMultiDict = datastructures.ImmutableMultiDict TypeConversionDict = datastructures.TypeConversionDict ImmutableTypeConversionDict = datastructures.ImmutableTypeConversionDict Accept = datastructures.Accept MIMEAccept = datastructures.MIMEAccept CharsetAccept = datastructures.CharsetAccept LanguageAccept = datastructures.LanguageAccept RequestCacheControl = datastructures.RequestCacheControl ResponseCacheControl = datastructures.ResponseCacheControl ETags = datastructures.ETags HeaderSet = datastructures.HeaderSet WWWAuthenticate = datastructures.WWWAuthenticate Authorization = datastructures.Authorization FileMultiDict = datastructures.FileMultiDict CallbackDict = datastructures.CallbackDict FileStorage = datastructures.FileStorage OrderedMultiDict = datastructures.OrderedMultiDict ImmutableOrderedMultiDict = datastructures.ImmutableOrderedMultiDict escape = utils.escape environ_property = utils.environ_property append_slash_redirect = utils.append_slash_redirect redirect = utils.redirect cached_property = utils.cached_property import_string = utils.import_string dump_cookie = http.dump_cookie parse_cookie = http.parse_cookie unescape = utils.unescape format_string = utils.format_string find_modules = utils.find_modules header_property = utils.header_property html = utils.html xhtml = utils.xhtml HTMLBuilder = utils.HTMLBuilder validate_arguments = utils.validate_arguments ArgumentValidationError = utils.ArgumentValidationError bind_arguments = utils.bind_arguments secure_filename = utils.secure_filename BaseResponse = wrappers.BaseResponse BaseRequest = wrappers.BaseRequest Request = wrappers.Request Response = wrappers.Response AcceptMixin = wrappers.AcceptMixin ETagRequestMixin = wrappers.ETagRequestMixin ETagResponseMixin = wrappers.ETagResponseMixin ResponseStreamMixin = wrappers.ResponseStreamMixin CommonResponseDescriptorsMixin = wrappers.CommonResponseDescriptorsMixin UserAgentMixin = wrappers.UserAgentMixin AuthorizationMixin = wrappers.AuthorizationMixin WWWAuthenticateMixin = wrappers.WWWAuthenticateMixin CommonRequestDescriptorsMixin = wrappers.CommonRequestDescriptorsMixin Local = local.Local LocalManager = local.LocalManager LocalProxy = 
local.LocalProxy LocalStack = local.LocalStack release_local = local.release_local generate_password_hash = security.generate_password_hash check_password_hash = security.check_password_hash Client = test.Client EnvironBuilder = test.EnvironBuilder create_environ = test.create_environ run_wsgi_app = test.run_wsgi_app get_current_url = wsgi.get_current_url get_host = wsgi.get_host pop_path_info = wsgi.pop_path_info peek_path_info = wsgi.peek_path_info SharedDataMiddleware = wsgi.SharedDataMiddleware DispatcherMiddleware = wsgi.DispatcherMiddleware ClosingIterator = wsgi.ClosingIterator FileWrapper = wsgi.FileWrapper make_line_iter = wsgi.make_line_iter LimitedStream = wsgi.LimitedStream responder = wsgi.responder wrap_file = wsgi.wrap_file extract_path_info = wsgi.extract_path_info parse_etags = http.parse_etags parse_date = http.parse_date http_date = http.http_date cookie_date = http.cookie_date parse_cache_control_header = http.parse_cache_control_header is_resource_modified = http.is_resource_modified parse_accept_header = http.parse_accept_header parse_set_header = http.parse_set_header quote_etag = http.quote_etag unquote_etag = http.unquote_etag generate_etag = http.generate_etag dump_header = http.dump_header parse_list_header = http.parse_list_header parse_dict_header = http.parse_dict_header parse_authorization_header = http.parse_authorization_header parse_www_authenticate_header = http.parse_www_authenticate_header remove_entity_headers = http.remove_entity_headers is_entity_header = http.is_entity_header remove_hop_by_hop_headers = http.remove_hop_by_hop_headers parse_options_header = http.parse_options_header dump_options_header = http.dump_options_header is_hop_by_hop_header = http.is_hop_by_hop_header unquote_header_value = http.unquote_header_value quote_header_value = http.quote_header_value HTTP_STATUS_CODES = http.HTTP_STATUS_CODES url_decode = urls.url_decode url_encode = urls.url_encode url_quote = urls.url_quote url_quote_plus = urls.url_quote_plus url_unquote = urls.url_unquote url_unquote_plus = urls.url_unquote_plus url_fix = urls.url_fix Href = urls.Href iri_to_uri = urls.iri_to_uri uri_to_iri = urls.uri_to_iri parse_form_data = formparser.parse_form_data abort = exceptions.Aborter Aborter = exceptions.Aborter mypy-0.560/typeshed/third_party/2/werkzeug/_compat.pyi0000644€tŠÔÚ€2›s®0000000224713215007212027221 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any import StringIO as BytesIO PY2 = ... # type: Any WIN = ... # type: Any unichr = ... # type: Any text_type = ... # type: Any string_types = ... # type: Any integer_types = ... # type: Any iterkeys = ... # type: Any itervalues = ... # type: Any iteritems = ... # type: Any iterlists = ... # type: Any iterlistvalues = ... # type: Any int_to_byte = ... # type: Any iter_bytes = ... # type: Any def fix_tuple_repr(obj): ... def implements_iterator(cls): ... def implements_to_string(cls): ... def native_string_result(func): ... def implements_bool(cls): ... range_type = ... # type: Any NativeStringIO = ... # type: Any def make_literal_wrapper(reference): ... def normalize_string_tuple(tup): ... def try_coerce_native(s): ... wsgi_get_bytes = ... # type: Any def wsgi_decoding_dance(s, charset='', errors=''): ... def wsgi_encoding_dance(s, charset='', errors=''): ... def to_bytes(x, charset=..., errors=''): ... def to_native(x, charset=..., errors=''): ... def reraise(tp, value, tb=None): ... imap = ... # type: Any izip = ... # type: Any ifilter = ... 
# type: Any def to_unicode(x, charset=..., errors='', allow_none_charset=False): ... mypy-0.560/typeshed/third_party/2/werkzeug/_internal.pyi0000644€tŠÔÚ€2›s®0000000103713215007212027546 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class _Missing: def __reduce__(self): ... class _DictAccessorProperty: read_only = ... # type: Any name = ... # type: Any default = ... # type: Any load_func = ... # type: Any dump_func = ... # type: Any __doc__ = ... # type: Any def __init__(self, name, default=None, load_func=None, dump_func=None, read_only=None, doc=None): ... def __get__(self, obj, type=None): ... def __set__(self, obj, value): ... def __delete__(self, obj): ... def _easteregg(app=None): ... mypy-0.560/typeshed/third_party/2/werkzeug/_reloader.pyi0000644€tŠÔÚ€2›s®0000000157413215007212027535 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class ReloaderLoop: name = ... # type: Any extra_files = ... # type: Any interval = ... # type: Any def __init__(self, extra_files=None, interval=1): ... def run(self): ... def restart_with_reloader(self): ... def trigger_reload(self, filename): ... def log_reload(self, filename): ... class StatReloaderLoop(ReloaderLoop): name = ... # type: Any def run(self): ... class WatchdogReloaderLoop(ReloaderLoop): observable_paths = ... # type: Any name = ... # type: Any observer_class = ... # type: Any event_handler = ... # type: Any should_reload = ... # type: Any def __init__(self, *args, **kwargs): ... def trigger_reload(self, filename): ... def run(self): ... reloader_loops = ... # type: Any def run_with_reloader(main_func, extra_files=None, interval=1, reloader_type=''): ... mypy-0.560/typeshed/third_party/2/werkzeug/contrib/0000755€tŠÔÚ€2›s®0000000000013215007244026514 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/werkzeug/contrib/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030757 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/werkzeug/contrib/atom.pyi0000644€tŠÔÚ€2›s®0000000274413215007212030201 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any XHTML_NAMESPACE = ... # type: Any def format_iso8601(obj): ... class AtomFeed: default_generator = ... # type: Any title = ... # type: Any title_type = ... # type: Any url = ... # type: Any feed_url = ... # type: Any id = ... # type: Any updated = ... # type: Any author = ... # type: Any icon = ... # type: Any logo = ... # type: Any rights = ... # type: Any rights_type = ... # type: Any subtitle = ... # type: Any subtitle_type = ... # type: Any generator = ... # type: Any links = ... # type: Any entries = ... # type: Any def __init__(self, title=None, entries=None, **kwargs): ... def add(self, *args, **kwargs): ... def generate(self): ... def to_string(self): ... def get_response(self): ... def __call__(self, environ, start_response): ... class FeedEntry: title = ... # type: Any title_type = ... # type: Any content = ... # type: Any content_type = ... # type: Any url = ... # type: Any id = ... # type: Any updated = ... # type: Any summary = ... # type: Any summary_type = ... # type: Any author = ... # type: Any published = ... # type: Any rights = ... # type: Any links = ... # type: Any categories = ... # type: Any xml_base = ... # type: Any def __init__(self, title=None, content=None, feed_url=None, **kwargs): ... def generate(self): ... def to_string(self): ... 
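# --- Hedged usage sketch (not part of the stub above): building a feed with the
# --- AtomFeed/FeedEntry API annotated in werkzeug.contrib.atom. Titles, URLs, and
# --- the author name are illustrative placeholders.
from datetime import datetime
from werkzeug.contrib.atom import AtomFeed

feed = AtomFeed("Recent posts",
                feed_url="http://example.com/feed",
                url="http://example.com/")
# Each entry needs at least a title, a url/id, and an updated timestamp.
feed.add("First post", "Post body goes here", content_type="html",
         author="Jane Doe", url="http://example.com/posts/1",
         updated=datetime(2017, 1, 1))
xml = feed.to_string()  # serialized Atom XML as text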
mypy-0.560/typeshed/third_party/2/werkzeug/contrib/cache.pyi0000644€tŠÔÚ€2›s®0000000504413215007212030300 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class BaseCache: default_timeout = ... # type: Any def __init__(self, default_timeout=300): ... def get(self, key): ... def delete(self, key): ... def get_many(self, *keys): ... def get_dict(self, *keys): ... def set(self, key, value, timeout=None): ... def add(self, key, value, timeout=None): ... def set_many(self, mapping, timeout=None): ... def delete_many(self, *keys): ... def has(self, key): ... def clear(self): ... def inc(self, key, delta=1): ... def dec(self, key, delta=1): ... class NullCache(BaseCache): ... class SimpleCache(BaseCache): clear = ... # type: Any def __init__(self, threshold=500, default_timeout=300): ... def get(self, key): ... def set(self, key, value, timeout=None): ... def add(self, key, value, timeout=None): ... def delete(self, key): ... def has(self, key): ... class MemcachedCache(BaseCache): key_prefix = ... # type: Any def __init__(self, servers=None, default_timeout=300, key_prefix=None): ... def get(self, key): ... def get_dict(self, *keys): ... def add(self, key, value, timeout=None): ... def set(self, key, value, timeout=None): ... def get_many(self, *keys): ... def set_many(self, mapping, timeout=None): ... def delete(self, key): ... def delete_many(self, *keys): ... def has(self, key): ... def clear(self): ... def inc(self, key, delta=1): ... def dec(self, key, delta=1): ... def import_preferred_memcache_lib(self, servers): ... GAEMemcachedCache = ... # type: Any class RedisCache(BaseCache): key_prefix = ... # type: Any def __init__(self, host='', port=6379, password=None, db=0, default_timeout=300, key_prefix=None, **kwargs): ... def dump_object(self, value): ... def load_object(self, value): ... def get(self, key): ... def get_many(self, *keys): ... def set(self, key, value, timeout=None): ... def add(self, key, value, timeout=None): ... def set_many(self, mapping, timeout=None): ... def delete(self, key): ... def delete_many(self, *keys): ... def has(self, key): ... def clear(self): ... def inc(self, key, delta=1): ... def dec(self, key, delta=1): ... class FileSystemCache(BaseCache): def __init__(self, cache_dir, threshold=500, default_timeout=300, mode=384): ... def clear(self): ... def get(self, key): ... def add(self, key, value, timeout=None): ... def set(self, key, value, timeout=None): ... def delete(self, key): ... def has(self, key): ... mypy-0.560/typeshed/third_party/2/werkzeug/contrib/fixers.pyi0000644€tŠÔÚ€2›s®0000000230613215007212030533 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class CGIRootFix: app = ... # type: Any app_root = ... # type: Any def __init__(self, app, app_root=''): ... def __call__(self, environ, start_response): ... LighttpdCGIRootFix = ... # type: Any class PathInfoFromRequestUriFix: app = ... # type: Any def __init__(self, app): ... def __call__(self, environ, start_response): ... class ProxyFix: app = ... # type: Any num_proxies = ... # type: Any def __init__(self, app, num_proxies=1): ... def get_remote_addr(self, forwarded_for): ... def __call__(self, environ, start_response): ... class HeaderRewriterFix: app = ... # type: Any remove_headers = ... # type: Any add_headers = ... # type: Any def __init__(self, app, remove_headers=None, add_headers=None): ... def __call__(self, environ, start_response): ... class InternetExplorerFix: app = ... # type: Any fix_vary = ... # type: Any fix_attach = ... 
# type: Any def __init__(self, app, fix_vary=True, fix_attach=True): ... def fix_headers(self, environ, headers, status=None): ... def run_fixed(self, environ, start_response): ... def __call__(self, environ, start_response): ... mypy-0.560/typeshed/third_party/2/werkzeug/contrib/iterio.pyi0000644€tŠÔÚ€2›s®0000000204613215007212030527 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any greenlet = ... # type: Any class IterIO: def __new__(cls, obj, sentinel=''): ... def __iter__(self): ... def tell(self): ... def isatty(self): ... def seek(self, pos, mode=0): ... def truncate(self, size=None): ... def write(self, s): ... def writelines(self, list): ... def read(self, n=-1): ... def readlines(self, sizehint=0): ... def readline(self, length=None): ... def flush(self): ... def __next__(self): ... class IterI(IterIO): def __new__(cls, func, sentinel=''): ... closed = ... # type: Any def close(self): ... def write(self, s): ... def writelines(self, list): ... def flush(self): ... class IterO(IterIO): sentinel = ... # type: Any closed = ... # type: Any pos = ... # type: Any def __new__(cls, gen, sentinel=''): ... def __iter__(self): ... def close(self): ... def seek(self, pos, mode=0): ... def read(self, n=-1): ... def readline(self, length=None): ... def readlines(self, sizehint=0): ... mypy-0.560/typeshed/third_party/2/werkzeug/contrib/jsrouting.pyi0000644€tŠÔÚ€2›s®0000000047313215007212031262 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def dumps(*args): ... def render_template(name_parts, rules, converters): ... def generate_map(map, name=''): ... def generate_adapter(adapter, name='', map_name=''): ... def js_to_url_function(converter): ... def NumberConverter_js_to_url(conv): ... js_to_url_functions = ... # type: Any mypy-0.560/typeshed/third_party/2/werkzeug/contrib/limiter.pyi0000644€tŠÔÚ€2›s®0000000033413215007212030677 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class StreamLimitMiddleware: app = ... # type: Any maximum_size = ... # type: Any def __init__(self, app, maximum_size=...): ... def __call__(self, environ, start_response): ... mypy-0.560/typeshed/third_party/2/werkzeug/contrib/lint.pyi0000644€tŠÔÚ€2›s®0000000223713215007212030204 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class WSGIWarning(Warning): ... class HTTPWarning(Warning): ... def check_string(context, obj, stacklevel=3): ... class InputStream: def __init__(self, stream): ... def read(self, *args): ... def readline(self, *args): ... def __iter__(self): ... def close(self): ... class ErrorStream: def __init__(self, stream): ... def write(self, s): ... def flush(self): ... def writelines(self, seq): ... def close(self): ... class GuardedWrite: def __init__(self, write, chunks): ... def __call__(self, s): ... class GuardedIterator: closed = ... # type: Any headers_set = ... # type: Any chunks = ... # type: Any def __init__(self, iterator, headers_set, chunks): ... def __iter__(self): ... def next(self): ... def close(self): ... def __del__(self): ... class LintMiddleware: app = ... # type: Any def __init__(self, app): ... def check_environ(self, environ): ... def check_start_response(self, status, headers, exc_info): ... def check_headers(self, headers): ... def check_iterator(self, app_iter): ... def __call__(self, *args, **kwargs): ... mypy-0.560/typeshed/third_party/2/werkzeug/contrib/profiler.pyi0000644€tŠÔÚ€2›s®0000000074013215007212031055 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any available = ... 
# type: Any class MergeStream: streams = ... # type: Any def __init__(self, *streams): ... def write(self, data): ... class ProfilerMiddleware: def __init__(self, app, stream=None, sort_by=..., restrictions=..., profile_dir=None): ... def __call__(self, environ, start_response): ... def make_action(app_factory, hostname='', port=5000, threaded=False, processes=1, stream=None, sort_by=..., restrictions=...): ... mypy-0.560/typeshed/third_party/2/werkzeug/contrib/securecookie.pyi0000644€tŠÔÚ€2›s®0000000172313215007212031715 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from hmac import new as hmac from hashlib import sha1 as _default_hash from werkzeug.contrib.sessions import ModificationTrackingDict class UnquoteError(Exception): ... class SecureCookie(ModificationTrackingDict): hash_method = ... # type: Any serialization_method = ... # type: Any quote_base64 = ... # type: Any secret_key = ... # type: Any new = ... # type: Any def __init__(self, data=None, secret_key=None, new=True): ... @property def should_save(self): ... @classmethod def quote(cls, value): ... @classmethod def unquote(cls, value): ... def serialize(self, expires=None): ... @classmethod def unserialize(cls, string, secret_key): ... @classmethod def load_cookie(cls, request, key='', secret_key=None): ... def save_cookie(self, response, key='', expires=None, session_expires=None, max_age=None, path='', domain=None, secure=None, httponly=False, force=False): ... mypy-0.560/typeshed/third_party/2/werkzeug/contrib/sessions.pyi0000644€tŠÔÚ€2›s®0000000354513215007212031107 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug.datastructures import CallbackDict def generate_key(salt=None): ... class ModificationTrackingDict(CallbackDict): modified = ... # type: Any def __init__(self, *args, **kwargs): ... def copy(self): ... def __copy__(self): ... class Session(ModificationTrackingDict): sid = ... # type: Any new = ... # type: Any def __init__(self, data, sid, new=False): ... @property def should_save(self): ... class SessionStore: session_class = ... # type: Any def __init__(self, session_class=None): ... def is_valid_key(self, key): ... def generate_key(self, salt=None): ... def new(self): ... def save(self, session): ... def save_if_modified(self, session): ... def delete(self, session): ... def get(self, sid): ... class FilesystemSessionStore(SessionStore): path = ... # type: Any filename_template = ... # type: Any renew_missing = ... # type: Any mode = ... # type: Any def __init__(self, path=None, filename_template='', session_class=None, renew_missing=False, mode=420): ... def get_session_filename(self, sid): ... def save(self, session): ... def delete(self, session): ... def get(self, sid): ... def list(self): ... class SessionMiddleware: app = ... # type: Any store = ... # type: Any cookie_name = ... # type: Any cookie_age = ... # type: Any cookie_expires = ... # type: Any cookie_path = ... # type: Any cookie_domain = ... # type: Any cookie_secure = ... # type: Any cookie_httponly = ... # type: Any environ_key = ... # type: Any def __init__(self, app, store, cookie_name='', cookie_age=None, cookie_expires=None, cookie_path='', cookie_domain=None, cookie_secure=None, cookie_httponly=False, environ_key=''): ... def __call__(self, environ, start_response): ... 
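# --- Hedged usage sketch (not part of the stub above): wiring the session store and
# --- middleware from werkzeug.contrib.sessions into a plain WSGI app. The storage
# --- path and the "werkzeug.session" environ key are assumptions for illustration.
from werkzeug.contrib.sessions import FilesystemSessionStore, SessionMiddleware

session_store = FilesystemSessionStore(path="/tmp/sessions")

def application(environ, start_response):
    # SessionMiddleware exposes the Session object under its environ_key
    # (by default "werkzeug.session" in werkzeug.contrib.sessions).
    session = environ["werkzeug.session"]
    session["counter"] = session.get("counter", 0) + 1
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [("Visits: %d" % session["counter"]).encode("utf-8")]

application = SessionMiddleware(application, session_store)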
mypy-0.560/typeshed/third_party/2/werkzeug/contrib/testtools.pyi0000644€tŠÔÚ€2›s®0000000032313215007212031270 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug.wrappers import Response class ContentAccessors: def xml(self): ... def lxml(self): ... def json(self): ... class TestResponse(Response, ContentAccessors): ... mypy-0.560/typeshed/third_party/2/werkzeug/contrib/wrappers.pyi0000644€tŠÔÚ€2›s®0000000125713215007212031102 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def is_known_charset(charset): ... class JSONRequestMixin: def json(self): ... class ProtobufRequestMixin: protobuf_check_initialization = ... # type: Any def parse_protobuf(self, proto_type): ... class RoutingArgsRequestMixin: routing_args = ... # type: Any routing_vars = ... # type: Any class ReverseSlashBehaviorRequestMixin: def path(self): ... def script_root(self): ... class DynamicCharsetRequestMixin: default_charset = ... # type: Any def unknown_charset(self, charset): ... def charset(self): ... class DynamicCharsetResponseMixin: default_charset = ... # type: Any charset = ... # type: Any mypy-0.560/typeshed/third_party/2/werkzeug/datastructures.pyi0000644€tŠÔÚ€2›s®0000003042113215007212030647 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from collections import Container, Iterable, Mapping, MutableSet def is_immutable(self): ... def iter_multi_items(mapping): ... def native_itermethods(names): ... class ImmutableListMixin: def __hash__(self): ... def __reduce_ex__(self, protocol): ... def __delitem__(self, key): ... def __delslice__(self, i, j): ... def __iadd__(self, other): ... __imul__ = ... # type: Any def __setitem__(self, key, value): ... def __setslice__(self, i, j, value): ... def append(self, item): ... remove = ... # type: Any def extend(self, iterable): ... def insert(self, pos, value): ... def pop(self, index=-1): ... def reverse(self): ... def sort(self, cmp=None, key=None, reverse=None): ... class ImmutableList(ImmutableListMixin, list): ... class ImmutableDictMixin: @classmethod def fromkeys(cls, *args, **kwargs): ... def __reduce_ex__(self, protocol): ... def __hash__(self): ... def setdefault(self, key, default=None): ... def update(self, *args, **kwargs): ... def pop(self, key, default=None): ... def popitem(self): ... def __setitem__(self, key, value): ... def __delitem__(self, key): ... def clear(self): ... class ImmutableMultiDictMixin(ImmutableDictMixin): def __reduce_ex__(self, protocol): ... def add(self, key, value): ... def popitemlist(self): ... def poplist(self, key): ... def setlist(self, key, new_list): ... def setlistdefault(self, key, default_list=None): ... class UpdateDictMixin: on_update = ... # type: Any def calls_update(name): ... def setdefault(self, key, default=None): ... def pop(self, key, default=...): ... __setitem__ = ... # type: Any __delitem__ = ... # type: Any clear = ... # type: Any popitem = ... # type: Any update = ... # type: Any class TypeConversionDict(dict): def get(self, key, default=None, type=None): ... class ImmutableTypeConversionDict(ImmutableDictMixin, TypeConversionDict): def copy(self): ... def __copy__(self): ... class ViewItems: def __init__(self, multi_dict, method, repr_name, *a, **kw): ... def __iter__(self): ... class MultiDict(TypeConversionDict): def __init__(self, mapping=None): ... def __getitem__(self, key): ... def __setitem__(self, key, value): ... def add(self, key, value): ... def getlist(self, key, type=None): ... def setlist(self, key, new_list): ... 
def setdefault(self, key, default=None): ... def setlistdefault(self, key, default_list=None): ... def items(self, multi=False): ... def lists(self): ... def keys(self): ... __iter__ = ... # type: Any def values(self): ... def listvalues(self): ... def copy(self): ... def deepcopy(self, memo=None): ... def to_dict(self, flat=True): ... def update(self, other_dict): ... def pop(self, key, default=...): ... def popitem(self): ... def poplist(self, key): ... def popitemlist(self): ... def __copy__(self): ... def __deepcopy__(self, memo): ... class _omd_bucket: prev = ... # type: Any key = ... # type: Any value = ... # type: Any next = ... # type: Any def __init__(self, omd, key, value): ... def unlink(self, omd): ... class OrderedMultiDict(MultiDict): def __init__(self, mapping=None): ... def __eq__(self, other): ... def __ne__(self, other): ... def __reduce_ex__(self, protocol): ... def __getitem__(self, key): ... def __setitem__(self, key, value): ... def __delitem__(self, key): ... def keys(self): ... __iter__ = ... # type: Any def values(self): ... def items(self, multi=False): ... def lists(self): ... def listvalues(self): ... def add(self, key, value): ... def getlist(self, key, type=None): ... def setlist(self, key, new_list): ... def setlistdefault(self, key, default_list=None): ... def update(self, mapping): ... def poplist(self, key): ... def pop(self, key, default=...): ... def popitem(self): ... def popitemlist(self): ... class Headers(Mapping): def __init__(self, defaults=None): ... def __getitem__(self, key, _get_mode=False): ... def __eq__(self, other): ... def __ne__(self, other): ... def get(self, key, default=None, type=None, as_bytes=False): ... def getlist(self, key, type=None, as_bytes=False): ... def get_all(self, name): ... def items(self, lower=False): ... def keys(self, lower=False): ... def values(self): ... def extend(self, iterable): ... def __delitem__(self, key, _index_operation=True): ... def remove(self, key): ... def pop(self, **kwargs): ... def popitem(self): ... def __contains__(self, key): ... has_key = ... # type: Any def __iter__(self): ... def __len__(self): ... def add(self, _key, _value, **kw): ... def add_header(self, _key, _value, **_kw): ... def clear(self): ... def set(self, _key, _value, **kw): ... def setdefault(self, key, value): ... def __setitem__(self, key, value): ... def to_list(self, charset=''): ... def to_wsgi_list(self): ... def copy(self): ... def __copy__(self): ... class ImmutableHeadersMixin: def __delitem__(self, key, _index_operation=True): ... # FIXME: This is invalid but works around https://github.com/pallets/werkzeug/issues/1051 def __setitem__(self, key, value): ... set = ... # type: Any def add(self, *args, **kwargs): ... remove = ... # type: Any add_header = ... # type: Any def extend(self, iterable): ... def insert(self, pos, value): ... def pop(self, **kwargs): ... def popitem(self): ... def setdefault(self, key, default): ... class EnvironHeaders(ImmutableHeadersMixin, Headers): environ = ... # type: Any def __init__(self, environ): ... def __eq__(self, other): ... def __getitem__(self, key, _get_mode=False): ... def __len__(self): ... def __iter__(self): ... def copy(self): ... class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict): def __reduce_ex__(self, protocol): ... dicts = ... # type: Any def __init__(self, dicts=None): ... @classmethod def fromkeys(cls): ... def __getitem__(self, key): ... def get(self, key, default=None, type=None): ... def getlist(self, key, type=None): ... def keys(self): ... 
__iter__ = ... # type: Any def items(self, multi=False): ... def values(self): ... def lists(self): ... def listvalues(self): ... def copy(self): ... def to_dict(self, flat=True): ... def __len__(self): ... def __contains__(self, key): ... has_key = ... # type: Any class FileMultiDict(MultiDict): def add_file(self, name, file, filename=None, content_type=None): ... class ImmutableDict(ImmutableDictMixin, dict): def copy(self): ... def __copy__(self): ... class ImmutableMultiDict(ImmutableMultiDictMixin, MultiDict): def copy(self): ... def __copy__(self): ... class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict): def copy(self): ... def __copy__(self): ... class Accept(ImmutableList): provided = ... # type: Any def __init__(self, values=...): ... def __getitem__(self, key): ... def quality(self, key): ... def __contains__(self, value): ... def index(self, key): ... def find(self, key): ... def values(self): ... def to_header(self): ... def best_match(self, matches, default=None): ... @property def best(self): ... class MIMEAccept(Accept): @property def accept_html(self): ... @property def accept_xhtml(self): ... @property def accept_json(self): ... class LanguageAccept(Accept): ... class CharsetAccept(Accept): ... def cache_property(key, empty, type): ... class _CacheControl(UpdateDictMixin, dict): no_cache = ... # type: Any no_store = ... # type: Any max_age = ... # type: Any no_transform = ... # type: Any on_update = ... # type: Any provided = ... # type: Any def __init__(self, values=..., on_update=None): ... def to_header(self): ... class RequestCacheControl(ImmutableDictMixin, _CacheControl): max_stale = ... # type: Any min_fresh = ... # type: Any no_transform = ... # type: Any only_if_cached = ... # type: Any class ResponseCacheControl(_CacheControl): public = ... # type: Any private = ... # type: Any must_revalidate = ... # type: Any proxy_revalidate = ... # type: Any s_maxage = ... # type: Any class CallbackDict(UpdateDictMixin, dict): on_update = ... # type: Any def __init__(self, initial=None, on_update=None): ... class HeaderSet(MutableSet): on_update = ... # type: Any def __init__(self, headers=None, on_update=None): ... def add(self, header): ... def remove(self, header): ... def update(self, iterable): ... def discard(self, header): ... def find(self, header): ... def index(self, header): ... def clear(self): ... def as_set(self, preserve_casing=False): ... def to_header(self): ... def __getitem__(self, idx): ... def __delitem__(self, idx): ... def __setitem__(self, idx, value): ... def __contains__(self, header): ... def __len__(self): ... def __iter__(self): ... def __nonzero__(self): ... class ETags(Container, Iterable): star_tag = ... # type: Any def __init__(self, strong_etags=None, weak_etags=None, star_tag=False): ... def as_set(self, include_weak=False): ... def is_weak(self, etag): ... def contains_weak(self, etag): ... def contains(self, etag): ... def contains_raw(self, etag): ... def to_header(self): ... def __call__(self, etag=None, data=None, include_weak=False): ... def __bool__(self): ... __nonzero__ = ... # type: Any def __iter__(self): ... def __contains__(self, etag): ... class IfRange: etag = ... # type: Any date = ... # type: Any def __init__(self, etag=None, date=None): ... def to_header(self): ... class Range: units = ... # type: Any ranges = ... # type: Any def __init__(self, units, ranges): ... def range_for_length(self, length): ... def make_content_range(self, length): ... def to_header(self): ... 
def to_content_range_header(self, length): ... class ContentRange: on_update = ... # type: Any def __init__(self, units, start, stop, length=None, on_update=None): ... units = ... # type: Any start = ... # type: Any stop = ... # type: Any length = ... # type: Any def set(self, start, stop, length=None, units=''): ... def unset(self): ... def to_header(self): ... def __nonzero__(self): ... __bool__ = ... # type: Any class Authorization(ImmutableDictMixin, dict): type = ... # type: Any def __init__(self, auth_type, data=None): ... username = ... # type: Any password = ... # type: Any realm = ... # type: Any nonce = ... # type: Any uri = ... # type: Any nc = ... # type: Any cnonce = ... # type: Any response = ... # type: Any opaque = ... # type: Any @property def qop(self): ... class WWWAuthenticate(UpdateDictMixin, dict): on_update = ... # type: Any def __init__(self, auth_type=None, values=None, on_update=None): ... def set_basic(self, realm=''): ... def set_digest(self, realm, nonce, qop=..., opaque=None, algorithm=None, stale=False): ... def to_header(self): ... @staticmethod def auth_property(name, doc=None): ... type = ... # type: Any realm = ... # type: Any domain = ... # type: Any nonce = ... # type: Any opaque = ... # type: Any algorithm = ... # type: Any qop = ... # type: Any stale = ... # type: Any class FileStorage: name = ... # type: Any stream = ... # type: Any filename = ... # type: Any headers = ... # type: Any def __init__(self, stream=None, filename=None, name=None, content_type=None, content_length=None, headers=None): ... @property def content_type(self): ... @property def content_length(self): ... @property def mimetype(self): ... @property def mimetype_params(self): ... def save(self, dst, buffer_size=16384): ... def close(self): ... def __nonzero__(self): ... __bool__ = ... # type: Any def __getattr__(self, name): ... def __iter__(self): ... mypy-0.560/typeshed/third_party/2/werkzeug/debug/0000755€tŠÔÚ€2›s®0000000000013215007244026142 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2/werkzeug/debug/__init__.pyi0000644€tŠÔÚ€2›s®0000000251113215007212030416 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response PIN_TIME = ... # type: Any class _ConsoleFrame: console = ... # type: Any id = ... # type: Any def __init__(self, namespace): ... def get_pin_and_cookie_name(app): ... class DebuggedApplication: app = ... # type: Any evalex = ... # type: Any frames = ... # type: Any tracebacks = ... # type: Any request_key = ... # type: Any console_path = ... # type: Any console_init_func = ... # type: Any show_hidden_frames = ... # type: Any secret = ... # type: Any pin_logging = ... # type: Any pin = ... # type: Any def __init__(self, app, evalex=False, request_key='', console_path='', console_init_func=None, show_hidden_frames=False, lodgeit_url=None, pin_security=True, pin_logging=True): ... @property def pin_cookie_name(self): ... def debug_application(self, environ, start_response): ... def execute_command(self, request, command, frame): ... def display_console(self, request): ... def paste_traceback(self, request, traceback): ... def get_resource(self, request, filename): ... def is_trusted(self, environ): ... def pin_auth(self, request): ... def log_pin_request(self): ... def __call__(self, environ, start_response): ... 
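# --- Hedged usage sketch (not part of the stub above): wrapping a WSGI app in the
# --- interactive debugger described by DebuggedApplication, served with run_simple.
# --- The app body, host, and port are placeholders.
from werkzeug.debug import DebuggedApplication
from werkzeug.serving import run_simple

def app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello"]

# evalex=True enables the in-browser console; only enable it on trusted hosts.
debugged = DebuggedApplication(app, evalex=True)

if __name__ == "__main__":
    run_simple("localhost", 5000, debugged, use_reloader=True)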
mypy-0.560/typeshed/third_party/2/werkzeug/debug/console.pyi0000644€tŠÔÚ€2›s®0000000223613215007212030325 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any import code class HTMLStringO: def __init__(self): ... def isatty(self): ... def close(self): ... def flush(self): ... def seek(self, n, mode=0): ... def readline(self): ... def reset(self): ... def write(self, x): ... def writelines(self, x): ... class ThreadedStream: @staticmethod def push(): ... @staticmethod def fetch(): ... @staticmethod def displayhook(obj): ... def __setattr__(self, name, value): ... def __dir__(self): ... def __getattribute__(self, name): ... class _ConsoleLoader: def __init__(self): ... def register(self, code, source): ... def get_source_by_code(self, code): ... class _InteractiveConsole(code.InteractiveInterpreter): globals = ... # type: Any more = ... # type: Any buffer = ... # type: Any def __init__(self, globals, locals): ... def runsource(self, source): ... def runcode(self, code): ... def showtraceback(self): ... def showsyntaxerror(self, filename=None): ... def write(self, data): ... class Console: def __init__(self, globals=None, locals=None): ... def eval(self, code): ... mypy-0.560/typeshed/third_party/2/werkzeug/debug/repr.pyi0000644€tŠÔÚ€2›s®0000000165513215007212027637 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any deque = ... # type: Any missing = ... # type: Any RegexType = ... # type: Any HELP_HTML = ... # type: Any OBJECT_DUMP_HTML = ... # type: Any def debug_repr(obj): ... def dump(obj=...): ... class _Helper: def __call__(self, topic=None): ... helper = ... # type: Any class DebugReprGenerator: def __init__(self): ... list_repr = ... # type: Any tuple_repr = ... # type: Any set_repr = ... # type: Any frozenset_repr = ... # type: Any deque_repr = ... # type: Any def regex_repr(self, obj): ... def string_repr(self, obj, limit=70): ... def dict_repr(self, d, recursive, limit=5): ... def object_repr(self, obj): ... def dispatch_repr(self, obj, recursive): ... def fallback_repr(self): ... def repr(self, obj): ... def dump_object(self, obj): ... def dump_locals(self, d): ... def render_object_dump(self, items, title, repr=None): ... mypy-0.560/typeshed/third_party/2/werkzeug/debug/tbtools.pyi0000644€tŠÔÚ€2›s®0000000370213215007212030350 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any UTF8_COOKIE = ... # type: Any system_exceptions = ... # type: Any HEADER = ... # type: Any FOOTER = ... # type: Any PAGE_HTML = ... # type: Any CONSOLE_HTML = ... # type: Any SUMMARY_HTML = ... # type: Any FRAME_HTML = ... # type: Any SOURCE_LINE_HTML = ... # type: Any def render_console_html(secret, evalex_trusted=True): ... def get_current_traceback(ignore_system_exceptions=False, show_hidden_frames=False, skip=0): ... class Line: lineno = ... # type: Any code = ... # type: Any in_frame = ... # type: Any current = ... # type: Any def __init__(self, lineno, code): ... def classes(self): ... def render(self): ... class Traceback: exc_type = ... # type: Any exc_value = ... # type: Any exception_type = ... # type: Any frames = ... # type: Any def __init__(self, exc_type, exc_value, tb): ... def filter_hidden_frames(self): ... def is_syntax_error(self): ... def exception(self): ... def log(self, logfile=None): ... def paste(self): ... def render_summary(self, include_title=True): ... def render_full(self, evalex=False, secret=None, evalex_trusted=True): ... def generate_plaintext_traceback(self): ... def plaintext(self): ... id = ... 
# type: Any class Frame: lineno = ... # type: Any function_name = ... # type: Any locals = ... # type: Any globals = ... # type: Any filename = ... # type: Any module = ... # type: Any loader = ... # type: Any code = ... # type: Any hide = ... # type: Any info = ... # type: Any def __init__(self, exc_type, exc_value, tb): ... def render(self): ... def render_line_context(self): ... def get_annotated_lines(self): ... def eval(self, code, mode=''): ... def sourcelines(self): ... def get_context_lines(self, context=5): ... @property def current_line(self): ... def console(self): ... id = ... # type: Any mypy-0.560/typeshed/third_party/2/werkzeug/exceptions.pyi0000644€tŠÔÚ€2›s®0000000721013215007212027753 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class HTTPException(Exception): code = ... # type: Any description = ... # type: Any response = ... # type: Any def __init__(self, description=None, response=None): ... @classmethod def wrap(cls, exception, name=None): ... @property def name(self): ... def get_description(self, environ=None): ... def get_body(self, environ=None): ... def get_headers(self, environ=None): ... def get_response(self, environ=None): ... def __call__(self, environ, start_response): ... class BadRequest(HTTPException): code = ... # type: Any description = ... # type: Any class ClientDisconnected(BadRequest): ... class SecurityError(BadRequest): ... class BadHost(BadRequest): ... class Unauthorized(HTTPException): code = ... # type: Any description = ... # type: Any class Forbidden(HTTPException): code = ... # type: Any description = ... # type: Any class NotFound(HTTPException): code = ... # type: Any description = ... # type: Any class MethodNotAllowed(HTTPException): code = ... # type: Any description = ... # type: Any valid_methods = ... # type: Any def __init__(self, valid_methods=None, description=None): ... def get_headers(self, environ): ... class NotAcceptable(HTTPException): code = ... # type: Any description = ... # type: Any class RequestTimeout(HTTPException): code = ... # type: Any description = ... # type: Any class Conflict(HTTPException): code = ... # type: Any description = ... # type: Any class Gone(HTTPException): code = ... # type: Any description = ... # type: Any class LengthRequired(HTTPException): code = ... # type: Any description = ... # type: Any class PreconditionFailed(HTTPException): code = ... # type: Any description = ... # type: Any class RequestEntityTooLarge(HTTPException): code = ... # type: Any description = ... # type: Any class RequestURITooLarge(HTTPException): code = ... # type: Any description = ... # type: Any class UnsupportedMediaType(HTTPException): code = ... # type: Any description = ... # type: Any class RequestedRangeNotSatisfiable(HTTPException): code = ... # type: Any description = ... # type: Any class ExpectationFailed(HTTPException): code = ... # type: Any description = ... # type: Any class ImATeapot(HTTPException): code = ... # type: Any description = ... # type: Any class UnprocessableEntity(HTTPException): code = ... # type: Any description = ... # type: Any class PreconditionRequired(HTTPException): code = ... # type: Any description = ... # type: Any class TooManyRequests(HTTPException): code = ... # type: Any description = ... # type: Any class RequestHeaderFieldsTooLarge(HTTPException): code = ... # type: Any description = ... # type: Any class InternalServerError(HTTPException): code = ... # type: Any description = ... # type: Any class NotImplemented(HTTPException): code = ... 
# type: Any description = ... # type: Any class BadGateway(HTTPException): code = ... # type: Any description = ... # type: Any class ServiceUnavailable(HTTPException): code = ... # type: Any description = ... # type: Any class GatewayTimeout(HTTPException): code = ... # type: Any description = ... # type: Any class HTTPVersionNotSupported(HTTPException): code = ... # type: Any description = ... # type: Any class Aborter: mapping = ... # type: Any def __init__(self, mapping=None, extra=None): ... def __call__(self, code, *args, **kwargs): ... mypy-0.560/typeshed/third_party/2/werkzeug/filesystem.pyi0000644€tŠÔÚ€2›s®0000000026713215007212027763 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any has_likely_buggy_unicode_filesystem = ... # type: Any class BrokenFilesystemWarning(RuntimeWarning, UnicodeWarning): ... def get_filesystem_encoding(): ... mypy-0.560/typeshed/third_party/2/werkzeug/formparser.pyi0000644€tŠÔÚ€2›s®0000000350613215007212027756 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def default_stream_factory(total_content_length, filename, content_type, content_length=None): ... def parse_form_data(environ, stream_factory=None, charset='', errors='', max_form_memory_size=None, max_content_length=None, cls=None, silent=True): ... def exhaust_stream(f): ... class FormDataParser: stream_factory = ... # type: Any charset = ... # type: Any errors = ... # type: Any max_form_memory_size = ... # type: Any max_content_length = ... # type: Any cls = ... # type: Any silent = ... # type: Any def __init__(self, stream_factory=None, charset='', errors='', max_form_memory_size=None, max_content_length=None, cls=None, silent=True): ... def get_parse_func(self, mimetype, options): ... def parse_from_environ(self, environ): ... def parse(self, stream, mimetype, content_length, options=None): ... parse_functions = ... # type: Any def is_valid_multipart_boundary(boundary): ... def parse_multipart_headers(iterable): ... class MultiPartParser: stream_factory = ... # type: Any charset = ... # type: Any errors = ... # type: Any max_form_memory_size = ... # type: Any cls = ... # type: Any buffer_size = ... # type: Any def __init__(self, stream_factory=None, charset='', errors='', max_form_memory_size=None, cls=None, buffer_size=...): ... def fail(self, message): ... def get_part_encoding(self, headers): ... def get_part_charset(self, headers): ... def start_file_streaming(self, filename, headers, total_content_length): ... def in_memory_threshold_reached(self, bytes): ... def validate_boundary(self, boundary): ... def parse_lines(self, file, boundary, content_length): ... def parse_parts(self, file, boundary, content_length): ... def parse(self, file, boundary, content_length): ... mypy-0.560/typeshed/third_party/2/werkzeug/http.pyi0000644€tŠÔÚ€2›s®0000000322013215007212026546 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from urllib2 import parse_http_list as _parse_list_header HTTP_STATUS_CODES = ... # type: Any def wsgi_to_bytes(data): ... def bytes_to_wsgi(data): ... def quote_header_value(value, extra_chars='', allow_token=True): ... def unquote_header_value(value, is_filename=False): ... def dump_options_header(header, options): ... def dump_header(iterable, allow_token=True): ... def parse_list_header(value): ... def parse_dict_header(value, cls=...): ... def parse_options_header(value, multiple=False): ... def parse_accept_header(value, cls=None): ... def parse_cache_control_header(value, on_update=None, cls=None): ... 
def parse_set_header(value, on_update=None): ... def parse_authorization_header(value): ... def parse_www_authenticate_header(value, on_update=None): ... def parse_if_range_header(value): ... def parse_range_header(value, make_inclusive=True): ... def parse_content_range_header(value, on_update=None): ... def quote_etag(etag, weak=False): ... def unquote_etag(etag): ... def parse_etags(value): ... def generate_etag(data): ... def parse_date(value): ... def cookie_date(expires=None): ... def http_date(timestamp=None): ... def is_resource_modified(environ, etag=None, data=None, last_modified=None): ... def remove_entity_headers(headers, allowed=...): ... def remove_hop_by_hop_headers(headers): ... def is_entity_header(header): ... def is_hop_by_hop_header(header): ... def parse_cookie(header, charset='', errors='', cls=None): ... def dump_cookie(key, value='', max_age=None, expires=None, path='', domain=None, secure=False, httponly=False, charset='', sync_expires=True): ... def is_byte_range_valid(start, stop, length): ... mypy-0.560/typeshed/third_party/2/werkzeug/local.pyi0000644€tŠÔÚ€2›s®0000000575713215007212026702 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def release_local(local): ... class Local: def __init__(self): ... def __iter__(self): ... def __call__(self, proxy): ... def __release_local__(self): ... def __getattr__(self, name): ... def __setattr__(self, name, value): ... def __delattr__(self, name): ... class LocalStack: def __init__(self): ... def __release_local__(self): ... def _get__ident_func__(self): ... def _set__ident_func__(self, value): ... __ident_func__ = ... # type: Any def __call__(self): ... def push(self, obj): ... def pop(self): ... @property def top(self): ... class LocalManager: locals = ... # type: Any ident_func = ... # type: Any def __init__(self, locals=None, ident_func=None): ... def get_ident(self): ... def cleanup(self): ... def make_middleware(self, app): ... def middleware(self, func): ... class LocalProxy: def __init__(self, local, name=None): ... @property def __dict__(self): ... def __bool__(self): ... def __unicode__(self): ... def __dir__(self): ... def __getattr__(self, name): ... def __setitem__(self, key, value): ... def __delitem__(self, key): ... __getslice__ = ... # type: Any def __setslice__(self, i, j, seq): ... def __delslice__(self, i, j): ... __setattr__ = ... # type: Any __delattr__ = ... # type: Any __lt__ = ... # type: Any __le__ = ... # type: Any __eq__ = ... # type: Any __ne__ = ... # type: Any __gt__ = ... # type: Any __ge__ = ... # type: Any __cmp__ = ... # type: Any __hash__ = ... # type: Any __call__ = ... # type: Any __len__ = ... # type: Any __getitem__ = ... # type: Any __iter__ = ... # type: Any __contains__ = ... # type: Any __add__ = ... # type: Any __sub__ = ... # type: Any __mul__ = ... # type: Any __floordiv__ = ... # type: Any __mod__ = ... # type: Any __divmod__ = ... # type: Any __pow__ = ... # type: Any __lshift__ = ... # type: Any __rshift__ = ... # type: Any __and__ = ... # type: Any __xor__ = ... # type: Any __or__ = ... # type: Any __div__ = ... # type: Any __truediv__ = ... # type: Any __neg__ = ... # type: Any __pos__ = ... # type: Any __abs__ = ... # type: Any __invert__ = ... # type: Any __complex__ = ... # type: Any __int__ = ... # type: Any __long__ = ... # type: Any __float__ = ... # type: Any __oct__ = ... # type: Any __hex__ = ... # type: Any __index__ = ... # type: Any __coerce__ = ... # type: Any __enter__ = ... # type: Any __exit__ = ... # type: Any __radd__ = ... 
# type: Any __rsub__ = ... # type: Any __rmul__ = ... # type: Any __rdiv__ = ... # type: Any __rtruediv__ = ... # type: Any __rfloordiv__ = ... # type: Any __rmod__ = ... # type: Any __rdivmod__ = ... # type: Any __copy__ = ... # type: Any __deepcopy__ = ... # type: Any mypy-0.560/typeshed/third_party/2/werkzeug/posixemulation.pyi0000644€tŠÔÚ€2›s®0000000032413215007212030651 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from ._compat import to_unicode as to_unicode from .filesystem import get_filesystem_encoding as get_filesystem_encoding can_rename_open_file = ... # type: Any def rename(src, dst): ... mypy-0.560/typeshed/third_party/2/werkzeug/routing.pyi0000644€tŠÔÚ€2›s®0000001433313215007212027265 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug.exceptions import HTTPException def parse_converter_args(argstr): ... def parse_rule(rule): ... class RoutingException(Exception): ... class RequestRedirect(HTTPException, RoutingException): code = ... # type: Any new_url = ... # type: Any def __init__(self, new_url): ... def get_response(self, environ): ... class RequestSlash(RoutingException): ... class RequestAliasRedirect(RoutingException): matched_values = ... # type: Any def __init__(self, matched_values): ... class BuildError(RoutingException, LookupError): endpoint = ... # type: Any values = ... # type: Any method = ... # type: Any suggested = ... # type: Any def __init__(self, endpoint, values, method, adapter=None): ... def closest_rule(self, adapter): ... class ValidationError(ValueError): ... class RuleFactory: def get_rules(self, map): ... class Subdomain(RuleFactory): subdomain = ... # type: Any rules = ... # type: Any def __init__(self, subdomain, rules): ... def get_rules(self, map): ... class Submount(RuleFactory): path = ... # type: Any rules = ... # type: Any def __init__(self, path, rules): ... def get_rules(self, map): ... class EndpointPrefix(RuleFactory): prefix = ... # type: Any rules = ... # type: Any def __init__(self, prefix, rules): ... def get_rules(self, map): ... class RuleTemplate: rules = ... # type: Any def __init__(self, rules): ... def __call__(self, *args, **kwargs): ... class RuleTemplateFactory(RuleFactory): rules = ... # type: Any context = ... # type: Any def __init__(self, rules, context): ... def get_rules(self, map): ... class Rule(RuleFactory): rule = ... # type: Any is_leaf = ... # type: Any map = ... # type: Any strict_slashes = ... # type: Any subdomain = ... # type: Any host = ... # type: Any defaults = ... # type: Any build_only = ... # type: Any alias = ... # type: Any methods = ... # type: Any endpoint = ... # type: Any redirect_to = ... # type: Any arguments = ... # type: Any def __init__(self, string, defaults=None, subdomain=None, methods=None, build_only=False, endpoint=None, strict_slashes=None, redirect_to=None, alias=False, host=None): ... def empty(self): ... def get_empty_kwargs(self): ... def get_rules(self, map): ... def refresh(self): ... def bind(self, map, rebind=False): ... def get_converter(self, variable_name, converter_name, args, kwargs): ... def compile(self): ... def match(self, path): ... def build(self, values, append_unknown=True): ... def provides_defaults_for(self, rule): ... def suitable_for(self, values, method=None): ... def match_compare_key(self): ... def build_compare_key(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class BaseConverter: regex = ... # type: Any weight = ... # type: Any map = ... 
# type: Any def __init__(self, map): ... def to_python(self, value): ... def to_url(self, value): ... class UnicodeConverter(BaseConverter): regex = ... # type: Any def __init__(self, map, minlength=1, maxlength=None, length=None): ... class AnyConverter(BaseConverter): regex = ... # type: Any def __init__(self, map, *items): ... class PathConverter(BaseConverter): regex = ... # type: Any weight = ... # type: Any class NumberConverter(BaseConverter): weight = ... # type: Any fixed_digits = ... # type: Any min = ... # type: Any max = ... # type: Any def __init__(self, map, fixed_digits=0, min=None, max=None): ... def to_python(self, value): ... def to_url(self, value): ... class IntegerConverter(NumberConverter): regex = ... # type: Any num_convert = ... # type: Any class FloatConverter(NumberConverter): regex = ... # type: Any num_convert = ... # type: Any def __init__(self, map, min=None, max=None): ... class UUIDConverter(BaseConverter): regex = ... # type: Any def to_python(self, value): ... def to_url(self, value): ... DEFAULT_CONVERTERS = ... # type: Any class Map: default_converters = ... # type: Any default_subdomain = ... # type: Any charset = ... # type: Any encoding_errors = ... # type: Any strict_slashes = ... # type: Any redirect_defaults = ... # type: Any host_matching = ... # type: Any converters = ... # type: Any sort_parameters = ... # type: Any sort_key = ... # type: Any def __init__(self, rules=None, default_subdomain='', charset='', strict_slashes=True, redirect_defaults=True, converters=None, sort_parameters=False, sort_key=None, encoding_errors='', host_matching=False): ... def is_endpoint_expecting(self, endpoint, *arguments): ... def iter_rules(self, endpoint=None): ... def add(self, rulefactory): ... def bind(self, server_name, script_name=None, subdomain=None, url_scheme='', default_method='', path_info=None, query_args=None): ... def bind_to_environ(self, environ, server_name=None, subdomain=None): ... def update(self): ... class MapAdapter: map = ... # type: Any server_name = ... # type: Any script_name = ... # type: Any subdomain = ... # type: Any url_scheme = ... # type: Any path_info = ... # type: Any default_method = ... # type: Any query_args = ... # type: Any def __init__(self, map, server_name, script_name, subdomain, url_scheme, path_info, default_method, query_args=None): ... def dispatch(self, view_func, path_info=None, method=None, catch_http_exceptions=False): ... def match(self, path_info=None, method=None, return_rule=False, query_args=None): ... def test(self, path_info=None, method=None): ... def allowed_methods(self, path_info=None): ... def get_host(self, domain_part): ... def get_default_redirect(self, rule, method, values, query_args): ... def encode_query_args(self, query_args): ... def make_redirect_url(self, path_info, query_args=None, domain_part=None): ... def make_alias_redirect_url(self, path, endpoint, values, method, query_args): ... def build(self, endpoint, values=None, method=None, force_external=False, append_unknown=True): ... mypy-0.560/typeshed/third_party/2/werkzeug/script.pyi0000644€tŠÔÚ€2›s®0000000105413215007212027076 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any argument_types = ... # type: Any converters = ... # type: Any def run(namespace=None, action_prefix='', args=None): ... def fail(message, code=-1): ... def find_actions(namespace, action_prefix): ... def print_usage(actions): ... def analyse_action(func): ... def make_shell(init_func=None, banner=None, use_ipython=True): ... 
def make_runserver(app_factory, hostname='', port=5000, use_reloader=False, use_debugger=False, use_evalex=True, threaded=False, processes=1, static_files=None, extra_files=None, ssl_context=None): ... mypy-0.560/typeshed/third_party/2/werkzeug/security.pyi0000644€tŠÔÚ€2›s®0000000071513215007212027444 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any SALT_CHARS = ... # type: Any DEFAULT_PBKDF2_ITERATIONS = ... # type: Any def pbkdf2_hex(data, salt, iterations=..., keylen=None, hashfunc=None): ... def pbkdf2_bin(data, salt, iterations=..., keylen=None, hashfunc=None): ... def safe_str_cmp(a, b): ... def gen_salt(length): ... def generate_password_hash(password, method='', salt_length=8): ... def check_password_hash(pwhash, password): ... def safe_join(directory, filename): ... mypy-0.560/typeshed/third_party/2/werkzeug/serving.pyi0000644€tŠÔÚ€2›s®0000000611613215007212027253 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from ._compat import PY2 as PY2 from SocketServer import ThreadingMixIn, ForkingMixIn from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler class _SslDummy: def __getattr__(self, name): ... ssl = ... # type: Any LISTEN_QUEUE = ... # type: Any can_open_by_fd = ... # type: Any class WSGIRequestHandler(BaseHTTPRequestHandler): @property def server_version(self): ... def make_environ(self): ... environ = ... # type: Any close_connection = ... # type: Any def run_wsgi(self): ... def handle(self): ... def initiate_shutdown(self): ... def connection_dropped(self, error, environ=None): ... raw_requestline = ... # type: Any def handle_one_request(self): ... def send_response(self, code, message=None): ... def version_string(self): ... def address_string(self): ... def log_request(self, code='', size=''): ... def log_error(self, *args): ... def log_message(self, format, *args): ... def log(self, type, message, *args): ... BaseRequestHandler = ... # type: Any def generate_adhoc_ssl_pair(cn=None): ... def make_ssl_devcert(base_path, host=None, cn=None): ... def generate_adhoc_ssl_context(): ... def load_ssl_context(cert_file, pkey_file=None, protocol=None): ... class _SSLContext: def __init__(self, protocol): ... def load_cert_chain(self, certfile, keyfile=None, password=None): ... def wrap_socket(self, sock, **kwargs): ... def is_ssl_error(error=None): ... def select_ip_version(host, port): ... class BaseWSGIServer(HTTPServer): multithread = ... # type: Any multiprocess = ... # type: Any request_queue_size = ... # type: Any address_family = ... # type: Any app = ... # type: Any passthrough_errors = ... # type: Any shutdown_signal = ... # type: Any host = ... # type: Any port = ... # type: Any socket = ... # type: Any server_address = ... # type: Any ssl_context = ... # type: Any def __init__(self, host, port, app, handler=None, passthrough_errors=False, ssl_context=None, fd=None): ... def log(self, type, message, *args): ... def serve_forever(self): ... def handle_error(self, request, client_address): ... def get_request(self): ... class ThreadedWSGIServer(ThreadingMixIn, BaseWSGIServer): multithread = ... # type: Any class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer): multiprocess = ... # type: Any max_children = ... # type: Any def __init__(self, host, port, app, processes=40, handler=None, passthrough_errors=False, ssl_context=None, fd=None): ... def make_server(host=None, port=None, app=None, threaded=False, processes=1, request_handler=None, passthrough_errors=False, ssl_context=None, fd=None): ... 
def is_running_from_reloader(): ... def run_simple(hostname, port, application, use_reloader=False, use_debugger=False, use_evalex=True, extra_files=None, reloader_interval=1, reloader_type='', threaded=False, processes=1, request_handler=None, static_files=None, passthrough_errors=False, ssl_context=None): ... def run_with_reloader(*args, **kwargs): ... def main(): ... mypy-0.560/typeshed/third_party/2/werkzeug/test.pyi0000644€tŠÔÚ€2›s®0000000615713215007212026562 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from urllib2 import Request as U2Request from cookielib import CookieJar def stream_encode_multipart(values, use_tempfile=True, threshold=..., boundary=None, charset=''): ... def encode_multipart(values, boundary=None, charset=''): ... def File(fd, filename=None, mimetype=None): ... class _TestCookieHeaders: headers = ... # type: Any def __init__(self, headers): ... def getheaders(self, name): ... def get_all(self, name, default=None): ... class _TestCookieResponse: headers = ... # type: Any def __init__(self, headers): ... def info(self): ... class _TestCookieJar(CookieJar): def inject_wsgi(self, environ): ... def extract_wsgi(self, environ, headers): ... class EnvironBuilder: server_protocol = ... # type: Any wsgi_version = ... # type: Any request_class = ... # type: Any charset = ... # type: Any path = ... # type: Any base_url = ... # type: Any query_string = ... # type: Any args = ... # type: Any method = ... # type: Any headers = ... # type: Any content_type = ... # type: Any errors_stream = ... # type: Any multithread = ... # type: Any multiprocess = ... # type: Any run_once = ... # type: Any environ_base = ... # type: Any environ_overrides = ... # type: Any input_stream = ... # type: Any content_length = ... # type: Any closed = ... # type: Any def __init__(self, path='', base_url=None, query_string=None, method='', input_stream=None, content_type=None, content_length=None, errors_stream=None, multithread=False, multiprocess=False, run_once=False, headers=None, data=None, environ_base=None, environ_overrides=None, charset=''): ... def form_property(name, storage, doc): ... form = ... # type: Any files = ... # type: Any @property def server_name(self): ... @property def server_port(self): ... def __del__(self): ... def close(self): ... def get_environ(self): ... def get_request(self, cls=None): ... class ClientRedirectError(Exception): ... class Client: application = ... # type: Any response_wrapper = ... # type: Any cookie_jar = ... # type: Any allow_subdomain_redirects = ... # type: Any def __init__(self, application, response_wrapper=None, use_cookies=True, allow_subdomain_redirects=False): ... def set_cookie(self, server_name, key, value='', max_age=None, expires=None, path='', domain=None, secure=None, httponly=False, charset=''): ... def delete_cookie(self, server_name, key, path='', domain=None): ... def run_wsgi_app(self, environ, buffered=False): ... def resolve_redirect(self, response, new_location, environ, buffered=False): ... def open(self, *args, **kwargs): ... def get(self, *args, **kw): ... def patch(self, *args, **kw): ... def post(self, *args, **kw): ... def head(self, *args, **kw): ... def put(self, *args, **kw): ... def delete(self, *args, **kw): ... def options(self, *args, **kw): ... def trace(self, *args, **kw): ... def create_environ(*args, **kwargs): ... def run_wsgi_app(app, environ, buffered=False): ... 
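A minimal usage sketch of the werkzeug.test client API described by the stubs above; the hello_app WSGI application here is a hypothetical example and is not part of the stubs:

from werkzeug.test import Client, create_environ
from werkzeug.wrappers import BaseResponse

def hello_app(environ, start_response):
    # Hypothetical WSGI app used only to exercise the test client.
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'Hello, World!']

# Wrap the app in a test client; with response_wrapper set, requests
# come back as BaseResponse objects instead of raw (app_iter, status, headers).
client = Client(hello_app, response_wrapper=BaseResponse)
response = client.get('/?name=demo')
assert response.status_code == 200

# EnvironBuilder/create_environ build a WSGI environ dict directly.
environ = create_environ('/foo', base_url='http://localhost')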
mypy-0.560/typeshed/third_party/2/werkzeug/testapp.pyi0000644€tŠÔÚ€2›s®0000000037513215007212027257 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response logo = ... # type: Any TEMPLATE = ... # type: Any def iter_sys_path(): ... def render_testapp(req): ... def test_app(environ, start_response): ... mypy-0.560/typeshed/third_party/2/werkzeug/urls.pyi0000644€tŠÔÚ€2›s®0000000442413215007212026563 0ustar jukkaDROPBOX\Domain Users00000000000000from collections import namedtuple from typing import Any _URLTuple = namedtuple( '_URLTuple', ['scheme', 'netloc', 'path', 'query', 'fragment'] ) class BaseURL(_URLTuple): def replace(self, **kwargs): ... @property def host(self): ... @property def ascii_host(self): ... @property def port(self): ... @property def auth(self): ... @property def username(self): ... @property def raw_username(self): ... @property def password(self): ... @property def raw_password(self): ... def decode_query(self, *args, **kwargs): ... def join(self, *args, **kwargs): ... def to_url(self): ... def decode_netloc(self): ... def to_uri_tuple(self): ... def to_iri_tuple(self): ... def get_file_location(self, pathformat=None): ... class URL(BaseURL): def encode_netloc(self): ... def encode(self, charset='', errors=''): ... class BytesURL(BaseURL): def encode_netloc(self): ... def decode(self, charset='', errors=''): ... def url_parse(url, scheme=None, allow_fragments=True): ... def url_quote(string, charset='', errors='', safe='', unsafe=''): ... def url_quote_plus(string, charset='', errors='', safe=''): ... def url_unparse(components): ... def url_unquote(string, charset='', errors='', unsafe=''): ... def url_unquote_plus(s, charset='', errors=''): ... def url_fix(s, charset=''): ... def uri_to_iri(uri, charset='', errors=''): ... def iri_to_uri(iri, charset='', errors='', safe_conversion=False): ... def url_decode(s, charset='', decode_keys=False, include_empty=True, errors='', separator='', cls=None): ... def url_decode_stream(stream, charset='', decode_keys=False, include_empty=True, errors='', separator='', cls=None, limit=None, return_iterator=False): ... def url_encode(obj, charset='', encode_keys=False, sort=False, key=None, separator=''): ... def url_encode_stream(obj, stream=None, charset='', encode_keys=False, sort=False, key=None, separator=''): ... def url_join(base, url, allow_fragments=True): ... class Href: base = ... # type: Any charset = ... # type: Any sort = ... # type: Any key = ... # type: Any def __init__(self, base='', charset='', sort=False, key=None): ... def __getattr__(self, name): ... def __call__(self, *path, **query): ... mypy-0.560/typeshed/third_party/2/werkzeug/useragents.pyi0000644€tŠÔÚ€2›s®0000000055713215007212027761 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class UserAgentParser: platforms = ... # type: Any browsers = ... # type: Any def __init__(self): ... def __call__(self, user_agent): ... class UserAgent: string = ... # type: Any def __init__(self, environ_or_string): ... def to_header(self): ... def __nonzero__(self): ... __bool__ = ... # type: Any mypy-0.560/typeshed/third_party/2/werkzeug/utils.pyi0000644€tŠÔÚ€2›s®0000000313313215007212026732 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug._internal import _DictAccessorProperty class cached_property(property): __name__ = ... # type: Any __module__ = ... # type: Any __doc__ = ... # type: Any func = ... 
# type: Any def __init__(self, func, name=None, doc=None): ... def __set__(self, obj, value): ... def __get__(self, obj, type=None): ... class environ_property(_DictAccessorProperty): read_only = ... # type: Any def lookup(self, obj): ... class header_property(_DictAccessorProperty): def lookup(self, obj): ... class HTMLBuilder: def __init__(self, dialect): ... def __call__(self, s): ... def __getattr__(self, tag): ... html = ... # type: Any xhtml = ... # type: Any def get_content_type(mimetype, charset): ... def format_string(string, context): ... def secure_filename(filename): ... def escape(s, quote=None): ... def unescape(s): ... def redirect(location, code=302, Response=None): ... def append_slash_redirect(environ, code=301): ... def import_string(import_name, silent=False): ... def find_modules(import_path, include_packages=False, recursive=False): ... def validate_arguments(func, args, kwargs, drop_extra=True): ... def bind_arguments(func, args, kwargs): ... class ArgumentValidationError(ValueError): missing = ... # type: Any extra = ... # type: Any extra_positional = ... # type: Any def __init__(self, missing=None, extra=None, extra_positional=None): ... class ImportStringError(ImportError): import_name = ... # type: Any exception = ... # type: Any def __init__(self, import_name, exception): ... mypy-0.560/typeshed/third_party/2/werkzeug/wrappers.pyi0000644€tŠÔÚ€2›s®0000001641113215007212027440 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( Any, Iterable, Mapping, Optional, Sequence, Tuple, Type, Union, ) from .datastructures import ( CombinedMultiDict, EnvironHeaders, Headers, ImmutableMultiDict, MultiDict, TypeConversionDict, ) class BaseRequest: charset = ... # type: str encoding_errors = ... # type: str max_content_length = ... # type: Union[int, long] max_form_memory_size = ... # type: Union[int, long] parameter_storage_class = ... # type: Type list_storage_class = ... # type: Type dict_storage_class = ... # type: Type form_data_parser_class = ... # type: Type trusted_hosts = ... # type: Optional[Sequence[unicode]] disable_data_descriptor = ... # type: Any environ = ... # type: Mapping[str, object] shallow = ... # type: Any def __init__(self, environ: Mapping[basestring, object], populate_request: bool = ..., shallow: bool = ...) -> None: ... @property def url_charset(self) -> str: ... @classmethod def from_values(cls, *args, **kwargs) -> 'BaseRequest': ... @classmethod def application(cls, f): ... @property def want_form_data_parsed(self): ... def make_form_data_parser(self): ... def close(self) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, tb): ... def stream(self): ... input_stream = ... # type: Any args = ... # type: ImmutableMultiDict def data(self): ... def get_data(self, cache: bool = ..., as_text: bool = ..., parse_form_data: bool = ...) -> str: ... form = ... # type: ImmutableMultiDict values = ... # type: CombinedMultiDict files = ... # type: MultiDict cookies = ... # type: TypeConversionDict headers = ... # type: EnvironHeaders path = ... # type: unicode full_path = ... # type: unicode script_root = ... # type: unicode url = ... # type: unicode base_url = ... # type: unicode url_root = ... # type: unicode host_url = ... # type: unicode host = ... # type: unicode query_string = ... # type: str method = ... # type: str def access_route(self): ... @property def remote_addr(self) -> str: ... remote_user = ... # type: unicode scheme = ... # type: str is_xhr = ... # type: bool is_secure = ... 
# type: bool is_multithread = ... # type: bool is_multiprocess = ... # type: bool is_run_once = ... # type: bool class BaseResponse: charset = ... # type: str default_status = ... # type: int default_mimetype = ... # type: str implicit_sequence_conversion = ... # type: bool autocorrect_location_header = ... # type: bool automatically_set_content_length = ... # type: bool headers = ... # type: Headers status_code = ... # type: int status = ... # type: str direct_passthrough = ... # type: bool response = ... # type: Iterable[str] def __init__(self, response: Optional[Union[Iterable[str], str]] = ..., status: Optional[Union[basestring, int]] = ..., headers: Optional[Union[Headers, Mapping[basestring, basestring], Sequence[Tuple[basestring, basestring]]]] = None, mimetype: Optional[basestring] = ..., content_type: Optional[basestring] = ..., direct_passthrough: Optional[bool] = ...) -> None: ... def call_on_close(self, func): ... @classmethod def force_type(cls, response, environ=None): ... @classmethod def from_app(cls, app, environ, buffered=False): ... def get_data(self, as_text=False): ... def set_data(self, value): ... data = ... # type: Any def calculate_content_length(self): ... def make_sequence(self): ... def iter_encoded(self): ... def set_cookie(self, key, value='', max_age=None, expires=None, path='', domain=None, secure=False, httponly=False): ... def delete_cookie(self, key, path='', domain=None): ... @property def is_streamed(self) -> bool: ... @property def is_sequence(self) -> bool: ... def close(self) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, tb): ... def freeze(self): ... def get_wsgi_headers(self, environ): ... def get_app_iter(self, environ): ... def get_wsgi_response(self, environ): ... def __call__(self, environ, start_response): ... class AcceptMixin: def accept_mimetypes(self): ... def accept_charsets(self): ... def accept_encodings(self): ... def accept_languages(self): ... class ETagRequestMixin: def cache_control(self): ... def if_match(self): ... def if_none_match(self): ... def if_modified_since(self): ... def if_unmodified_since(self): ... def if_range(self): ... def range(self): ... class UserAgentMixin: def user_agent(self): ... class AuthorizationMixin: def authorization(self): ... class StreamOnlyMixin: disable_data_descriptor = ... # type: Any want_form_data_parsed = ... # type: Any class ETagResponseMixin: @property def cache_control(self): ... status_code = ... # type: Any def make_conditional(self, request_or_environ, accept_ranges=False, complete_length=None): ... def add_etag(self, overwrite=False, weak=False): ... def set_etag(self, etag, weak=False): ... def get_etag(self): ... def freeze(self, no_etag=False): ... accept_ranges = ... # type: Any content_range = ... # type: Any class ResponseStream: mode = ... # type: Any response = ... # type: Any closed = ... # type: Any def __init__(self, response): ... def write(self, value): ... def writelines(self, seq): ... def close(self): ... def flush(self): ... def isatty(self): ... @property def encoding(self): ... class ResponseStreamMixin: def stream(self): ... class CommonRequestDescriptorsMixin: content_type = ... # type: Any def content_length(self): ... content_encoding = ... # type: Any content_md5 = ... # type: Any referrer = ... # type: Any date = ... # type: Any max_forwards = ... # type: Any @property def mimetype(self): ... @property def mimetype_params(self): ... def pragma(self): ... class CommonResponseDescriptorsMixin: mimetype = ... 
# type: Any mimetype_params = ... # type: Any location = ... # type: Any age = ... # type: Any content_type = ... # type: Any content_length = ... # type: Any content_location = ... # type: Any content_encoding = ... # type: Any content_md5 = ... # type: Any date = ... # type: Any expires = ... # type: Any last_modified = ... # type: Any retry_after = ... # type: Any vary = ... # type: Any content_language = ... # type: Any allow = ... # type: Any class WWWAuthenticateMixin: @property def www_authenticate(self): ... class Request(BaseRequest, AcceptMixin, ETagRequestMixin, UserAgentMixin, AuthorizationMixin, CommonRequestDescriptorsMixin): ... class PlainRequest(StreamOnlyMixin, Request): ... class Response(ETagResponseMixin, BaseResponse, ResponseStreamMixin, CommonResponseDescriptorsMixin, WWWAuthenticateMixin): ... # FIXME: This is invalid but works around https://github.com/pallets/werkzeug/issues/1052 mypy-0.560/typeshed/third_party/2/werkzeug/wsgi.pyi0000644€tŠÔÚ€2›s®0000000576113215007212026554 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def responder(f): ... def get_current_url(environ, root_only=False, strip_querystring=False, host_only=False, trusted_hosts=None): ... def host_is_trusted(hostname, trusted_list): ... def get_host(environ, trusted_hosts=None): ... def get_content_length(environ): ... def get_input_stream(environ, safe_fallback=True): ... def get_query_string(environ): ... def get_path_info(environ, charset='', errors=''): ... def get_script_name(environ, charset='', errors=''): ... def pop_path_info(environ, charset='', errors=''): ... def peek_path_info(environ, charset='', errors=''): ... def extract_path_info(environ_or_baseurl, path_or_url, charset='', errors='', collapse_http_schemes=True): ... class SharedDataMiddleware: app = ... # type: Any exports = ... # type: Any cache = ... # type: Any cache_timeout = ... # type: Any fallback_mimetype = ... # type: Any def __init__(self, app, exports, disallow=None, cache=True, cache_timeout=..., fallback_mimetype=''): ... def is_allowed(self, filename): ... def get_file_loader(self, filename): ... def get_package_loader(self, package, package_path): ... def get_directory_loader(self, directory): ... def generate_etag(self, mtime, file_size, real_filename): ... def __call__(self, environ, start_response): ... class DispatcherMiddleware: app = ... # type: Any mounts = ... # type: Any def __init__(self, app, mounts=None): ... def __call__(self, environ, start_response): ... class ClosingIterator: def __init__(self, iterable, callbacks=None): ... def __iter__(self): ... def __next__(self): ... def close(self): ... def wrap_file(environ, file, buffer_size=8192): ... class FileWrapper: file = ... # type: Any buffer_size = ... # type: Any def __init__(self, file, buffer_size=8192): ... def close(self): ... def seekable(self): ... def seek(self, *args): ... def tell(self): ... def __iter__(self): ... def __next__(self): ... class _RangeWrapper: iterable = ... # type: Any byte_range = ... # type: Any start_byte = ... # type: Any end_byte = ... # type: Any read_length = ... # type: Any seekable = ... # type: Any end_reached = ... # type: Any def __init__(self, iterable, start_byte=0, byte_range=None): ... def __iter__(self): ... def __next__(self): ... def close(self): ... def make_line_iter(stream, limit=None, buffer_size=..., cap_at_buffer=False): ... def make_chunk_iter(stream, separator, limit=None, buffer_size=..., cap_at_buffer=False): ... class LimitedStream: limit = ... 
# type: Any def __init__(self, stream, limit): ... def __iter__(self): ... @property def is_exhausted(self): ... def on_exhausted(self): ... def on_disconnect(self): ... def exhaust(self, chunk_size=...): ... def read(self, size=None): ... def readline(self, size=None): ... def readlines(self, size=None): ... def tell(self): ... def __next__(self): ... mypy-0.560/typeshed/third_party/2and3/0000755€tŠÔÚ€2›s®0000000000013215007244023757 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/atomicwrites/0000755€tŠÔÚ€2›s®0000000000013215007244026471 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/atomicwrites/__init__.pyi0000644€tŠÔÚ€2›s®0000000135613215007212030753 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import AnyStr, Callable, ContextManager, IO, Optional, Text, Type def replace_atomic(src: AnyStr, dst: AnyStr) -> None: ... def move_atomic(src: AnyStr, dst: AnyStr) -> None: ... class AtomicWriter(object): def __init__(self, path: AnyStr, mode: Text = ..., overwrite: bool = ...) -> None: ... def open(self) -> ContextManager[IO]: ... def _open(self, get_fileobject: Callable) -> ContextManager[IO]: ... def get_fileobject(self, dir: Optional[AnyStr] = ..., **kwargs) -> IO: ... def sync(self, f: IO) -> None: ... def commit(self, f: IO) -> None: ... def rollback(self, f: IO) -> None: ... def atomic_write(path: AnyStr, writer_cls: Type[AtomicWriter] = ..., **cls_kwargs: object) -> ContextManager[IO]: ... mypy-0.560/typeshed/third_party/2and3/backports/0000755€tŠÔÚ€2›s®0000000000013215007244025747 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/backports/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030212 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/backports/ssl_match_hostname.pyi0000644€tŠÔÚ€2›s®0000000012113215007212032332 0ustar jukkaDROPBOX\Domain Users00000000000000class CertificateError(ValueError): ... def match_hostname(cert, hostname): ... mypy-0.560/typeshed/third_party/2and3/backports_abc.pyi0000644€tŠÔÚ€2›s®0000000041513215007212027272 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def mk_gen(): ... def mk_awaitable(): ... def mk_coroutine(): ... Generator = ... # type: Any Awaitable = ... # type: Any Coroutine = ... # type: Any def isawaitable(obj): ... PATCHED = ... # type: Any def patch(patch_inspect=True): ... mypy-0.560/typeshed/third_party/2and3/boto/0000755€tŠÔÚ€2›s®0000000000013215007244024722 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/boto/__init__.pyi0000644€tŠÔÚ€2›s®0000001572613215007212027212 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Text import logging from .s3.connection import S3Connection Version = ... # type: Any UserAgent = ... # type: Any config = ... # type: Any BUCKET_NAME_RE = ... # type: Any TOO_LONG_DNS_NAME_COMP = ... # type: Any GENERATION_RE = ... # type: Any VERSION_RE = ... # type: Any ENDPOINTS_PATH = ... # type: Any def init_logging(): ... class NullHandler(logging.Handler): def emit(self, record): ... log = ... # type: Any perflog = ... # type: Any def set_file_logger(name, filepath, level: Any = ..., format_string: Optional[Any] = ...): ... def set_stream_logger(name, level: Any = ..., format_string: Optional[Any] = ...): ... def connect_sqs(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... 
def connect_s3(aws_access_key_id: Optional[Text] = ..., aws_secret_access_key: Optional[Text] = ..., **kwargs) -> S3Connection: ... def connect_gs(gs_access_key_id: Optional[Any] = ..., gs_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_ec2(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_elb(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_autoscale(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cloudwatch(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_sdb(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_fps(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_mturk(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cloudfront(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_vpc(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_rds(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_rds2(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_emr(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_sns(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_iam(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_route53(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cloudformation(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_euca(host: Optional[Any] = ..., aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., port: int = ..., path: str = ..., is_secure: bool = ..., **kwargs): ... def connect_glacier(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_ec2_endpoint(url, aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_walrus(host: Optional[Any] = ..., aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., port: int = ..., path: str = ..., is_secure: bool = ..., **kwargs): ... def connect_ses(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_sts(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_ia(ia_access_key_id: Optional[Any] = ..., ia_secret_access_key: Optional[Any] = ..., is_secure: bool = ..., **kwargs): ... def connect_dynamodb(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_swf(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cloudsearch(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... 
def connect_cloudsearch2(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., sign_request: bool = ..., **kwargs): ... def connect_cloudsearchdomain(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_beanstalk(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_elastictranscoder(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_opsworks(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_redshift(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_support(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cloudtrail(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_directconnect(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_kinesis(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_logs(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_route53domains(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cognito_identity(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cognito_sync(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_kms(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_awslambda(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_codedeploy(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_configservice(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_cloudhsm(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_ec2containerservice(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def connect_machinelearning(aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., **kwargs): ... def storage_uri(uri_str, default_scheme: str = ..., debug: int = ..., validate: bool = ..., bucket_storage_uri_class: Any = ..., suppress_consec_slashes: bool = ..., is_latest: bool = ...): ... def storage_uri_for_key(key): ... mypy-0.560/typeshed/third_party/2and3/boto/auth.pyi0000644€tŠÔÚ€2›s®0000001050413215007212026401 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from boto.auth_handler import AuthHandler SIGV4_DETECT = ... # type: Any class HmacKeys: host = ... # type: Any def __init__(self, host, config, provider) -> None: ... def update_provider(self, provider): ... def algorithm(self): ... def sign_string(self, string_to_sign): ... class AnonAuthHandler(AuthHandler, HmacKeys): capability = ... # type: Any def __init__(self, host, config, provider) -> None: ... def add_auth(self, http_request, **kwargs): ... class HmacAuthV1Handler(AuthHandler, HmacKeys): capability = ... 
# type: Any def __init__(self, host, config, provider) -> None: ... def update_provider(self, provider): ... def add_auth(self, http_request, **kwargs): ... class HmacAuthV2Handler(AuthHandler, HmacKeys): capability = ... # type: Any def __init__(self, host, config, provider) -> None: ... def update_provider(self, provider): ... def add_auth(self, http_request, **kwargs): ... class HmacAuthV3Handler(AuthHandler, HmacKeys): capability = ... # type: Any def __init__(self, host, config, provider) -> None: ... def add_auth(self, http_request, **kwargs): ... class HmacAuthV3HTTPHandler(AuthHandler, HmacKeys): capability = ... # type: Any def __init__(self, host, config, provider) -> None: ... def headers_to_sign(self, http_request): ... def canonical_headers(self, headers_to_sign): ... def string_to_sign(self, http_request): ... def add_auth(self, req, **kwargs): ... class HmacAuthV4Handler(AuthHandler, HmacKeys): capability = ... # type: Any service_name = ... # type: Any region_name = ... # type: Any def __init__(self, host, config, provider, service_name: Optional[Any] = ..., region_name: Optional[Any] = ...) -> None: ... def headers_to_sign(self, http_request): ... def host_header(self, host, http_request): ... def query_string(self, http_request): ... def canonical_query_string(self, http_request): ... def canonical_headers(self, headers_to_sign): ... def signed_headers(self, headers_to_sign): ... def canonical_uri(self, http_request): ... def payload(self, http_request): ... def canonical_request(self, http_request): ... def scope(self, http_request): ... def split_host_parts(self, host): ... def determine_region_name(self, host): ... def determine_service_name(self, host): ... def credential_scope(self, http_request): ... def string_to_sign(self, http_request, canonical_request): ... def signature(self, http_request, string_to_sign): ... def add_auth(self, req, **kwargs): ... class S3HmacAuthV4Handler(HmacAuthV4Handler, AuthHandler): capability = ... # type: Any region_name = ... # type: Any def __init__(self, *args, **kwargs) -> None: ... def clean_region_name(self, region_name): ... def canonical_uri(self, http_request): ... def canonical_query_string(self, http_request): ... def host_header(self, host, http_request): ... def headers_to_sign(self, http_request): ... def determine_region_name(self, host): ... def determine_service_name(self, host): ... def mangle_path_and_params(self, req): ... def payload(self, http_request): ... def add_auth(self, req, **kwargs): ... def presign(self, req, expires, iso_date: Optional[Any] = ...): ... class STSAnonHandler(AuthHandler): capability = ... # type: Any def add_auth(self, http_request, **kwargs): ... class QuerySignatureHelper(HmacKeys): def add_auth(self, http_request, **kwargs): ... class QuerySignatureV0AuthHandler(QuerySignatureHelper, AuthHandler): SignatureVersion = ... # type: int capability = ... # type: Any class QuerySignatureV1AuthHandler(QuerySignatureHelper, AuthHandler): SignatureVersion = ... # type: int capability = ... # type: Any def __init__(self, *args, **kw) -> None: ... class QuerySignatureV2AuthHandler(QuerySignatureHelper, AuthHandler): SignatureVersion = ... # type: int capability = ... # type: Any class POSTPathQSV2AuthHandler(QuerySignatureV2AuthHandler, AuthHandler): capability = ... # type: Any def add_auth(self, req, **kwargs): ... def get_auth_handler(host, config, provider, requested_capability: Optional[Any] = ...): ... def detect_potential_sigv4(func): ... def detect_potential_s3sigv4(func): ... 
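A hedged sketch of how the handlers stubbed above get selected: boto.auth.get_auth_handler returns a handler matching a requested capability. boto.provider.Provider is not part of these stubs and is assumed to be configured with credentials:

import boto
from boto.auth import get_auth_handler
from boto.provider import Provider  # assumption: credentials come from the env or boto config

provider = Provider('aws')
# Ask for a SigV4-capable handler for S3 (the capability advertised by
# S3HmacAuthV4Handler); raises NoAuthHandlerFound if nothing matches.
handler = get_auth_handler('s3.amazonaws.com', boto.config, provider,
                           requested_capability=['hmac-v4-s3'])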
mypy-0.560/typeshed/third_party/2and3/boto/auth_handler.pyi0000644€tŠÔÚ€2›s®0000000041013215007212030071 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from boto.plugin import Plugin class NotReadyToAuthenticate(Exception): ... class AuthHandler(Plugin): capability = ... # type: Any def __init__(self, host, config, provider) -> None: ... def add_auth(self, http_request): ... mypy-0.560/typeshed/third_party/2and3/boto/compat.pyi0000644€tŠÔÚ€2›s®0000000040613215007212026723 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from base64 import encodestring as encodebytes from six.moves import http_client expanduser = ... # type: Any StandardError = ... # type: Any long_type = ... # type: Any unquote_str = ... # type: Any parse_qs_safe = ... # type: Any mypy-0.560/typeshed/third_party/2and3/boto/connection.pyi0000644€tŠÔÚ€2›s®0000001344213215007212027603 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Optional, Text from six.moves import http_client HAVE_HTTPS_CONNECTION = ... # type: bool ON_APP_ENGINE = ... # type: Any PORTS_BY_SECURITY = ... # type: Any DEFAULT_CA_CERTS_FILE = ... # type: Any class HostConnectionPool: queue = ... # type: Any def __init__(self) -> None: ... def size(self): ... def put(self, conn): ... def get(self): ... def clean(self): ... class ConnectionPool: CLEAN_INTERVAL = ... # type: float STALE_DURATION = ... # type: float host_to_pool = ... # type: Any last_clean_time = ... # type: float mutex = ... # type: Any def __init__(self) -> None: ... def size(self): ... def get_http_connection(self, host, port, is_secure): ... def put_http_connection(self, host, port, is_secure, conn): ... def clean(self): ... class HTTPRequest: method = ... # type: Any protocol = ... # type: Any host = ... # type: Any port = ... # type: Any path = ... # type: Any auth_path = ... # type: Any params = ... # type: Any headers = ... # type: Any body = ... # type: Any def __init__(self, method, protocol, host, port, path, auth_path, params, headers, body) -> None: ... def authorize(self, connection, **kwargs): ... class HTTPResponse(http_client.HTTPResponse): def __init__(self, *args, **kwargs) -> None: ... def read(self, amt: Optional[Any] = ...): ... class AWSAuthConnection: suppress_consec_slashes = ... # type: Any num_retries = ... # type: int is_secure = ... # type: Any https_validate_certificates = ... # type: Any ca_certificates_file = ... # type: Any port = ... # type: Any http_exceptions = ... # type: Any http_unretryable_exceptions = ... # type: Any socket_exception_values = ... # type: Any https_connection_factory = ... # type: Any protocol = ... # type: str host = ... # type: Any path = ... # type: Any debug = ... # type: Any host_header = ... # type: Any http_connection_kwargs = ... # type: Any provider = ... # type: Any auth_service_name = ... # type: Any request_hook = ... # type: Any def __init__(self, host, aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., is_secure: bool = ..., port: Optional[Any] = ..., proxy: Optional[Any] = ..., proxy_port: Optional[Any] = ..., proxy_user: Optional[Any] = ..., proxy_pass: Optional[Any] = ..., debug: int = ..., https_connection_factory: Optional[Any] = ..., path: str = ..., provider: str = ..., security_token: Optional[Any] = ..., suppress_consec_slashes: bool = ..., validate_certs: bool = ..., profile_name: Optional[Any] = ...) -> None: ... auth_region_name = ... # type: Any @property def connection(self): ... 
@property def aws_access_key_id(self): ... @property def gs_access_key_id(self): ... # type: Any access_key = ... # type: Any @property def aws_secret_access_key(self): ... @property def gs_secret_access_key(self): ... secret_key = ... # type: Any @property def profile_name(self): ... def get_path(self, path: str = ...): ... def server_name(self, port: Optional[Any] = ...): ... proxy = ... # type: Any proxy_port = ... # type: Any proxy_user = ... # type: Any proxy_pass = ... # type: Any no_proxy = ... # type: Any use_proxy = ... # type: Any def handle_proxy(self, proxy, proxy_port, proxy_user, proxy_pass): ... def get_http_connection(self, host, port, is_secure): ... def skip_proxy(self, host): ... def new_http_connection(self, host, port, is_secure): ... def put_http_connection(self, host, port, is_secure, connection): ... def proxy_ssl(self, host: Optional[Any] = ..., port: Optional[Any] = ...): ... def prefix_proxy_to_path(self, path, host: Optional[Any] = ...): ... def get_proxy_auth_header(self): ... def get_proxy_url_with_auth(self): ... def set_host_header(self, request): ... def set_request_hook(self, hook): ... def build_base_http_request(self, method, path, auth_path, params: Optional[Any] = ..., headers: Optional[Any] = ..., data: str = ..., host: Optional[Any] = ...): ... def make_request(self, method, path, headers: Optional[Any] = ..., data: str = ..., host: Optional[Any] = ..., auth_path: Optional[Any] = ..., sender: Optional[Any] = ..., override_num_retries: Optional[Any] = ..., params: Optional[Any] = ..., retry_handler: Optional[Any] = ...): ... def close(self): ... class AWSQueryConnection(AWSAuthConnection): APIVersion = ... # type: str ResponseError = ... # type: Any def __init__(self, aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., is_secure: bool = ..., port: Optional[Any] = ..., proxy: Optional[Any] = ..., proxy_port: Optional[Any] = ..., proxy_user: Optional[Any] = ..., proxy_pass: Optional[Any] = ..., host: Optional[Any] = ..., debug: int = ..., https_connection_factory: Optional[Any] = ..., path: str = ..., security_token: Optional[Any] = ..., validate_certs: bool = ..., profile_name: Optional[Any] = ..., provider: str = ...) -> None: ... def get_utf8_value(self, value): ... def make_request(self, action, params: Optional[Any] = ..., path: str = ..., verb: str = ..., *args, **kwargs): ... # type: ignore # https://github.com/python/mypy/issues/1237 def build_list_params(self, params, items, label): ... def build_complex_list_params(self, params, items, label, names): ... def get_list(self, action, params, markers, path: str = ..., parent: Optional[Any] = ..., verb: str = ...): ... def get_object(self, action, params, cls, path: str = ..., parent: Optional[Any] = ..., verb: str = ...): ... def get_status(self, action, params, path: str = ..., parent: Optional[Any] = ..., verb: str = ...): ... mypy-0.560/typeshed/third_party/2and3/boto/ec2/0000755€tŠÔÚ€2›s®0000000000013215007244025373 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/boto/ec2/__init__.pyi0000644€tŠÔÚ€2›s®0000000027013215007212027647 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any RegionData = ... # type: Any def regions(**kw_params): ... def connect_to_region(region_name, **kw_params): ... def get_region(region_name, **kw_params): ... 
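A brief usage sketch for the EC2 region helpers stubbed above, assuming AWS credentials are available via the environment or boto config:

import boto.ec2

# List all known EC2 regions, then open a connection to one of them.
for region in boto.ec2.regions():
    print(region.name)

# Returns an EC2Connection, or None if the region name is unknown.
conn = boto.ec2.connect_to_region('us-east-1')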
mypy-0.560/typeshed/third_party/2and3/boto/elb/0000755€tŠÔÚ€2›s®0000000000013215007244025464 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/boto/elb/__init__.pyi0000644€tŠÔÚ€2›s®0000000476513215007212027755 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from boto.connection import AWSQueryConnection RegionData = ... # type: Any def regions(): ... def connect_to_region(region_name, **kw_params): ... class ELBConnection(AWSQueryConnection): APIVersion = ... # type: Any DefaultRegionName = ... # type: Any DefaultRegionEndpoint = ... # type: Any region = ... # type: Any def __init__(self, aws_access_key_id=..., aws_secret_access_key=..., is_secure=..., port=..., proxy=..., proxy_port=..., proxy_user=..., proxy_pass=..., debug=..., https_connection_factory=..., region=..., path=..., security_token=..., validate_certs=..., profile_name=...) -> None: ... def build_list_params(self, params, items, label): ... def get_all_load_balancers(self, load_balancer_names=..., marker=...): ... def create_load_balancer(self, name, zones, listeners=..., subnets=..., security_groups=..., scheme=..., complex_listeners=...): ... def create_load_balancer_listeners(self, name, listeners=..., complex_listeners=...): ... def delete_load_balancer(self, name): ... def delete_load_balancer_listeners(self, name, ports): ... def enable_availability_zones(self, load_balancer_name, zones_to_add): ... def disable_availability_zones(self, load_balancer_name, zones_to_remove): ... def modify_lb_attribute(self, load_balancer_name, attribute, value): ... def get_all_lb_attributes(self, load_balancer_name): ... def get_lb_attribute(self, load_balancer_name, attribute): ... def register_instances(self, load_balancer_name, instances): ... def deregister_instances(self, load_balancer_name, instances): ... def describe_instance_health(self, load_balancer_name, instances=...): ... def configure_health_check(self, name, health_check): ... def set_lb_listener_SSL_certificate(self, lb_name, lb_port, ssl_certificate_id): ... def create_app_cookie_stickiness_policy(self, name, lb_name, policy_name): ... def create_lb_cookie_stickiness_policy(self, cookie_expiration_period, lb_name, policy_name): ... def create_lb_policy(self, lb_name, policy_name, policy_type, policy_attributes): ... def delete_lb_policy(self, lb_name, policy_name): ... def set_lb_policies_of_listener(self, lb_name, lb_port, policies): ... def set_lb_policies_of_backend_server(self, lb_name, instance_port, policies): ... def apply_security_groups_to_lb(self, name, security_groups): ... def attach_lb_to_subnets(self, name, subnets): ... def detach_lb_from_subnets(self, name, subnets): ... mypy-0.560/typeshed/third_party/2and3/boto/exception.pyi0000644€tŠÔÚ€2›s®0000001210013215007212027430 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from boto.compat import StandardError class BotoClientError(StandardError): reason = ... # type: Any def __init__(self, reason, *args) -> None: ... class SDBPersistenceError(StandardError): ... class StoragePermissionsError(BotoClientError): ... class S3PermissionsError(StoragePermissionsError): ... class GSPermissionsError(StoragePermissionsError): ... class BotoServerError(StandardError): status = ... # type: Any reason = ... # type: Any body = ... # type: Any request_id = ... # type: Any error_code = ... # type: Any message = ... # type: str box_usage = ... # type: Any def __init__(self, status, reason, body: Optional[Any] = ..., *args) -> None: ... 
def __getattr__(self, name): ... def __setattr__(self, name, value): ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class ConsoleOutput: parent = ... # type: Any instance_id = ... # type: Any timestamp = ... # type: Any comment = ... # type: Any output = ... # type: Any def __init__(self, parent: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class StorageCreateError(BotoServerError): bucket = ... # type: Any def __init__(self, status, reason, body: Optional[Any] = ...) -> None: ... def endElement(self, name, value, connection): ... class S3CreateError(StorageCreateError): ... class GSCreateError(StorageCreateError): ... class StorageCopyError(BotoServerError): ... class S3CopyError(StorageCopyError): ... class GSCopyError(StorageCopyError): ... class SQSError(BotoServerError): detail = ... # type: Any type = ... # type: Any def __init__(self, status, reason, body: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class SQSDecodeError(BotoClientError): message = ... # type: Any def __init__(self, reason, message) -> None: ... class StorageResponseError(BotoServerError): resource = ... # type: Any def __init__(self, status, reason, body: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class S3ResponseError(StorageResponseError): ... class GSResponseError(StorageResponseError): ... class EC2ResponseError(BotoServerError): errors = ... # type: Any def __init__(self, status, reason, body: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... request_id = ... # type: Any def endElement(self, name, value, connection): ... class JSONResponseError(BotoServerError): status = ... # type: Any reason = ... # type: Any body = ... # type: Any error_message = ... # type: Any error_code = ... # type: Any def __init__(self, status, reason, body: Optional[Any] = ..., *args) -> None: ... class DynamoDBResponseError(JSONResponseError): ... class SWFResponseError(JSONResponseError): ... class EmrResponseError(BotoServerError): ... class _EC2Error: connection = ... # type: Any error_code = ... # type: Any error_message = ... # type: Any def __init__(self, connection: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class SDBResponseError(BotoServerError): ... class AWSConnectionError(BotoClientError): ... class StorageDataError(BotoClientError): ... class S3DataError(StorageDataError): ... class GSDataError(StorageDataError): ... class InvalidUriError(Exception): message = ... # type: Any def __init__(self, message) -> None: ... class InvalidAclError(Exception): message = ... # type: Any def __init__(self, message) -> None: ... class InvalidCorsError(Exception): message = ... # type: Any def __init__(self, message) -> None: ... class NoAuthHandlerFound(Exception): ... class InvalidLifecycleConfigError(Exception): message = ... # type: Any def __init__(self, message) -> None: ... class ResumableTransferDisposition: START_OVER = ... # type: str WAIT_BEFORE_RETRY = ... # type: str ABORT_CUR_PROCESS = ... # type: str ABORT = ... # type: str class ResumableUploadException(Exception): message = ... # type: Any disposition = ... 
# type: Any def __init__(self, message, disposition) -> None: ... class ResumableDownloadException(Exception): message = ... # type: Any disposition = ... # type: Any def __init__(self, message, disposition) -> None: ... class TooManyRecordsException(Exception): message = ... # type: Any def __init__(self, message) -> None: ... class PleaseRetryException(Exception): message = ... # type: Any response = ... # type: Any def __init__(self, message, response: Optional[Any] = ...) -> None: ... class InvalidInstanceMetadataError(Exception): MSG = ... # type: str def __init__(self, msg) -> None: ... mypy-0.560/typeshed/third_party/2and3/boto/kms/0000755€tŠÔÚ€2›s®0000000000013215007244025514 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/boto/kms/__init__.pyi0000644€tŠÔÚ€2›s®0000000022113215007212027764 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List import boto def regions() -> List[boto.regioninfo.RegionInfo]: ... def connect_to_region(region_name, **kw_params): ... mypy-0.560/typeshed/third_party/2and3/boto/kms/exceptions.pyi0000644€tŠÔÚ€2›s®0000000147513215007212030422 0ustar jukkaDROPBOX\Domain Users00000000000000from boto.exception import BotoServerError class InvalidGrantTokenException(BotoServerError): ... class DisabledException(BotoServerError): ... class LimitExceededException(BotoServerError): ... class DependencyTimeoutException(BotoServerError): ... class InvalidMarkerException(BotoServerError): ... class AlreadyExistsException(BotoServerError): ... class InvalidCiphertextException(BotoServerError): ... class KeyUnavailableException(BotoServerError): ... class InvalidAliasNameException(BotoServerError): ... class UnsupportedOperationException(BotoServerError): ... class InvalidArnException(BotoServerError): ... class KMSInternalException(BotoServerError): ... class InvalidKeyUsageException(BotoServerError): ... class MalformedPolicyDocumentException(BotoServerError): ... class NotFoundException(BotoServerError): ... mypy-0.560/typeshed/third_party/2and3/boto/kms/layer1.pyi0000644€tŠÔÚ€2›s®0000000721713215007212027436 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, List, Mapping, Optional, Type from boto.connection import AWSQueryConnection class KMSConnection(AWSQueryConnection): APIVersion = ... # type: str DefaultRegionName = ... # type: str DefaultRegionEndpoint = ... # type: str ServiceName = ... # type: str TargetPrefix = ... # type: str ResponseError = ... # type: Type[Exception] region = ... # type: Any def __init__(self, **kwargs) -> None: ... def create_alias(self, alias_name: str, target_key_id: str) -> Optional[Dict[str, Any]]: ... def create_grant(self, key_id: str, grantee_principal: str, retiring_principal: Optional[str] = ..., operations: Optional[List[str]] = ..., constraints: Optional[Dict[str, Dict[str, str]]] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ... def create_key(self, policy: Optional[str] = ..., description: Optional[str] = ..., key_usage: Optional[str] = ...) -> Optional[Dict[str, Any]]: ... def decrypt(self, ciphertext_blob: bytes, encryption_context: Optional[Mapping[str, Any]] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ... def delete_alias(self, alias_name: str) -> Optional[Dict[str, Any]]: ... def describe_key(self, key_id: str) -> Optional[Dict[str, Any]]: ... def disable_key(self, key_id: str) -> Optional[Dict[str, Any]]: ... 
def disable_key_rotation(self, key_id: str) -> Optional[Dict[str, Any]]: ... def enable_key(self, key_id: str) -> Optional[Dict[str, Any]]: ... def enable_key_rotation(self, key_id: str) -> Optional[Dict[str, Any]]: ... def encrypt(self, key_id: str, plaintext: bytes, encryption_context: Optional[Mapping[str, Any]] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ... def generate_data_key(self, key_id: str, encryption_context: Optional[Mapping[str, Any]] = ..., number_of_bytes: Optional[int] = ..., key_spec: Optional[str] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ... def generate_data_key_without_plaintext(self, key_id: str, encryption_context: Optional[Mapping[str, Any]] = ..., key_spec: Optional[str] = ..., number_of_bytes: Optional[int] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ... def generate_random(self, number_of_bytes: Optional[int] = ...) -> Optional[Dict[str, Any]]: ... def get_key_policy(self, key_id: str, policy_name: str) -> Optional[Dict[str, Any]]: ... def get_key_rotation_status(self, key_id: str) -> Optional[Dict[str, Any]]: ... def list_aliases(self, limit: Optional[int] = ..., marker: Optional[str] = ...) -> Optional[Dict[str, Any]]: ... def list_grants(self, key_id: str, limit: Optional[int] = ..., marker: Optional[str] = ...) -> Optional[Dict[str, Any]]: ... def list_key_policies(self, key_id: str, limit: Optional[int] = ..., marker: Optional[str] = ...) -> Optional[Dict[str, Any]]: ... def list_keys(self, limit: Optional[int] = ..., marker: Optional[str] = ...) -> Optional[Dict[str, Any]]: ... def put_key_policy(self, key_id: str, policy_name: str, policy: str) -> Optional[Dict[str, Any]]: ... def re_encrypt(self, ciphertext_blob: bytes, destination_key_id: str, source_encryption_context: Optional[Mapping[str, Any]] = ..., destination_encryption_context: Optional[Mapping[str, Any]] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ... def retire_grant(self, grant_token: str) -> Optional[Dict[str, Any]]: ... def revoke_grant(self, key_id: str, grant_id: str) -> Optional[Dict[str, Any]]: ... def update_key_description(self, key_id: str, description: str) -> Optional[Dict[str, Any]]: ... mypy-0.560/typeshed/third_party/2and3/boto/plugin.pyi0000644€tŠÔÚ€2›s®0000000037113215007212026737 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Plugin: capability = ... # type: Any @classmethod def is_capable(cls, requested_capability): ... def get_plugin(cls, requested_capability: Optional[Any] = ...): ... def load_plugins(config): ... mypy-0.560/typeshed/third_party/2and3/boto/regioninfo.pyi0000644€tŠÔÚ€2›s®0000000130613215007212027577 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional def load_endpoint_json(path): ... def merge_endpoints(defaults, additions): ... def load_regions(): ... def get_regions(service_name, region_cls: Optional[Any] = ..., connection_cls: Optional[Any] = ...): ... class RegionInfo: connection = ... # type: Any name = ... # type: Any endpoint = ... # type: Any connection_cls = ... # type: Any def __init__(self, connection: Optional[Any] = ..., name: Optional[Any] = ..., endpoint: Optional[Any] = ..., connection_cls: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def connect(self, **kw_params): ... 
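A short sketch tying together the KMS and RegionInfo stubs above, again assuming configured credentials; the key description and the assumed KeyMetadata response shape are illustrative only:

import boto.kms

# regions() yields RegionInfo objects; connect_to_region opens a KMSConnection.
for region in boto.kms.regions():
    print(region.name, region.endpoint)

kms = boto.kms.connect_to_region('us-east-1')
if kms is not None:
    # Per the layer1 stubs, create_key/describe_key return plain dicts.
    key = kms.create_key(description='example key (illustrative)')
    info = kms.describe_key(key['KeyMetadata']['KeyId'])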
mypy-0.560/typeshed/third_party/2and3/boto/s3/0000755€tŠÔÚ€2›s®0000000000013215007244025247 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/boto/s3/__init__.pyi0000644€tŠÔÚ€2›s®0000000075713215007212027535 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Optional from .connection import S3Connection from boto.connection import AWSAuthConnection from boto.regioninfo import RegionInfo from typing import List, Type, Text class S3RegionInfo(RegionInfo): def connect(self, name: Optional[Text] = ..., endpoint: Optional[str] = ..., connection_cls: Optional[Type[AWSAuthConnection]] = ..., **kw_params) -> S3Connection: ... def regions() -> List[S3RegionInfo]: ... def connect_to_region(region_name: Text, **kw_params): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/acl.pyi0000644€tŠÔÚ€2›s®0000000343213215007212026526 0ustar jukkaDROPBOX\Domain Users00000000000000from .connection import S3Connection from .user import User from typing import Any, Dict, Optional, List, Text, Union CannedACLStrings = ... # type: List[str] class Policy: parent = ... # type: Any namespace = ... # type: Any acl = ... # type: ACL def __init__(self, parent: Optional[Any] = ...) -> None: ... owner = ... # type: User def startElement(self, name: Text, attrs: Dict[str, Any], connection: S3Connection) -> Union[None, User, ACL]: ... def endElement(self, name: Text, value: Any, connection: S3Connection) -> None: ... def to_xml(self) -> str: ... class ACL: policy = ... # type: Policy grants = ... # type: List[Grant] def __init__(self, policy: Optional[Policy] = ...) -> None: ... def add_grant(self, grant: Grant) -> None: ... def add_email_grant(self, permission: Text, email_address: Text) -> None: ... def add_user_grant(self, permission: Text, user_id: Text, display_name: Optional[Text] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name: Text, value: Any, connection: S3Connection) -> None: ... def to_xml(self) -> str: ... class Grant: NameSpace = ... # type: Text permission = ... # type: Text id = ... # type: Text display_name = ... # type: Text uri = ... # type: Text email_address = ... # type: Text type = ... # type: Text def __init__(self, permission: Optional[Text] = ..., type: Optional[Text] = ..., id: Optional[Text] = ..., display_name: Optional[Text] = ..., uri: Optional[Text] = ..., email_address: Optional[Text] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name: Text, value: Any, connection: S3Connection) -> None: ... def to_xml(self) -> str: ... mypy-0.560/typeshed/third_party/2and3/boto/s3/bucket.pyi0000644€tŠÔÚ€2›s®0000001777013215007212027256 0ustar jukkaDROPBOX\Domain Users00000000000000from .bucketlistresultset import BucketListResultSet from .connection import S3Connection from .key import Key from typing import Any, Dict, Optional, Text, Type, List class S3WebsiteEndpointTranslate: trans_region = ... # type: Dict[str, str] @classmethod def translate_region(self, reg: Text) -> str: ... S3Permissions = ... # type: List[str] class Bucket: LoggingGroup = ... # type: str BucketPaymentBody = ... # type: str VersioningBody = ... # type: str VersionRE = ... # type: str MFADeleteRE = ... # type: str name = ... # type: Text connection = ... # type: S3Connection key_class = ... # type: Type[Key] def __init__(self, connection: Optional[S3Connection] = ..., name: Optional[Text] = ..., key_class: Type[Key] = ...) -> None: ... def __iter__(self): ... 
def __contains__(self, key_name) -> bool: ... def startElement(self, name, attrs, connection): ... creation_date = ... # type: Any def endElement(self, name, value, connection): ... def set_key_class(self, key_class): ... def lookup(self, key_name, headers: Optional[Dict[Text, Text]] = ...): ... def get_key(self, key_name, headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ..., response_headers: Optional[Dict[Text, Text]] = ..., validate: bool = ...) -> Key: ... def list(self, prefix: Text = ..., delimiter: Text = ..., marker: Text = ..., headers: Optional[Dict[Text, Text]] = ..., encoding_type: Optional[Any] = ...) -> BucketListResultSet: ... def list_versions(self, prefix: str = ..., delimiter: str = ..., key_marker: str = ..., version_id_marker: str = ..., headers: Optional[Dict[Text, Text]] = ..., encoding_type: Optional[Text] = ...) -> BucketListResultSet: ... def list_multipart_uploads(self, key_marker: str = ..., upload_id_marker: str = ..., headers: Optional[Dict[Text, Text]] = ..., encoding_type: Optional[Any] = ...): ... def validate_kwarg_names(self, kwargs, names): ... def get_all_keys(self, headers: Optional[Dict[Text, Text]] = ..., **params): ... def get_all_versions(self, headers: Optional[Dict[Text, Text]] = ..., **params): ... def validate_get_all_versions_params(self, params): ... def get_all_multipart_uploads(self, headers: Optional[Dict[Text, Text]] = ..., **params): ... def new_key(self, key_name: Optional[Any] = ...): ... def generate_url(self, expires_in, method: str = ..., headers: Optional[Dict[Text, Text]] = ..., force_http: bool = ..., response_headers: Optional[Dict[Text, Text]] = ..., expires_in_absolute: bool = ...): ... def delete_keys(self, keys, quiet: bool = ..., mfa_token: Optional[Any] = ..., headers: Optional[Dict[Text, Text]] = ...): ... def delete_key(self, key_name, headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ..., mfa_token: Optional[Any] = ...): ... def copy_key(self, new_key_name, src_bucket_name, src_key_name, metadata: Optional[Any] = ..., src_version_id: Optional[Any] = ..., storage_class: str = ..., preserve_acl: bool = ..., encrypt_key: bool = ..., headers: Optional[Dict[Text, Text]] = ..., query_args: Optional[Any] = ...): ... def set_canned_acl(self, acl_str, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ...): ... def get_xml_acl(self, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ...): ... def set_xml_acl(self, acl_str, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ..., query_args: str = ...): ... def set_acl(self, acl_or_str, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ...): ... def get_acl(self, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ...): ... def set_subresource(self, subresource, value, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ...): ... def get_subresource(self, subresource, key_name: str = ..., headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ...): ... def make_public(self, recursive: bool = ..., headers: Optional[Dict[Text, Text]] = ...): ... def add_email_grant(self, permission, email_address, recursive: bool = ..., headers: Optional[Dict[Text, Text]] = ...): ... 
def add_user_grant(self, permission, user_id, recursive: bool = ..., headers: Optional[Dict[Text, Text]] = ..., display_name: Optional[Any] = ...): ... def list_grants(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_location(self): ... def set_xml_logging(self, logging_str, headers: Optional[Dict[Text, Text]] = ...): ... def enable_logging(self, target_bucket, target_prefix: str = ..., grants: Optional[Any] = ..., headers: Optional[Dict[Text, Text]] = ...): ... def disable_logging(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_logging_status(self, headers: Optional[Dict[Text, Text]] = ...): ... def set_as_logging_target(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_request_payment(self, headers: Optional[Dict[Text, Text]] = ...): ... def set_request_payment(self, payer: str = ..., headers: Optional[Dict[Text, Text]] = ...): ... def configure_versioning(self, versioning, mfa_delete: bool = ..., mfa_token: Optional[Any] = ..., headers: Optional[Dict[Text, Text]] = ...): ... def get_versioning_status(self, headers: Optional[Dict[Text, Text]] = ...): ... def configure_lifecycle(self, lifecycle_config, headers: Optional[Dict[Text, Text]] = ...): ... def get_lifecycle_config(self, headers: Optional[Dict[Text, Text]] = ...): ... def delete_lifecycle_configuration(self, headers: Optional[Dict[Text, Text]] = ...): ... def configure_website(self, suffix: Optional[Any] = ..., error_key: Optional[Any] = ..., redirect_all_requests_to: Optional[Any] = ..., routing_rules: Optional[Any] = ..., headers: Optional[Dict[Text, Text]] = ...): ... def set_website_configuration(self, config, headers: Optional[Dict[Text, Text]] = ...): ... def set_website_configuration_xml(self, xml, headers: Optional[Dict[Text, Text]] = ...): ... def get_website_configuration(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_website_configuration_obj(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_website_configuration_with_xml(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_website_configuration_xml(self, headers: Optional[Dict[Text, Text]] = ...): ... def delete_website_configuration(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_website_endpoint(self): ... def get_policy(self, headers: Optional[Dict[Text, Text]] = ...): ... def set_policy(self, policy, headers: Optional[Dict[Text, Text]] = ...): ... def delete_policy(self, headers: Optional[Dict[Text, Text]] = ...): ... def set_cors_xml(self, cors_xml, headers: Optional[Dict[Text, Text]] = ...): ... def set_cors(self, cors_config, headers: Optional[Dict[Text, Text]] = ...): ... def get_cors_xml(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_cors(self, headers: Optional[Dict[Text, Text]] = ...): ... def delete_cors(self, headers: Optional[Dict[Text, Text]] = ...): ... def initiate_multipart_upload(self, key_name, headers: Optional[Dict[Text, Text]] = ..., reduced_redundancy: bool = ..., metadata: Optional[Any] = ..., encrypt_key: bool = ..., policy: Optional[Any] = ...): ... def complete_multipart_upload(self, key_name, upload_id, xml_body, headers: Optional[Dict[Text, Text]] = ...): ... def cancel_multipart_upload(self, key_name, upload_id, headers: Optional[Dict[Text, Text]] = ...): ... def delete(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_tags(self): ... def get_xml_tags(self): ... def set_xml_tags(self, tag_str, headers: Optional[Dict[Text, Text]] = ..., query_args: str = ...): ... 
def set_tags(self, tags, headers: Optional[Dict[Text, Text]] = ...): ... def delete_tags(self, headers: Optional[Dict[Text, Text]] = ...): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/bucketlistresultset.pyi0000644€tŠÔÚ€2›s®0000000400713215007212032112 0ustar jukkaDROPBOX\Domain Users00000000000000from .bucket import Bucket from .key import Key from typing import Any, Iterable, Iterator, Optional def bucket_lister(bucket, prefix: str = ..., delimiter: str = ..., marker: str = ..., headers: Optional[Any] = ..., encoding_type: Optional[Any] = ...): ... class BucketListResultSet(Iterable[Key]): bucket = ... # type: Any prefix = ... # type: Any delimiter = ... # type: Any marker = ... # type: Any headers = ... # type: Any encoding_type = ... # type: Any def __init__(self, bucket: Optional[Any] = ..., prefix: str = ..., delimiter: str = ..., marker: str = ..., headers: Optional[Any] = ..., encoding_type: Optional[Any] = ...) -> None: ... def __iter__(self) -> Iterator[Key]: ... def versioned_bucket_lister(bucket, prefix: str = ..., delimiter: str = ..., key_marker: str = ..., version_id_marker: str = ..., headers: Optional[Any] = ..., encoding_type: Optional[Any] = ...): ... class VersionedBucketListResultSet: bucket = ... # type: Any prefix = ... # type: Any delimiter = ... # type: Any key_marker = ... # type: Any version_id_marker = ... # type: Any headers = ... # type: Any encoding_type = ... # type: Any def __init__(self, bucket: Optional[Any] = ..., prefix: str = ..., delimiter: str = ..., key_marker: str = ..., version_id_marker: str = ..., headers: Optional[Any] = ..., encoding_type: Optional[Any] = ...) -> None: ... def __iter__(self) -> Iterator[Key]: ... def multipart_upload_lister(bucket, key_marker: str = ..., upload_id_marker: str = ..., headers: Optional[Any] = ..., encoding_type: Optional[Any] = ...): ... class MultiPartUploadListResultSet: bucket = ... # type: Any key_marker = ... # type: Any upload_id_marker = ... # type: Any headers = ... # type: Any encoding_type = ... # type: Any def __init__(self, bucket: Optional[Any] = ..., key_marker: str = ..., upload_id_marker: str = ..., headers: Optional[Any] = ..., encoding_type: Optional[Any] = ...) -> None: ... def __iter__(self): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/bucketlogging.pyi0000644€tŠÔÚ€2›s®0000000067213215007212030616 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class BucketLogging: target = ... # type: Any prefix = ... # type: Any grants = ... # type: Any def __init__(self, target: Optional[Any] = ..., prefix: Optional[Any] = ..., grants: Optional[Any] = ...) -> None: ... def add_grant(self, grant): ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/connection.pyi0000644€tŠÔÚ€2›s®0000001074213215007212030130 0ustar jukkaDROPBOX\Domain Users00000000000000from .bucket import Bucket from typing import Any, Dict, Optional, Text, Type from boto.connection import AWSAuthConnection from boto.exception import BotoClientError def check_lowercase_bucketname(n): ... def assert_case_insensitive(f): ... class _CallingFormat: def get_bucket_server(self, server, bucket): ... def build_url_base(self, connection, protocol, server, bucket, key: str = ...): ... def build_host(self, server, bucket): ... def build_auth_path(self, bucket, key: str = ...): ... def build_path_base(self, bucket, key: str = ...): ... 
class SubdomainCallingFormat(_CallingFormat): def get_bucket_server(self, server, bucket): ... class VHostCallingFormat(_CallingFormat): def get_bucket_server(self, server, bucket): ... class OrdinaryCallingFormat(_CallingFormat): def get_bucket_server(self, server, bucket): ... def build_path_base(self, bucket, key: str = ...): ... class ProtocolIndependentOrdinaryCallingFormat(OrdinaryCallingFormat): def build_url_base(self, connection, protocol, server, bucket, key: str = ...): ... class Location: DEFAULT = ... # type: str EU = ... # type: str EUCentral1 = ... # type: str USWest = ... # type: str USWest2 = ... # type: str SAEast = ... # type: str APNortheast = ... # type: str APSoutheast = ... # type: str APSoutheast2 = ... # type: str CNNorth1 = ... # type: str class NoHostProvided: ... class HostRequiredError(BotoClientError): ... class S3Connection(AWSAuthConnection): DefaultHost = ... # type: Any DefaultCallingFormat = ... # type: Any QueryString = ... # type: str calling_format = ... # type: Any bucket_class = ... # type: Type[Bucket] anon = ... # type: Any def __init__(self, aws_access_key_id: Optional[Any] = ..., aws_secret_access_key: Optional[Any] = ..., is_secure: bool = ..., port: Optional[Any] = ..., proxy: Optional[Any] = ..., proxy_port: Optional[Any] = ..., proxy_user: Optional[Any] = ..., proxy_pass: Optional[Any] = ..., host: Any = ..., debug: int = ..., https_connection_factory: Optional[Any] = ..., calling_format: Any = ..., path: str = ..., provider: str = ..., bucket_class: Type[Bucket] = ..., security_token: Optional[Any] = ..., suppress_consec_slashes: bool = ..., anon: bool = ..., validate_certs: Optional[Any] = ..., profile_name: Optional[Any] = ...) -> None: ... def __iter__(self): ... def __contains__(self, bucket_name): ... def set_bucket_class(self, bucket_class: Type[Bucket]) -> None: ... def build_post_policy(self, expiration_time, conditions): ... def build_post_form_args(self, bucket_name, key, expires_in: int = ..., acl: Optional[Any] = ..., success_action_redirect: Optional[Any] = ..., max_content_length: Optional[Any] = ..., http_method: str = ..., fields: Optional[Any] = ..., conditions: Optional[Any] = ..., storage_class: str = ..., server_side_encryption: Optional[Any] = ...): ... def generate_url_sigv4(self, expires_in, method, bucket: str = ..., key: str = ..., headers: Optional[Dict[Text, Text]] = ..., force_http: bool = ..., response_headers: Optional[Dict[Text, Text]] = ..., version_id: Optional[Any] = ..., iso_date: Optional[Any] = ...): ... def generate_url(self, expires_in, method, bucket: str = ..., key: str = ..., headers: Optional[Dict[Text, Text]] = ..., query_auth: bool = ..., force_http: bool = ..., response_headers: Optional[Dict[Text, Text]] = ..., expires_in_absolute: bool = ..., version_id: Optional[Any] = ...): ... def get_all_buckets(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_canonical_user_id(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_bucket(self, bucket_name: Text, validate: bool = ..., headers: Optional[Dict[Text, Text]] = ...) -> Bucket: ... def head_bucket(self, bucket_name, headers: Optional[Dict[Text, Text]] = ...): ... def lookup(self, bucket_name, validate: bool = ..., headers: Optional[Dict[Text, Text]] = ...): ... def create_bucket(self, bucket_name, headers: Optional[Dict[Text, Text]] = ..., location: Any = ..., policy: Optional[Any] = ...): ... def delete_bucket(self, bucket, headers: Optional[Dict[Text, Text]] = ...): ... 
def make_request(self, method, bucket: str = ..., key: str = ..., headers: Optional[Any] = ..., data: str = ..., query_args: Optional[Any] = ..., sender: Optional[Any] = ..., override_num_retries: Optional[Any] = ..., retry_handler: Optional[Any] = ..., *args, **kwargs): ... # type: ignore # https://github.com/python/mypy/issues/1237 mypy-0.560/typeshed/third_party/2and3/boto/s3/cors.pyi0000644€tŠÔÚ€2›s®0000000200613215007212026731 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class CORSRule: allowed_method = ... # type: Any allowed_origin = ... # type: Any id = ... # type: Any allowed_header = ... # type: Any max_age_seconds = ... # type: Any expose_header = ... # type: Any def __init__(self, allowed_method: Optional[Any] = ..., allowed_origin: Optional[Any] = ..., id: Optional[Any] = ..., allowed_header: Optional[Any] = ..., max_age_seconds: Optional[Any] = ..., expose_header: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self) -> str: ... class CORSConfiguration(list): def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self) -> str: ... def add_rule(self, allowed_method, allowed_origin, id: Optional[Any] = ..., allowed_header: Optional[Any] = ..., max_age_seconds: Optional[Any] = ..., expose_header: Optional[Any] = ...): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/deletemarker.pyi0000644€tŠÔÚ€2›s®0000000070213215007212030430 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class DeleteMarker: bucket = ... # type: Any name = ... # type: Any version_id = ... # type: Any is_latest = ... # type: bool last_modified = ... # type: Any owner = ... # type: Any def __init__(self, bucket: Optional[Any] = ..., name: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/key.pyi0000644€tŠÔÚ€2›s®0000001626113215007212026563 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, Optional, Text class Key: DefaultContentType = ... # type: str RestoreBody = ... # type: str BufferSize = ... # type: Any base_user_settable_fields = ... # type: Any base_fields = ... # type: Any bucket = ... # type: Any name = ... # type: str metadata = ... # type: Any cache_control = ... # type: Any content_type = ... # type: Any content_encoding = ... # type: Any content_disposition = ... # type: Any content_language = ... # type: Any filename = ... # type: Any etag = ... # type: Any is_latest = ... # type: bool last_modified = ... # type: Any owner = ... # type: Any path = ... # type: Any resp = ... # type: Any mode = ... # type: Any size = ... # type: Any version_id = ... # type: Any source_version_id = ... # type: Any delete_marker = ... # type: bool encrypted = ... # type: Any ongoing_restore = ... # type: Any expiry_date = ... # type: Any local_hashes = ... # type: Any def __init__(self, bucket: Optional[Any] = ..., name: Optional[Any] = ...) -> None: ... def __iter__(self): ... @property def provider(self): ... key = ... # type: Any md5 = ... # type: Any base64md5 = ... # type: Any storage_class = ... # type: Any def get_md5_from_hexdigest(self, md5_hexdigest): ... def handle_encryption_headers(self, resp): ... def handle_version_headers(self, resp, force: bool = ...): ... def handle_restore_headers(self, response): ... 
def handle_addl_headers(self, headers): ... def open_read(self, headers: Optional[Dict[Text, Text]] = ..., query_args: str = ..., override_num_retries: Optional[Any] = ..., response_headers: Optional[Dict[Text, Text]] = ...): ... def open_write(self, headers: Optional[Dict[Text, Text]] = ..., override_num_retries: Optional[Any] = ...): ... def open(self, mode: str = ..., headers: Optional[Dict[Text, Text]] = ..., query_args: Optional[Any] = ..., override_num_retries: Optional[Any] = ...): ... closed = ... # type: bool def close(self, fast: bool = ...): ... def next(self): ... __next__ = ... # type: Any def read(self, size: int = ...): ... def change_storage_class(self, new_storage_class, dst_bucket: Optional[Any] = ..., validate_dst_bucket: bool = ...): ... def copy(self, dst_bucket, dst_key, metadata: Optional[Any] = ..., reduced_redundancy: bool = ..., preserve_acl: bool = ..., encrypt_key: bool = ..., validate_dst_bucket: bool = ...): ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def exists(self, headers: Optional[Dict[Text, Text]] = ...): ... def delete(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_metadata(self, name): ... def set_metadata(self, name, value): ... def update_metadata(self, d): ... def set_acl(self, acl_str, headers: Optional[Dict[Text, Text]] = ...): ... def get_acl(self, headers: Optional[Dict[Text, Text]] = ...): ... def get_xml_acl(self, headers: Optional[Dict[Text, Text]] = ...): ... def set_xml_acl(self, acl_str, headers: Optional[Dict[Text, Text]] = ...): ... def set_canned_acl(self, acl_str, headers: Optional[Dict[Text, Text]] = ...): ... def get_redirect(self): ... def set_redirect(self, redirect_location, headers: Optional[Dict[Text, Text]] = ...): ... def make_public(self, headers: Optional[Dict[Text, Text]] = ...): ... def generate_url(self, expires_in, method: str = ..., headers: Optional[Dict[Text, Text]] = ..., query_auth: bool = ..., force_http: bool = ..., response_headers: Optional[Dict[Text, Text]] = ..., expires_in_absolute: bool = ..., version_id: Optional[Any] = ..., policy: Optional[Any] = ..., reduced_redundancy: bool = ..., encrypt_key: bool = ...): ... def send_file(self, fp, headers: Optional[Dict[Text, Text]] = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., query_args: Optional[Any] = ..., chunked_transfer: bool = ..., size: Optional[Any] = ...): ... def should_retry(self, response, chunked_transfer: bool = ...): ... def compute_md5(self, fp, size: Optional[Any] = ...): ... def set_contents_from_stream(self, fp, headers: Optional[Dict[Text, Text]] = ..., replace: bool = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., policy: Optional[Any] = ..., reduced_redundancy: bool = ..., query_args: Optional[Any] = ..., size: Optional[Any] = ...): ... def set_contents_from_file(self, fp, headers: Optional[Dict[Text, Text]] = ..., replace: bool = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., policy: Optional[Any] = ..., md5: Optional[Any] = ..., reduced_redundancy: bool = ..., query_args: Optional[Any] = ..., encrypt_key: bool = ..., size: Optional[Any] = ..., rewind: bool = ...): ... def set_contents_from_filename(self, filename, headers: Optional[Dict[Text, Text]] = ..., replace: bool = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., policy: Optional[Any] = ..., md5: Optional[Any] = ..., reduced_redundancy: bool = ..., encrypt_key: bool = ...): ... 
def set_contents_from_string(self, string_data: Text, headers: Optional[Dict[Text, Text]] = ..., replace: bool = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., policy: Optional[Any] = ..., md5: Optional[Any] = ..., reduced_redundancy: bool = ..., encrypt_key: bool = ...) -> None: ... def get_file(self, fp, headers: Optional[Dict[Text, Text]] = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., torrent: bool = ..., version_id: Optional[Any] = ..., override_num_retries: Optional[Any] = ..., response_headers: Optional[Dict[Text, Text]] = ...): ... def get_torrent_file(self, fp, headers: Optional[Dict[Text, Text]] = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ...): ... def get_contents_to_file(self, fp, headers: Optional[Dict[Text, Text]] = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., torrent: bool = ..., version_id: Optional[Any] = ..., res_download_handler: Optional[Any] = ..., response_headers: Optional[Dict[Text, Text]] = ...): ... def get_contents_to_filename(self, filename, headers: Optional[Dict[Text, Text]] = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., torrent: bool = ..., version_id: Optional[Any] = ..., res_download_handler: Optional[Any] = ..., response_headers: Optional[Dict[Text, Text]] = ...): ... def get_contents_as_string(self, headers: Optional[Dict[Text, Text]] = ..., cb: Optional[Callable[[int, int], Any]] = ..., num_cb: int = ..., torrent: bool = ..., version_id: Optional[Any] = ..., response_headers: Optional[Dict[Text, Text]] = ..., encoding: Optional[Any] = ...) -> str: ... def add_email_grant(self, permission, email_address, headers: Optional[Dict[Text, Text]] = ...): ... def add_user_grant(self, permission, user_id, headers: Optional[Dict[Text, Text]] = ..., display_name: Optional[Any] = ...): ... def set_remote_metadata(self, metadata_plus, metadata_minus, preserve_acl, headers: Optional[Dict[Text, Text]] = ...): ... def restore(self, days, headers: Optional[Dict[Text, Text]] = ...): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/keyfile.pyi0000644€tŠÔÚ€2›s®0000000145213215007212027417 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class KeyFile: key = ... # type: Any location = ... # type: int closed = ... # type: bool softspace = ... # type: int mode = ... # type: str encoding = ... # type: str errors = ... # type: str newlines = ... # type: str name = ... # type: Any def __init__(self, key) -> None: ... def tell(self): ... def seek(self, pos, whence: Any = ...): ... def read(self, size): ... def close(self): ... def isatty(self): ... def getkey(self): ... def write(self, buf): ... def fileno(self): ... def flush(self): ... def next(self): ... def readinto(self): ... def readline(self): ... def readlines(self): ... def truncate(self): ... def writelines(self): ... def xreadlines(self): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/lifecycle.pyi0000644€tŠÔÚ€2›s®0000000401413215007212027723 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Rule: id = ... # type: Any prefix = ... # type: Any status = ... # type: Any expiration = ... # type: Any transition = ... # type: Any def __init__(self, id: Optional[Any] = ..., prefix: Optional[Any] = ..., status: Optional[Any] = ..., expiration: Optional[Any] = ..., transition: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... 
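# --- Illustrative usage (not part of the stub) -------------------------------
# A minimal, hedged sketch of how the lifecycle stubs in this module are
# typically exercised together with boto.s3.connection and boto.s3.bucket,
# whose signatures are declared above.  Expiration and Lifecycle are declared
# just below in this module; the credentials and bucket name are placeholders.
from boto.s3.connection import S3Connection
from boto.s3.lifecycle import Lifecycle, Expiration

conn = S3Connection()                      # credentials resolved from env/boto config
bucket = conn.get_bucket('example-logs')   # -> boto.s3.bucket.Bucket (see bucket.pyi above)
rules = Lifecycle()
# Expire everything under logs/ after 30 days; add_rule() builds the Rule for us.
rules.add_rule(id='expire-logs', prefix='logs/', status='Enabled',
               expiration=Expiration(days=30))
bucket.configure_lifecycle(rules)          # declared on Bucket in bucket.pyi
# ------------------------------------------------------------------------------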
class Expiration: days = ... # type: Any date = ... # type: Any def __init__(self, days: Optional[Any] = ..., date: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... class Transition: days = ... # type: Any date = ... # type: Any storage_class = ... # type: Any def __init__(self, days: Optional[Any] = ..., date: Optional[Any] = ..., storage_class: Optional[Any] = ...) -> None: ... def to_xml(self): ... class Transitions(list): transition_properties = ... # type: int current_transition_property = ... # type: int temp_days = ... # type: Any temp_date = ... # type: Any temp_storage_class = ... # type: Any def __init__(self) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... def add_transition(self, days: Optional[Any] = ..., date: Optional[Any] = ..., storage_class: Optional[Any] = ...): ... @property def days(self): ... @property def date(self): ... @property def storage_class(self): ... class Lifecycle(list): def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... def add_rule(self, id: Optional[Any] = ..., prefix: str = ..., status: str = ..., expiration: Optional[Any] = ..., transition: Optional[Any] = ...): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/multidelete.pyi0000644€tŠÔÚ€2›s®0000000222013215007212030276 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Deleted: key = ... # type: Any version_id = ... # type: Any delete_marker = ... # type: Any delete_marker_version_id = ... # type: Any def __init__(self, key: Optional[Any] = ..., version_id: Optional[Any] = ..., delete_marker: bool = ..., delete_marker_version_id: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class Error: key = ... # type: Any version_id = ... # type: Any code = ... # type: Any message = ... # type: Any def __init__(self, key: Optional[Any] = ..., version_id: Optional[Any] = ..., code: Optional[Any] = ..., message: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class MultiDeleteResult: bucket = ... # type: Any deleted = ... # type: Any errors = ... # type: Any def __init__(self, bucket: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/multipart.pyi0000644€tŠÔÚ€2›s®0000000417113215007212030011 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class CompleteMultiPartUpload: bucket = ... # type: Any location = ... # type: Any bucket_name = ... # type: Any key_name = ... # type: Any etag = ... # type: Any version_id = ... # type: Any encrypted = ... # type: Any def __init__(self, bucket: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... class Part: bucket = ... # type: Any part_number = ... # type: Any last_modified = ... # type: Any etag = ... # type: Any size = ... # type: Any def __init__(self, bucket: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... 
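# --- Illustrative usage (not part of the stub) -------------------------------
# A hedged sketch of a multipart upload driven through the classes stubbed in
# this module (MultiPartUpload is declared just below) and Bucket from
# bucket.pyi above.  The bucket name, key name and file path are placeholders.
from boto.s3.connection import S3Connection

conn = S3Connection()
bucket = conn.get_bucket('example-uploads')
mp = bucket.initiate_multipart_upload('backups/big.tar')   # returns a MultiPartUpload
with open('/tmp/big.tar', 'rb') as fp:
    # Real code would split the file into >=5 MB chunks; one part keeps this short.
    mp.upload_part_from_file(fp, part_num=1)
mp.complete_upload()                                        # or mp.cancel_upload() on failure
# ------------------------------------------------------------------------------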
def part_lister(mpupload, part_number_marker: Optional[Any] = ...): ... class MultiPartUpload: bucket = ... # type: Any bucket_name = ... # type: Any key_name = ... # type: Any id = ... # type: Any initiator = ... # type: Any owner = ... # type: Any storage_class = ... # type: Any initiated = ... # type: Any part_number_marker = ... # type: Any next_part_number_marker = ... # type: Any max_parts = ... # type: Any is_truncated = ... # type: bool def __init__(self, bucket: Optional[Any] = ...) -> None: ... def __iter__(self): ... def to_xml(self): ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def get_all_parts(self, max_parts: Optional[Any] = ..., part_number_marker: Optional[Any] = ..., encoding_type: Optional[Any] = ...): ... def upload_part_from_file(self, fp, part_num, headers: Optional[Any] = ..., replace: bool = ..., cb: Optional[Any] = ..., num_cb: int = ..., md5: Optional[Any] = ..., size: Optional[Any] = ...): ... def copy_part_from_key(self, src_bucket_name, src_key_name, part_num, start: Optional[Any] = ..., end: Optional[Any] = ..., src_version_id: Optional[Any] = ..., headers: Optional[Any] = ...): ... def complete_upload(self): ... def cancel_upload(self): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/prefix.pyi0000644€tŠÔÚ€2›s®0000000054013215007212027261 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Prefix: bucket = ... # type: Any name = ... # type: Any def __init__(self, bucket: Optional[Any] = ..., name: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... @property def provider(self): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/tagging.pyi0000644€tŠÔÚ€2›s®0000000136513215007212027412 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Tag: key = ... # type: Any value = ... # type: Any def __init__(self, key: Optional[Any] = ..., value: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... def __eq__(self, other): ... class TagSet(list): def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def add_tag(self, key, value): ... def to_xml(self): ... class Tags(list): def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... def add_tag_set(self, tag_set): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/user.pyi0000644€tŠÔÚ€2›s®0000000062413215007212026745 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class User: type = ... # type: Any id = ... # type: Any display_name = ... # type: Any def __init__(self, parent: Optional[Any] = ..., id: str = ..., display_name: str = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self, element_name: str = ...): ... mypy-0.560/typeshed/third_party/2and3/boto/s3/website.pyi0000644€tŠÔÚ€2›s®0000000524413215007212027434 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional def tag(key, value): ... class WebsiteConfiguration: suffix = ... # type: Any error_key = ... # type: Any redirect_all_requests_to = ... # type: Any routing_rules = ... 
# type: Any def __init__(self, suffix: Optional[Any] = ..., error_key: Optional[Any] = ..., redirect_all_requests_to: Optional[Any] = ..., routing_rules: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... class _XMLKeyValue: translator = ... # type: Any container = ... # type: Any def __init__(self, translator, container: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... class RedirectLocation(_XMLKeyValue): TRANSLATOR = ... # type: Any hostname = ... # type: Any protocol = ... # type: Any def __init__(self, hostname: Optional[Any] = ..., protocol: Optional[Any] = ...) -> None: ... def to_xml(self): ... class RoutingRules(list): def add_rule(self, rule): ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... class RoutingRule: condition = ... # type: Any redirect = ... # type: Any def __init__(self, condition: Optional[Any] = ..., redirect: Optional[Any] = ...) -> None: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... @classmethod def when(cls, key_prefix: Optional[Any] = ..., http_error_code: Optional[Any] = ...): ... def then_redirect(self, hostname: Optional[Any] = ..., protocol: Optional[Any] = ..., replace_key: Optional[Any] = ..., replace_key_prefix: Optional[Any] = ..., http_redirect_code: Optional[Any] = ...): ... class Condition(_XMLKeyValue): TRANSLATOR = ... # type: Any key_prefix = ... # type: Any http_error_code = ... # type: Any def __init__(self, key_prefix: Optional[Any] = ..., http_error_code: Optional[Any] = ...) -> None: ... def to_xml(self): ... class Redirect(_XMLKeyValue): TRANSLATOR = ... # type: Any hostname = ... # type: Any protocol = ... # type: Any replace_key = ... # type: Any replace_key_prefix = ... # type: Any http_redirect_code = ... # type: Any def __init__(self, hostname: Optional[Any] = ..., protocol: Optional[Any] = ..., replace_key: Optional[Any] = ..., replace_key_prefix: Optional[Any] = ..., http_redirect_code: Optional[Any] = ...) -> None: ... def to_xml(self): ... mypy-0.560/typeshed/third_party/2and3/boto/utils.pyi0000644€tŠÔÚ€2›s®0000001456013215007212026606 0ustar jukkaDROPBOX\Domain Users00000000000000import datetime import logging.handlers import subprocess import sys import time import boto.connection from typing import ( Any, Callable, ContextManager, Dict, IO, Iterable, List, Mapping, Optional, Sequence, Tuple, Type, TypeVar, Union, ) _KT = TypeVar('_KT') _VT = TypeVar('_VT') if sys.version_info[0] >= 3: # TODO move _StringIO definition into boto.compat once stubs exist and rename to StringIO import io _StringIO = io.StringIO from hashlib import _Hash _HashType = _Hash from email.message import Message as _Message else: # TODO move _StringIO definition into boto.compat once stubs exist and rename to StringIO import StringIO _StringIO = StringIO.StringIO from hashlib import _hash _HashType = _hash # TODO use email.message.Message once stubs exist _Message = Any _Provider = Any # TODO replace this with boto.provider.Provider once stubs exist _LockType = Any # TODO replace this with _thread.LockType once stubs exist JSONDecodeError = ... # type: Type[ValueError] qsa_of_interest = ... # type: List[str] def unquote_v(nv: str) -> Union[str, Tuple[str, str]]: ... 
def canonical_string( method: str, path: str, headers: Mapping[str, Optional[str]], expires: Optional[int] = ..., provider: Optional[_Provider] = ..., ) -> str: ... def merge_meta( headers: Mapping[str, str], metadata: Mapping[str, str], provider: Optional[_Provider] = ..., ) -> Mapping[str, str]: ... def get_aws_metadata( headers: Mapping[str, str], provider: Optional[_Provider] = ..., ) -> Mapping[str, str]: ... def retry_url( url: str, retry_on_404: bool = ..., num_retries: int = ..., timeout: Optional[int] = ..., ) -> str: ... class LazyLoadMetadata(Dict[_KT, _VT]): def __init__( self, url: str, num_retries: int, timeout: Optional[int] = ..., ) -> None: ... def get_instance_metadata( version: str = ..., url: str = ..., data: str = ..., timeout: Optional[int] = ..., num_retries: int = ..., ) -> Optional[LazyLoadMetadata]: ... def get_instance_identity( version: str = ..., url: str = ..., timeout: Optional[int] = ..., num_retries: int = ..., ) -> Optional[Mapping[str, Any]]: ... def get_instance_userdata( version: str = ..., sep: Optional[str] = ..., url: str = ..., timeout: Optional[int] = ..., num_retries: int = ..., ) -> Mapping[str, str]: ... ISO8601 = ... # type: str ISO8601_MS = ... # type: str RFC1123 = ... # type: str LOCALE_LOCK = ... # type: _LockType def setlocale(name: Union[str, Tuple[str, str]]) -> ContextManager[str]: ... def get_ts(ts: Optional[time.struct_time] = ...) -> str: ... def parse_ts(ts: str) -> datetime.datetime: ... def find_class(module_name: str, class_name: Optional[str] = ...) -> Optional[Type[Any]]: ... def update_dme(username: str, password: str, dme_id: str, ip_address: str) -> str: ... def fetch_file( uri: str, file: Optional[IO[str]] = ..., username: Optional[str] = ..., password: Optional[str] = ..., ) -> Optional[IO[str]]: ... class ShellCommand: exit_code = ... # type: int command = ... # type: subprocess._CMD log_fp = ... # type: _StringIO wait = ... # type: bool fail_fast = ... # type: bool def __init__( self, command: subprocess._CMD, wait: bool = ..., fail_fast: bool = ..., cwd: Optional[subprocess._TXT] = ..., ) -> None: ... process = ... # type: subprocess.Popen def run(self, cwd: Optional[subprocess._CMD] = ...) -> Optional[int]: ... def setReadOnly(self, value) -> None: ... def getStatus(self) -> Optional[int]: ... status = ... # type: Optional[int] def getOutput(self) -> str: ... output = ... # type: str class AuthSMTPHandler(logging.handlers.SMTPHandler): username = ... # type: str password = ... # type: str def __init__( self, mailhost: str, username: str, password: str, fromaddr: str, toaddrs: Sequence[str], subject: str, ) -> None: ... class LRUCache(Dict[_KT, _VT]): class _Item: previous = ... # type: Optional[LRUCache._Item] next = ... # type: Optional[LRUCache._Item] key = ... value = ... def __init__(self, key, value) -> None: ... _dict = ... # type: Dict[_KT, LRUCache._Item] capacity = ... # type: int head = ... # type: Optional[LRUCache._Item] tail = ... # type: Optional[LRUCache._Item] def __init__(self, capacity: int) -> None: ... # This exists to work around Password.str's name shadowing the str type _str = str class Password: hashfunc = ... # type: Callable[[bytes], _HashType] str = ... # type: Optional[_str] def __init__( self, str: Optional[_str] = ..., hashfunc: Optional[Callable[[bytes], _HashType]] = ..., ) -> None: ... def set(self, value: Union[bytes, _str]) -> None: ... def __eq__(self, other: Any) -> bool: ... def __len__(self) -> int: ... 
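# --- Illustrative usage (not part of the stub) -------------------------------
# A small, hedged sketch of two helpers declared in this module: Password
# stores only a hash and compares through __eq__, and parse_ts() turns an
# ISO 8601 timestamp string (as returned by S3) into a datetime.
from boto.utils import Password, parse_ts

p = Password()
p.set('s3cr3t-value')            # stores a hex digest, never the plain text
assert p == 's3cr3t-value'       # __eq__ hashes the right-hand side and compares digests

when = parse_ts('2015-03-04T01:02:03.000Z')
print(when.year, when.month)     # -> 2015 3
# ------------------------------------------------------------------------------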
def notify( subject: str, body: Optional[str] = ..., html_body: Optional[Union[Sequence[str], str]] = ..., to_string: Optional[str] = ..., attachments: Optional[Iterable[_Message]] = ..., append_instance_id: bool = ..., ) -> None: ... def get_utf8_value(value: str) -> bytes: ... def mklist(value: Any) -> List: ... def pythonize_name(name: str) -> str: ... def write_mime_multipart( content: List[Tuple[str, str]], compress: bool = ..., deftype: str = ..., delimiter: str = ..., ) -> str: ... def guess_mime_type(content: str, deftype: str) -> str: ... def compute_md5( fp: IO[Any], buf_size: int = ..., size: Optional[int] = ..., ) -> Tuple[str, str, int]: ... def compute_hash( fp: IO[Any], buf_size: int = ..., size: Optional[int] = ..., hash_algorithm: Any = ..., ) -> Tuple[str, str, int]: ... def find_matching_headers(name: str, headers: Mapping[str, Optional[str]]) -> List[str]: ... def merge_headers_by_name(name: str, headers: Mapping[str, Optional[str]]) -> str: ... class RequestHook: def handle_request_data( self, request: boto.connection.HTTPRequest, response: boto.connection.HTTPResponse, error: bool = ..., ) -> Any: ... def host_is_ipv6(hostname: str) -> bool: ... def parse_host(hostname: str) -> str: ... mypy-0.560/typeshed/third_party/2and3/certifi.pyi0000644€tŠÔÚ€2›s®0000000006413215007212026122 0ustar jukkaDROPBOX\Domain Users00000000000000def where() -> str: ... def old_where() -> str: ... mypy-0.560/typeshed/third_party/2and3/characteristic/0000755€tŠÔÚ€2›s®0000000000013215007244026747 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/characteristic/__init__.pyi0000644€tŠÔÚ€2›s®0000000243313215007212031226 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Sequence, Callable, Union, Any, Optional, AnyStr, TypeVar, Type def with_repr(attrs: Sequence[Union[AnyStr, Attribute]]) -> Callable[..., Any]: ... def with_cmp(attrs: Sequence[Union[AnyStr, Attribute]]) -> Callable[..., Any]: ... def with_init(attrs: Sequence[Union[AnyStr, Attribute]]) -> Callable[..., Any]: ... def immutable(attrs: Sequence[Union[AnyStr, Attribute]]) -> Callable[..., Any]: ... def strip_leading_underscores(attribute_name: AnyStr) -> AnyStr: ... NOTHING = Any _T = TypeVar('_T') def attributes( attrs: Sequence[Union[AnyStr, Attribute]], apply_with_cmp: bool = ..., apply_with_init: bool = ..., apply_with_repr: bool = ..., apply_immutable: bool = ..., store_attributes: Optional[Callable[[type, Attribute], Any]] = ..., **kw: Optional[dict]) -> Callable[[Type[_T]], Type[_T]]: ... class Attribute: def __init__( self, name: AnyStr, exclude_from_cmp: bool = ..., exclude_from_init: bool = ..., exclude_from_repr: bool = ..., exclude_from_immutable: bool = ..., default_value: Any = ..., default_factory: Optional[Callable[[None], Any]] = ..., instance_of: Optional[Any] = ..., init_aliaser: Optional[Callable[[AnyStr], AnyStr]] = ...) -> None: ... mypy-0.560/typeshed/third_party/2and3/click/0000755€tŠÔÚ€2›s®0000000000013215007244025044 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/click/__init__.pyi0000644€tŠÔÚ€2›s®0000000755213215007212027332 0ustar jukkaDROPBOX\Domain Users00000000000000# -*- coding: utf-8 -*- """ click ~~~~~ Click is a simple Python module that wraps the stdlib's optparse to make writing command line scripts fun. Unlike other modules, it's based around a simple API that does not come with too much magic and is composable. In case optparse ever gets removed from the stdlib, it will be shipped by this module. 
:copyright: (c) 2014 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ # Core classes from .core import ( Context as Context, BaseCommand as BaseCommand, Command as Command, MultiCommand as MultiCommand, Group as Group, CommandCollection as CommandCollection, Parameter as Parameter, Option as Option, Argument as Argument, ) # Globals from .globals import get_current_context as get_current_context # Decorators from .decorators import ( pass_context as pass_context, pass_obj as pass_obj, make_pass_decorator as make_pass_decorator, command as command, group as group, argument as argument, option as option, confirmation_option as confirmation_option, password_option as password_option, version_option as version_option, help_option as help_option, ) # Types from .types import ( ParamType as ParamType, File as File, Path as Path, Choice as Choice, IntRange as IntRange, Tuple as Tuple, STRING as STRING, INT as INT, FLOAT as FLOAT, BOOL as BOOL, UUID as UUID, UNPROCESSED as UNPROCESSED, ) # Utilities from .utils import ( echo as echo, get_binary_stream as get_binary_stream, get_text_stream as get_text_stream, open_file as open_file, format_filename as format_filename, get_app_dir as get_app_dir, get_os_args as get_os_args, ) # Terminal functions from .termui import ( prompt as prompt, confirm as confirm, get_terminal_size as get_terminal_size, echo_via_pager as echo_via_pager, progressbar as progressbar, clear as clear, style as style, unstyle as unstyle, secho as secho, edit as edit, launch as launch, getchar as getchar, pause as pause, ) # Exceptions from .exceptions import ( ClickException as ClickException, UsageError as UsageError, BadParameter as BadParameter, FileError as FileError, Abort as Abort, NoSuchOption as NoSuchOption, BadOptionUsage as BadOptionUsage, BadArgumentUsage as BadArgumentUsage, MissingParameter as MissingParameter, ) # Formatting from .formatting import HelpFormatter as HelpFormatter, wrap_text as wrap_text # Parsing from .parser import OptionParser as OptionParser __all__ = [ # Core classes 'Context', 'BaseCommand', 'Command', 'MultiCommand', 'Group', 'CommandCollection', 'Parameter', 'Option', 'Argument', # Globals 'get_current_context', # Decorators 'pass_context', 'pass_obj', 'make_pass_decorator', 'command', 'group', 'argument', 'option', 'confirmation_option', 'password_option', 'version_option', 'help_option', # Types 'ParamType', 'File', 'Path', 'Choice', 'IntRange', 'Tuple', 'STRING', 'INT', 'FLOAT', 'BOOL', 'UUID', 'UNPROCESSED', # Utilities 'echo', 'get_binary_stream', 'get_text_stream', 'open_file', 'format_filename', 'get_app_dir', 'get_os_args', # Terminal functions 'prompt', 'confirm', 'get_terminal_size', 'echo_via_pager', 'progressbar', 'clear', 'style', 'unstyle', 'secho', 'edit', 'launch', 'getchar', 'pause', # Exceptions 'ClickException', 'UsageError', 'BadParameter', 'FileError', 'Abort', 'NoSuchOption', 'BadOptionUsage', 'BadArgumentUsage', 'MissingParameter', # Formatting 'HelpFormatter', 'wrap_text', # Parsing 'OptionParser', ] # Controls if click should emit the warning about the use of unicode # literals. 
disable_unicode_literals_warning = False __version__ = '6.6' mypy-0.560/typeshed/third_party/2and3/click/core.pyi0000644€tŠÔÚ€2›s®0000002442713215007212026523 0ustar jukkaDROPBOX\Domain Users00000000000000from contextlib import contextmanager from typing import ( Any, Callable, Dict, Generator, Iterable, List, Mapping, Optional, Sequence, Set, Tuple, TypeVar, Union, ) from click.formatting import HelpFormatter from click.parser import OptionParser def invoke_param_callback( callback: Callable[['Context', 'Parameter', Optional[str]], Any], ctx: 'Context', param: 'Parameter', value: Optional[str] ) -> Any: ... @contextmanager def augment_usage_errors( ctx: 'Context', param: Optional['Parameter'] = ... ) -> Generator[None, None, None]: ... def iter_params_for_processing( invocation_order: Sequence['Parameter'], declaration_order: Iterable['Parameter'], ) -> Iterable['Parameter']: ... class Context: parent: Optional['Context'] command: 'Command' info_name: Optional[str] params: Dict args: List[str] protected_args: List[str] obj: Any default_map: Mapping[str, Any] invoked_subcommand: Optional[str] terminal_width: Optional[int] max_content_width: Optional[int] allow_extra_args: bool allow_interspersed_args: bool ignore_unknown_options: bool help_option_names: List[str] token_normalize_func: Optional[Callable[[str], str]] resilient_parsing: bool auto_envvar_prefix: Optional[str] color: Optional[bool] _meta: Dict[str, Any] _close_callbacks: List _depth: int # properties meta: Dict[str, Any] command_path: str def __init__( self, command: 'Command', parent: Optional['Context'] = ..., info_name: Optional[str] = ..., obj: Optional[Any] = ..., auto_envvar_prefix: Optional[str] = ..., default_map: Optional[Mapping[str, Any]] = ..., terminal_width: Optional[int] = ..., max_content_width: Optional[int] = ..., resilient_parsing: bool = ..., allow_extra_args: Optional[bool] = ..., allow_interspersed_args: Optional[bool] = ..., ignore_unknown_options: Optional[bool] = ..., help_option_names: Optional[List[str]] = ..., token_normalize_func: Optional[Callable[[str], str]] = ..., color: Optional[bool] = ... ) -> None: ... @contextmanager def scope(self, cleanup: bool = ...) -> Generator['Context', None, None]: ... def make_formatter(self) -> HelpFormatter: ... def call_on_close(self, f: Callable) -> Callable: ... def close(self) -> None: ... def find_root(self) -> 'Context': ... def find_object(self, object_type: type) -> Any: ... def ensure_object(self, object_type: type) -> Any: ... def lookup_default(self, name: str) -> Any: ... def fail(self, message: str) -> None: ... def abort(self) -> None: ... def exit(self, code: Union[int, str] = ...) -> None: ... def get_usage(self) -> str: ... def get_help(self) -> str: ... def invoke( self, callback: Union['Command', Callable], *args, **kwargs ) -> Any: ... def forward( self, callback: Union['Command', Callable], *args, **kwargs ) -> Any: ... class BaseCommand: allow_extra_args: bool allow_interspersed_args: bool ignore_unknown_options: bool name: str context_settings: Dict def __init__(self, name: str, context_settings: Optional[Dict] = ...) -> None: ... def get_usage(self, ctx: Context) -> str: ... def get_help(self, ctx: Context) -> str: ... def make_context( self, info_name: str, args: List[str], parent: Optional[Context] = ..., **extra ) -> Context: ... def parse_args(self, ctx: Context, args: List[str]) -> List[str]: ... def invoke(self, ctx: Context) -> Any: ... 
def main( self, args: Optional[List[str]] = ..., prog_name: Optional[str] = ..., complete_var: Optional[str] = ..., standalone_mode: bool = ..., **extra ) -> Any: ... def __call__(self, *args, **kwargs) -> Any: ... class Command(BaseCommand): callback: Optional[Callable] params: List['Parameter'] help: Optional[str] epilog: Optional[str] short_help: Optional[str] options_metavar: str add_help_option: bool def __init__( self, name: str, context_settings: Optional[Dict] = ..., callback: Optional[Callable] = ..., params: Optional[List['Parameter']] = ..., help: Optional[str] = ..., epilog: Optional[str] = ..., short_help: Optional[str] = ..., options_metavar: str = ..., add_help_option: bool = ... ) -> None: ... def get_params(self, ctx: Context) -> List['Parameter']: ... def format_usage( self, ctx: Context, formatter: HelpFormatter ) -> None: ... def collect_usage_pieces(self, ctx: Context) -> List[str]: ... def get_help_option_names(self, ctx: Context) -> Set[str]: ... def get_help_option(self, ctx: Context) -> Optional['Option']: ... def make_parser(self, ctx: Context) -> OptionParser: ... def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: ... def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: ... def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: ... def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: ... _T = TypeVar('_T') _Decorator = Callable[[_T], _T] class MultiCommand(Command): no_args_is_help: bool invoke_without_command: bool subcommand_metavar: str chain: bool result_callback: Callable def __init__( self, name: Optional[str] = ..., invoke_without_command: bool = ..., no_args_is_help: Optional[bool] = ..., subcommand_metavar: Optional[str] = ..., chain: bool = ..., result_callback: Optional[Callable] = ..., **attrs ) -> None: ... def resultcallback( self, replace: bool = ... ) -> _Decorator: ... def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: ... def resolve_command( self, ctx: Context, args: List[str] ) -> Tuple[str, Command, List[str]]: ... def get_command(self, ctx: Context, cmd_name: str) -> Optional[Command]: ... def list_commands(self, ctx: Context) -> Iterable[Command]: ... class Group(MultiCommand): commands: Dict[str, Command] def __init__( self, name: Optional[str] = ..., commands: Optional[Dict[str, Command]] = ..., **attrs ) -> None: ... def add_command(self, cmd: Command, name: Optional[str] = ...): ... def command(self, *args, **kwargs) -> _Decorator: ... def group(self, *args, **kwargs) -> _Decorator: ... class CommandCollection(MultiCommand): sources: List[MultiCommand] def __init__( self, name: Optional[str] = ..., sources: Optional[List[MultiCommand]] = ..., **attrs ) -> None: ... def add_source(self, multi_cmd: MultiCommand) -> None: ... 
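# --- Illustrative usage (not part of the stub) -------------------------------
# A hedged sketch of wiring the classes stubbed above together by hand.  Most
# applications use the decorators from click.decorators instead; this only
# shows how Command, Group and main() relate.  Names below are placeholders.
from click import Command, Group, echo

def _sync_callback():
    echo('syncing...')

cli = Group(name='tool')
cli.add_command(Command(name='sync', callback=_sync_callback, params=[]))

if __name__ == '__main__':
    # standalone_mode=False makes main() return instead of calling sys.exit().
    cli.main(args=['sync'], prog_name='tool', standalone_mode=False)
# ------------------------------------------------------------------------------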
class Parameter: param_type_name: str name: str opts: List[str] secondary_opts: List[str] type: 'ParamType' required: bool callback: Optional[Callable[[Context, 'Parameter', str], Any]] nargs: int multiple: bool expose_value: bool default: Any is_eager: bool metavar: Optional[str] envvar: Union[str, List[str], None] # properties human_readable_name: str def __init__( self, param_decls: Optional[List[str]] = ..., type: Optional[Union[type, 'ParamType']] = ..., required: bool = ..., default: Optional[Any] = ..., callback: Optional[Callable[[Context, 'Parameter', str], Any]] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ... ) -> None: ... def make_metavar(self) -> str: ... def get_default(self, ctx: Context) -> Any: ... def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: ... def consume_value(self, ctx: Context, opts: Dict[str, Any]) -> Any: ... def type_cast_value(self, ctx: Context, value: Any) -> Any: ... def process_value(self, ctx: Context, value: Any) -> Any: ... def value_is_missing(self, value: Any) -> bool: ... def full_process_value(self, ctx: Context, value: Any) -> Any: ... def resolve_envvar_value(self, ctx: Context) -> str: ... def value_from_envvar(self, ctx: Context) -> Union[str, List[str]]: ... def handle_parse_result( self, ctx: Context, opts: Dict[str, Any], args: List[str] ) -> Tuple[Any, List[str]]: ... def get_help_record(self, ctx: Context) -> Tuple[str, str]: ... def get_usage_pieces(self, ctx: Context) -> List[str]: ... class Option(Parameter): prompt: str # sic confirmation_prompt: bool hide_input: bool is_flag: bool flag_value: Any is_bool_flag: bool count: bool multiple: bool allow_from_autoenv: bool help: Optional[str] show_default: bool def __init__( self, param_decls: Optional[List[str]] = ..., show_default: bool = ..., prompt: Union[bool, str] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: Optional[bool] = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Optional[Union[type, 'ParamType']] = ..., help: Optional[str] = ..., **attrs ) -> None: ... def prompt_for_value(self, ctx: Context) -> Any: ... class Argument(Parameter): def __init__( self, param_decls: Optional[List[str]] = ..., required: Optional[bool] = ..., **attrs ) -> None: ... # cyclic dependency from click.types import ParamType # noqa: E402 mypy-0.560/typeshed/third_party/2and3/click/decorators.pyi0000644€tŠÔÚ€2›s®0000001342013215007212027727 0ustar jukkaDROPBOX\Domain Users00000000000000from distutils.version import Version from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union, Text from click.core import Command, Group, Argument, Option, Parameter, Context from click.types import ParamType _T = TypeVar('_T') _Decorator = Callable[[_T], _T] _Callback = Callable[ [Context, Union[Option, Parameter], Union[bool, int, str]], Any ] def pass_context(_T) -> _T: ... def pass_obj(_T) -> _T: ... def make_pass_decorator( object_type: type, ensure: bool = ... ) -> Callable[[_T], _T]: ... # NOTE: Decorators below have **attrs converted to concrete constructor # arguments from core.pyi to help with type checking. 
def command( name: Optional[str] = ..., cls: Optional[Type[Command]] = ..., # Command context_settings: Optional[Dict] = ..., help: Optional[str] = ..., epilog: Optional[str] = ..., short_help: Optional[str] = ..., options_metavar: str = ..., add_help_option: bool = ..., ) -> _Decorator: ... # This inherits attrs from Group, MultiCommand and Command. def group( name: Optional[str] = ..., cls: Type[Command] = ..., # Group commands: Optional[Dict[str, Command]] = ..., # MultiCommand invoke_without_command: bool = ..., no_args_is_help: Optional[bool] = ..., subcommand_metavar: Optional[str] = ..., chain: bool = ..., result_callback: Optional[Callable] = ..., # Command help: Optional[str] = ..., epilog: Optional[str] = ..., short_help: Optional[str] = ..., options_metavar: str = ..., add_help_option: bool = ..., # User-defined **kwargs: Any, ) -> _Decorator: ... def argument( *param_decls: str, cls: Type[Argument] = ..., # Argument required: Optional[bool] = ..., # Parameter type: Optional[Union[type, ParamType]] = ..., default: Optional[Any] = ..., callback: Optional[_Callback] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ... ) -> _Decorator: ... def option( *param_decls: str, cls: Type[Option] = ..., # Option show_default: bool = ..., prompt: Union[bool, Text] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: Optional[bool] = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Optional[Union[type, ParamType]] = ..., help: Optional[str] = ..., # Parameter default: Optional[Any] = ..., required: bool = ..., callback: Optional[_Callback] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ... ) -> _Decorator: ... def confirmation_option( *param_decls: str, cls: Type[Option] = ..., # Option show_default: bool = ..., prompt: Union[bool, Text] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: bool = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Optional[Union[type, ParamType]] = ..., help: str = ..., # Parameter default: Optional[Any] = ..., callback: Optional[_Callback] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ... ) -> _Decorator: ... def password_option( *param_decls: str, cls: Type[Option] = ..., # Option show_default: bool = ..., prompt: Union[bool, Text] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: Optional[bool] = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Optional[Union[type, ParamType]] = ..., help: Optional[str] = ..., # Parameter default: Optional[Any] = ..., callback: Optional[_Callback] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ... ) -> _Decorator: ... 
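# --- Illustrative usage (not part of the stub) -------------------------------
# A hedged sketch of the decorator API stubbed above, close to the canonical
# click example; the command, option and argument names are placeholders.
import click

@click.command()
@click.option('--count', default=1, type=click.INT, help='Number of greetings.')
@click.option('--shout/--no-shout', default=False)
@click.argument('name')
def hello(count, shout, name):
    # click passes each option/argument as a keyword argument of the same name.
    for _ in range(count):
        greeting = 'Hello %s!' % name
        click.echo(greeting.upper() if shout else greeting)

if __name__ == '__main__':
    hello()
# ------------------------------------------------------------------------------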
def version_option( version: Optional[Union[str, Version]] = ..., *param_decls: str, cls: Type[Option] = ..., # Option prog_name: Optional[str] = ..., show_default: bool = ..., prompt: Union[bool, Text] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: bool = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Optional[Union[type, ParamType]] = ..., help: str = ..., # Parameter default: Optional[Any] = ..., callback: Optional[_Callback] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ... ) -> _Decorator: ... def help_option( *param_decls: str, cls: Type[Option] = ..., # Option show_default: bool = ..., prompt: Union[bool, Text] = ..., confirmation_prompt: bool = ..., hide_input: bool = ..., is_flag: bool = ..., flag_value: Optional[Any] = ..., multiple: bool = ..., count: bool = ..., allow_from_autoenv: bool = ..., type: Optional[Union[type, ParamType]] = ..., help: str = ..., # Parameter default: Optional[Any] = ..., callback: Optional[_Callback] = ..., nargs: Optional[int] = ..., metavar: Optional[str] = ..., expose_value: bool = ..., is_eager: bool = ..., envvar: Optional[Union[str, List[str]]] = ... ) -> _Decorator: ... mypy-0.560/typeshed/third_party/2and3/click/exceptions.pyi0000644€tŠÔÚ€2›s®0000000364013215007212027746 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import IO, List, Optional from click.core import Context, Parameter class ClickException(Exception): exit_code: int message: str def __init__(self, message: str) -> None: ... def format_message(self) -> str: ... def show(self, file=None) -> None: ... class UsageError(ClickException): ctx: Optional[Context] def __init__(self, message: str, ctx: Optional[Context] = ...) -> None: ... def show(self, file: Optional[IO] = ...) -> None: ... class BadParameter(UsageError): param: Optional[Parameter] param_hint: Optional[str] def __init__( self, message: str, ctx: Optional[Context] = ..., param: Optional[Parameter] = ..., param_hint: Optional[str] = ... ) -> None: ... class MissingParameter(BadParameter): param_type: str # valid values: 'parameter', 'option', 'argument' def __init__( self, message: Optional[str] = ..., ctx: Optional[Context] = ..., param: Optional[Parameter] = ..., param_hint: Optional[str] = ..., param_type: Optional[str] = ... ) -> None: ... class NoSuchOption(UsageError): option_name: str possibilities: Optional[List[str]] def __init__( self, option_name: str, message: Optional[str] = ..., possibilities: Optional[List[str]] = ..., ctx: Optional[Context] = ... ) -> None: ... class BadOptionUsage(UsageError): def __init__(self, message: str, ctx: Optional[Context] = ...) -> None: ... class BadArgumentUsage(UsageError): def __init__(self, message: str, ctx: Optional[Context] = ...) -> None: ... class FileError(ClickException): ui_filename: str filename: str def __init__(self, filename: str, hint: Optional[str] = ...) -> None: ... class Abort(RuntimeError): ... mypy-0.560/typeshed/third_party/2and3/click/formatting.pyi0000644€tŠÔÚ€2›s®0000000323313215007212027735 0ustar jukkaDROPBOX\Domain Users00000000000000from contextlib import contextmanager from typing import Generator, Iterable, List, Optional, Tuple FORCED_WIDTH: Optional[int] def measure_table(rows: Iterable[Iterable[str]]) -> Tuple[int, ...]: ... def iter_rows( rows: Iterable[Iterable[str]], col_count: int ) -> Generator[Tuple[str, ...], None, None]: ... 
def wrap_text( text: str, width: int = ..., initial_indent: str = ..., subsequent_indent: str = ..., preserve_paragraphs: bool = ... ) -> str: ... class HelpFormatter: indent_increment: int width: Optional[int] current_indent: int buffer: List[str] def __init__( self, indent_increment: int = ..., width: Optional[int] = ..., max_width: Optional[int] = ..., ) -> None: ... def write(self, string: str) -> None: ... def indent(self) -> None: ... def dedent(self) -> None: ... def write_usage( self, prog: str, args: str = ..., prefix: str = ..., ): ... def write_heading(self, heading: str) -> None: ... def write_paragraph(self) -> None: ... def write_text(self, text: str) -> None: ... def write_dl( self, rows: Iterable[Iterable[str]], col_max: int = ..., col_spacing: int = ..., ) -> None: ... @contextmanager def section(self, name) -> Generator[None, None, None]: ... @contextmanager def indentation(self) -> Generator[None, None, None]: ... def getvalue(self) -> str: ... def join_options(options: List[str]) -> Tuple[str, bool]: ... mypy-0.560/typeshed/third_party/2and3/click/globals.pyi0000644€tŠÔÚ€2›s®0000000045013215007212027204 0ustar jukkaDROPBOX\Domain Users00000000000000from click.core import Context from typing import Optional def get_current_context(silent: bool = ...) -> Context: ... def push_context(ctx: Context) -> None: ... def pop_context() -> None: ... def resolve_color_default(color: Optional[bool] = ...) -> Optional[bool]: ... mypy-0.560/typeshed/third_party/2and3/click/parser.pyi0000644€tŠÔÚ€2›s®0000000411213215007212027054 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Iterable, List, Optional, Set, Tuple from click.core import Context def _unpack_args( args: Iterable[str], nargs_spec: Iterable[int] ) -> Tuple[Tuple[Optional[Tuple[str, ...]], ...], List[str]]: ... def split_opt(opt: str) -> Tuple[str, str]: ... def normalize_opt(opt: str, ctx: Context) -> str: ... def split_arg_string(string: str) -> List[str]: ... class Option: dest: str action: str nargs: int const: Any obj: Any prefixes: Set[str] _short_opts: List[str] _long_opts: List[str] # properties takes_value: bool def __init__( self, opts: Iterable[str], dest: str, action: Optional[str] = ..., nargs: int = ..., const: Optional[Any] = ..., obj: Optional[Any] = ... ) -> None: ... def process(self, value: Any, state: 'ParsingState') -> None: ... class Argument: dest: str nargs: int obj: Any def __init__(self, dest: str, nargs: int = ..., obj: Optional[Any] = ...) -> None: ... def process(self, value: Any, state: 'ParsingState') -> None: ... class ParsingState: opts: Dict[str, Any] largs: List[str] rargs: List[str] order: List[Any] def __init__(self, rargs: List[str]) -> None: ... class OptionParser: ctx: Optional[Context] allow_interspersed_args: bool ignore_unknown_options: bool _short_opt: Dict[str, Option] _long_opt: Dict[str, Option] _opt_prefixes: Set[str] _args: List[Argument] def __init__(self, ctx: Optional[Context] = ...) -> None: ... def add_option( self, opts: Iterable[str], dest: str, action: Optional[str] = ..., nargs: int = ..., const: Optional[Any] = ..., obj: Optional[Any] = ... ) -> None: ... def add_argument(self, dest: str, nargs: int = ..., obj: Optional[Any] = ...) -> None: ... def parse_args( self, args: List[str] ) -> Tuple[Dict[str, Any], List[str], List[Any]]: ... 
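# Illustrative usage sketch (an assumption, not part of the stub): driving the
# OptionParser API declared above by hand.  The stub only fixes the call
# shapes; the runtime behaviour is click's own.
from click.parser import OptionParser

parser = OptionParser()
parser.add_option(["-n", "--name"], dest="name", nargs=1)
parser.add_argument("path", nargs=1)
opts, leftover, order = parser.parse_args(["-n", "demo", "input.txt"])
# opts  -> Dict[str, Any] of parsed values
# order -> the option/argument objects in the order they were parsed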
mypy-0.560/typeshed/third_party/2and3/click/termui.pyi0000644€tŠÔÚ€2›s®0000000531013215007212027066 0ustar jukkaDROPBOX\Domain Users00000000000000from contextlib import contextmanager from typing import ( Any, Callable, Generator, Iterable, IO, List, Optional, Tuple, TypeVar, ) def hidden_prompt_func(prompt: str) -> str: ... def _build_prompt( text: str, suffix: str, show_default: bool = ..., default: Optional[str] = ..., ) -> str: ... def prompt( text: str, default: Optional[str] = ..., hide_input: bool = ..., confirmation_prompt: bool = ..., type: Optional[Any] = ..., value_proc: Optional[Callable[[Optional[str]], Any]] = ..., prompt_suffix: str = ..., show_default: bool = ..., err: bool = ..., ) -> Any: ... def confirm( text: str, default: bool = ..., abort: bool = ..., prompt_suffix: str = ..., show_default: bool = ..., err: bool = ..., ) -> bool: ... def get_terminal_size() -> Tuple[int, int]: ... def echo_via_pager(text: str, color: Optional[bool] = ...) -> None: ... _T = TypeVar('_T') @contextmanager def progressbar( iterable: Optional[Iterable[_T]] = ..., length: Optional[int] = ..., label: Optional[str] = ..., show_eta: bool = ..., show_percent: Optional[bool] = ..., show_pos: bool = ..., item_show_func: Optional[Callable[[_T], str]] = ..., fill_char: str = ..., empty_char: str = ..., bar_template: str = ..., info_sep: str = ..., width: int = ..., file: Optional[IO] = ..., color: Optional[bool] = ..., ) -> Generator[_T, None, None]: ... def clear() -> None: ... def style( text: str, fg: Optional[str] = ..., bg: Optional[str] = ..., bold: Optional[bool] = ..., dim: Optional[bool] = ..., underline: Optional[bool] = ..., blink: Optional[bool] = ..., reverse: Optional[bool] = ..., reset: bool = ..., ): ... def unstyle(text: str) -> str: ... # Styling options copied from style() for nicer type checking. def secho( text: str, file: Optional[IO] = ..., nl: bool = ..., err: bool = ..., color: Optional[bool] = ..., fg: Optional[str] = ..., bg: Optional[str] = ..., bold: Optional[bool] = ..., dim: Optional[bool] = ..., underline: Optional[bool] = ..., blink: Optional[bool] = ..., reverse: Optional[bool] = ..., reset: bool = ..., ): ... def edit( text: Optional[str] = ..., editor: Optional[str] = ..., env: Optional[str] = ..., require_save: bool = ..., extension: str = ..., filename: Optional[str] = ..., ) -> str: ... def launch(url: str, wait: bool = ..., locate: bool = ...) -> int: ... def getchar(echo: bool = ...) -> str: ... def pause( info: str = ..., err: bool = ... ) -> None: ... mypy-0.560/typeshed/third_party/2and3/click/types.pyi0000644€tŠÔÚ€2›s®0000001313713215007212026733 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, IO, Iterable, List, Optional, TypeVar, Union import uuid from click.core import Context, Parameter class ParamType: name: str is_composite: bool envvar_list_splitter: Optional[str] def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> Any: ... def get_metavar(self, param: Parameter) -> str: ... def get_missing_message(self, param: Parameter) -> str: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> Any: ... def split_envvar_value(self, rv: str) -> List[str]: ... def fail(self, message: str, param: Optional[Parameter] = ..., ctx: Optional[Context] = ...) -> None: ... class BoolParamType(ParamType): def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> bool: ... 
def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> bool: ... class CompositeParamType(ParamType): arity: int class Choice(ParamType): choices: Iterable[str] def __init__(self, choices: Iterable[str]) -> None: ... class FloatParamType(ParamType): def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> float: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> float: ... class FloatRange(FloatParamType): ... class File(ParamType): def __init__( self, mode: str = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., lazy: Optional[bool] = ..., atomic: Optional[bool] = ..., ) -> None: ... def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> IO: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> IO: ... def resolve_lazy_flag(self, value: str) -> bool: ... _F = TypeVar('_F') # result of the function _Func = Callable[[Optional[str]], _F] class FuncParamType(ParamType): func: _Func def __init__(self, func: _Func) -> None: ... def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> _F: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> _F: ... class IntParamType(ParamType): def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> int: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> int: ... class IntRange(IntParamType): def __init__( self, min: Optional[int] = ..., max: Optional[int] = ..., clamp: bool = ... ) -> None: ... _PathType = TypeVar('_PathType', str, bytes) class Path(ParamType): def __init__( self, exists: bool = ..., file_okay: bool = ..., dir_okay: bool = ..., writable: bool = ..., readable: bool = ..., resolve_path: bool = ..., allow_dash: bool = ..., path_type: Optional[_PathType] = ..., ) -> None: ... def coerce_path_result(self, rv: Union[str, bytes]) -> _PathType: ... def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> _PathType: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> _PathType: ... class StringParamType(ParamType): def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> str: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> str: ... class Tuple(CompositeParamType): types: List[ParamType] def __init__(self, types: Iterable[Any]) -> None: ... def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> Tuple: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> Tuple: ... class UnprocessedParamType(ParamType): ... class UUIDParameterType(ParamType): def __call__( self, value: Optional[str], param: Optional[Parameter] = ..., ctx: Optional[Context] = ..., ) -> uuid.UUID: ... def convert( self, value: str, param: Optional[Parameter], ctx: Optional[Context], ) -> uuid.UUID: ... def convert_type(ty: Any, default: Optional[Any] = ...) -> ParamType: ... 
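# Illustrative usage sketch (an assumption, not part of the stub): the
# ParamType subclasses above are what click exposes as e.g. click.INT or
# click.IntRange; a converter can also be invoked directly.
from click.types import IntRange

percent = IntRange(min=0, max=100)
value = percent.convert("42", param=None, ctx=None)   # -> 42, an int per the stub
assert isinstance(value, int)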
# parameter type shortcuts BOOL = BoolParamType() FLOAT = FloatParamType() INT = IntParamType() STRING = StringParamType() UNPROCESSED = UnprocessedParamType() UUID = UUIDParameterType() mypy-0.560/typeshed/third_party/2and3/click/utils.pyi0000644€tŠÔÚ€2›s®0000000367113215007212026731 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Iterator, IO, List, Optional, TypeVar, Union, Text _T = TypeVar('_T') _Decorator = Callable[[_T], _T] def _posixify(name: str) -> str: ... def safecall(func: _T) -> _T: ... def make_str(value: Any) -> str: ... def make_default_short_help(help: str, max_length: int = ...): ... class LazyFile: name: str mode: str encoding: Optional[str] errors: str atomic: bool def __init__( self, filename: str, mode: str = ..., encoding: Optional[str] = ..., errors: str = ..., atomic: bool = ... ) -> None: ... def open(self) -> IO: ... def close(self) -> None: ... def close_intelligently(self) -> None: ... def __enter__(self) -> 'LazyFile': ... def __exit__(self, exc_type, exc_value, tb): ... def __iter__(self) -> Iterator: ... class KeepOpenFile: _file: IO def __init__(self, file: IO) -> None: ... def __enter__(self) -> 'KeepOpenFile': ... def __exit__(self, exc_type, exc_value, tb): ... def __iter__(self) -> Iterator: ... def echo( message: Optional[Union[bytes, Text]] = ..., file: Optional[IO] = ..., nl: bool = ..., err: bool = ..., color: Optional[bool] = ..., ) -> None: ... def get_binary_stream(name: str) -> IO[bytes]: ... def get_text_stream( name: str, encoding: Optional[str] = ..., errors: str = ... ) -> IO[str]: ... def open_file( filename: str, mode: str = ..., encoding: Optional[str] = ..., errors: str = ..., lazy: bool = ..., atomic: bool = ... ) -> Union[IO, LazyFile, KeepOpenFile]: ... def get_os_args() -> List[str]: ... def format_filename(filename: str, shorten: bool = ...) -> str: ... def get_app_dir( app_name: str, roaming: bool = ..., force_posix: bool = ... ) -> str: ... mypy-0.560/typeshed/third_party/2and3/croniter.pyi0000644€tŠÔÚ€2›s®0000000360013215007212026321 0ustar jukkaDROPBOX\Domain Users00000000000000import datetime from typing import Any, Dict, Iterator, List, Optional, Text, Tuple, Type, TypeVar, Union _RetType = Union[Type[float], Type[datetime.datetime]] _SelfT = TypeVar('_SelfT', bound=croniter) class CroniterError(ValueError): ... class CroniterBadCronError(CroniterError): ... class CroniterBadDateError(CroniterError): ... class CroniterNotAlphaError(CroniterError): ... class croniter(Iterator[Any]): MONTHS_IN_YEAR: int RANGES: Tuple[Tuple[int, int], ...] DAYS: Tuple[int, ...] ALPHACONV: Tuple[Dict[str, Any], ...] LOWMAP: Tuple[Dict[int, Any], ...] bad_length: str tzinfo: Optional[datetime.tzinfo] cur: float expanded: List[List[str]] start_time: float dst_start_time: float nth_weekday_of_month: Dict[str, Any] def __init__(self, expr_format: Text, start_time: Optional[Union[float, datetime.datetime]] = ..., ret_type: Optional[_RetType] = ...) -> None: ... # Most return value depend on ret_type, which can be passed in both as a method argument and as # a constructor argument. def get_next(self, ret_type: Optional[_RetType] = ...) -> Any: ... def get_prev(self, ret_type: Optional[_RetType] = ...) -> Any: ... def get_current(self, ret_type: Optional[_RetType] = ...) -> Any: ... def __iter__(self: _SelfT) -> _SelfT: ... def __next__(self, ret_type: Optional[_RetType] = ...) -> Any: ... def next(self, ret_type: Optional[_RetType] = ...) -> Any: ... def all_next(self, ret_type: Optional[_RetType] = ...) 
-> Iterator[Any]: ... def all_prev(self, ret_type: Optional[_RetType] = ...) -> Iterator[Any]: ... def iter(self, ret_type: Optional[_RetType] = ...) -> Iterator[Any]: ... def is_leap(self, year: int) -> bool: ... @classmethod def expand(cls, expr_format: Text) -> Tuple[List[List[str]], Dict[str, Any]]: ... @classmethod def is_valid(cls, expression: Text) -> bool: ... mypy-0.560/typeshed/third_party/2and3/Crypto/0000755€tŠÔÚ€2›s®0000000000013215007244025237 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/Crypto/__init__.pyi0000644€tŠÔÚ€2›s®0000000015513215007212027515 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # Cipher # Hash # Protocol # PublicKey # Signature # Util mypy-0.560/typeshed/third_party/2and3/Crypto/Cipher/0000755€tŠÔÚ€2›s®0000000000013215007244026451 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/Crypto/Cipher/__init__.pyi0000644€tŠÔÚ€2›s®0000000021613215007212030725 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # AES # ARC2 # ARC4 # Blowfish # CAST # DES # DES3 # PKCS1_OAEP # PKCS1_v1_5 # XOR mypy-0.560/typeshed/third_party/2and3/Crypto/Cipher/AES.pyi0000644€tŠÔÚ€2›s®0000000103113215007212027572 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from .blockalgo import BlockAlgo __revision__ = ... # type: str class AESCipher(BlockAlgo): def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def new(key: Union[bytes, Text], *args, **kwargs) -> AESCipher: ... MODE_ECB = ... # type: int MODE_CBC = ... # type: int MODE_CFB = ... # type: int MODE_PGP = ... # type: int MODE_OFB = ... # type: int MODE_CTR = ... # type: int MODE_OPENPGP = ... # type: int block_size = ... # type: int key_size = ... # type: int mypy-0.560/typeshed/third_party/2and3/Crypto/Cipher/ARC2.pyi0000644€tŠÔÚ€2›s®0000000103113215007212027651 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from .blockalgo import BlockAlgo __revision__ = ... # type: str class RC2Cipher(BlockAlgo): def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def new(key: Union[bytes, Text], *args, **kwargs) -> RC2Cipher: ... MODE_ECB = ... # type: int MODE_CBC = ... # type: int MODE_CFB = ... # type: int MODE_PGP = ... # type: int MODE_OFB = ... # type: int MODE_CTR = ... # type: int MODE_OPENPGP = ... # type: int block_size = ... # type: int key_size = ... # type: int mypy-0.560/typeshed/third_party/2and3/Crypto/Cipher/ARC4.pyi0000644€tŠÔÚ€2›s®0000000066613215007212027670 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text __revision__ = ... # type: str class ARC4Cipher: block_size = ... # type: int key_size = ... # type: int def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def encrypt(self, plaintext): ... def decrypt(self, ciphertext): ... def new(key: Union[bytes, Text], *args, **kwargs) -> ARC4Cipher: ... block_size = ... # type: int key_size = ... # type: int mypy-0.560/typeshed/third_party/2and3/Crypto/Cipher/blockalgo.pyi0000644€tŠÔÚ€2›s®0000000105513215007212031125 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text MODE_ECB = ... # type: int MODE_CBC = ... # type: int MODE_CFB = ... # type: int MODE_PGP = ... # type: int MODE_OFB = ... # type: int MODE_CTR = ... # type: int MODE_OPENPGP = ... # type: int class BlockAlgo: mode = ... # type: int block_size = ... # type: int IV = ... 
# type: Any def __init__(self, factory: Any, key: Union[bytes, Text], *args, **kwargs) -> None: ... def encrypt(self, plaintext: Union[bytes, Text]) -> bytes: ... def decrypt(self, ciphertext: bytes) -> bytes: ... mypy-0.560/typeshed/third_party/2and3/Crypto/Cipher/Blowfish.pyi0000644€tŠÔÚ€2›s®0000000104313215007212030742 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from .blockalgo import BlockAlgo __revision__ = ... # type: str class BlowfishCipher(BlockAlgo): def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def new(key: Union[bytes, Text], *args, **kwargs) -> BlowfishCipher: ... MODE_ECB = ... # type: int MODE_CBC = ... # type: int MODE_CFB = ... # type: int MODE_PGP = ... # type: int MODE_OFB = ... # type: int MODE_CTR = ... # type: int MODE_OPENPGP = ... # type: int block_size = ... # type: int key_size = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Cipher/CAST.pyi0000644€tŠÔÚ€2›s®0000000104113215007212027715 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from .blockalgo import BlockAlgo __revision__ = ... # type: str class CAST128Cipher(BlockAlgo): def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def new(key: Union[bytes, Text], *args, **kwargs) -> CAST128Cipher: ... MODE_ECB = ... # type: int MODE_CBC = ... # type: int MODE_CFB = ... # type: int MODE_PGP = ... # type: int MODE_OFB = ... # type: int MODE_CTR = ... # type: int MODE_OPENPGP = ... # type: int block_size = ... # type: int key_size = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Cipher/DES.pyi0000644€tŠÔÚ€2›s®0000000103113215007212027575 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from .blockalgo import BlockAlgo __revision__ = ... # type: str class DESCipher(BlockAlgo): def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def new(key: Union[bytes, Text], *args, **kwargs) -> DESCipher: ... MODE_ECB = ... # type: int MODE_CBC = ... # type: int MODE_CFB = ... # type: int MODE_PGP = ... # type: int MODE_OFB = ... # type: int MODE_CTR = ... # type: int MODE_OPENPGP = ... # type: int block_size = ... # type: int key_size = ... # type: int mypy-0.560/typeshed/third_party/2and3/Crypto/Cipher/DES3.pyi0000644€tŠÔÚ€2›s®0000000103413215007212027663 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from .blockalgo import BlockAlgo __revision__ = ... # type: str class DES3Cipher(BlockAlgo): def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def new(key: Union[bytes, Text], *args, **kwargs) -> DES3Cipher: ... MODE_ECB = ... # type: int MODE_CBC = ... # type: int MODE_CFB = ... # type: int MODE_PGP = ... # type: int MODE_OFB = ... # type: int MODE_CTR = ... # type: int MODE_OPENPGP = ... # type: int block_size = ... # type: int key_size = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Cipher/PKCS1_OAEP.pyi0000644€tŠÔÚ€2›s®0000000077013215007212030620 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Union, Text from Crypto.PublicKey.RSA import _RSAobj class PKCS1OAEP_Cipher: def __init__(self, key: _RSAobj, hashAlgo: Any, mgfunc: Any, label: Any) -> None: ... def can_encrypt(self): ... def can_decrypt(self): ... def encrypt(self, message: Union[bytes, Text]) -> bytes: ... def decrypt(self, ct: bytes) -> bytes: ... def new(key: _RSAobj, hashAlgo: Optional[Any] = ..., mgfunc: Optional[Any] = ..., label: Any = ...) 
-> PKCS1OAEP_Cipher: ... mypy-0.560/typeshed/third_party/2and3/Crypto/Cipher/PKCS1_v1_5.pyi0000644€tŠÔÚ€2›s®0000000065413215007212030647 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text from Crypto.PublicKey.RSA import _RSAobj class PKCS115_Cipher: def __init__(self, key: _RSAobj) -> None: ... def can_encrypt(self) -> bool: ... def can_decrypt(self) -> bool: ... rf = ... # type: Any def encrypt(self, message: Union[bytes, Text]) -> bytes: ... def decrypt(self, ct: bytes, sentinel: Any) -> bytes: ... def new(key: _RSAobj) -> PKCS115_Cipher: ... mypy-0.560/typeshed/third_party/2and3/Crypto/Cipher/XOR.pyi0000644€tŠÔÚ€2›s®0000000074213215007212027642 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Union, Text __revision__ = ... # type: str class XORCipher: block_size = ... # type: int key_size = ... # type: int def __init__(self, key: Union[bytes, Text], *args, **kwargs) -> None: ... def encrypt(self, plaintext: Union[bytes, Text]) -> bytes: ... def decrypt(self, ciphertext: bytes) -> bytes: ... def new(key: Union[bytes, Text], *args, **kwargs) -> XORCipher: ... block_size = ... # type: int key_size = ... # type: int mypy-0.560/typeshed/third_party/2and3/Crypto/Hash/0000755€tŠÔÚ€2›s®0000000000013215007244026122 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/Crypto/Hash/__init__.pyi0000644€tŠÔÚ€2›s®0000000020713215007212030376 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # HMAC # MD2 # MD4 # MD5 # RIPEMD # SHA # SHA224 # SHA256 # SHA384 # SHA512 mypy-0.560/typeshed/third_party/2and3/Crypto/Hash/hashalgo.pyi0000644€tŠÔÚ€2›s®0000000054413215007212030431 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class HashAlgo: digest_size = ... # type: Any block_size = ... # type: Any def __init__(self, hashFactory, data: Optional[Any] = ...) -> None: ... def update(self, data): ... def digest(self): ... def hexdigest(self): ... def copy(self): ... def new(self, data: Optional[Any] = ...): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Hash/HMAC.pyi0000644€tŠÔÚ€2›s®0000000075213215007212027354 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional digest_size = ... # type: Any class HMAC: digest_size = ... # type: Any digestmod = ... # type: Any outer = ... # type: Any inner = ... # type: Any def __init__(self, key, msg: Optional[Any] = ..., digestmod: Optional[Any] = ...) -> None: ... def update(self, msg): ... def copy(self): ... def digest(self): ... def hexdigest(self): ... def new(key, msg: Optional[Any] = ..., digestmod: Optional[Any] = ...): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Hash/MD2.pyi0000644€tŠÔÚ€2›s®0000000057713215007212027233 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class MD2Hash(HashAlgo): oid = ... # type: Any digest_size = ... # type: int block_size = ... # type: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Hash/MD4.pyi0000644€tŠÔÚ€2›s®0000000057713215007212027235 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class MD4Hash(HashAlgo): oid = ... # type: Any digest_size = ... # type: int block_size = ... # type: int def __init__(self, data: Optional[Any] = ...) 
-> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Hash/MD5.pyi0000644€tŠÔÚ€2›s®0000000057713215007212027236 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class MD5Hash(HashAlgo): oid = ... # type: Any digest_size = ... # type: int block_size = ... # type: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Hash/RIPEMD.pyi0000644€tŠÔÚ€2›s®0000000060513215007212027621 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class RIPEMD160Hash(HashAlgo): oid = ... # type: Any digest_size = ... # type: int block_size = ... # type: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Hash/SHA.pyi0000644€tŠÔÚ€2›s®0000000060013215007212027247 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class SHA1Hash(HashAlgo): oid = ... # type: Any digest_size = ... # type: int block_size = ... # type: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Hash/SHA224.pyi0000644€tŠÔÚ€2›s®0000000060213215007212027501 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class SHA224Hash(HashAlgo): oid = ... # type: Any digest_size = ... # type: int block_size = ... # type: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Hash/SHA256.pyi0000644€tŠÔÚ€2›s®0000000060213215007212027506 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class SHA256Hash(HashAlgo): oid = ... # type: Any digest_size = ... # type: int block_size = ... # type: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Hash/SHA384.pyi0000644€tŠÔÚ€2›s®0000000060213215007212027510 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class SHA384Hash(HashAlgo): oid = ... # type: Any digest_size = ... # type: int block_size = ... # type: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Hash/SHA512.pyi0000644€tŠÔÚ€2›s®0000000060213215007212027501 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash.hashalgo import HashAlgo class SHA512Hash(HashAlgo): oid = ... # type: Any digest_size = ... # type: int block_size = ... 
# type: int def __init__(self, data: Optional[Any] = ...) -> None: ... def new(self, data: Optional[Any] = ...): ... def new(data: Optional[Any] = ...): ... digest_size = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/pct_warnings.pyi0000644€tŠÔÚ€2›s®0000000063413215007212030456 0ustar jukkaDROPBOX\Domain Users00000000000000class CryptoWarning(Warning): ... class CryptoDeprecationWarning(DeprecationWarning, CryptoWarning): ... class CryptoRuntimeWarning(RuntimeWarning, CryptoWarning): ... class RandomPool_DeprecationWarning(CryptoDeprecationWarning): ... class ClockRewindWarning(CryptoRuntimeWarning): ... class GetRandomNumber_DeprecationWarning(CryptoDeprecationWarning): ... class PowmInsecureWarning(CryptoRuntimeWarning): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Protocol/0000755€tŠÔÚ€2›s®0000000000013215007244027040 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/Crypto/Protocol/__init__.pyi0000644€tŠÔÚ€2›s®0000000011513215007212031312 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # AllOrNothing # Chaffing # KDF mypy-0.560/typeshed/third_party/2and3/Crypto/Protocol/AllOrNothing.pyi0000644€tŠÔÚ€2›s®0000000042413215007212032116 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional __revision__ = ... # type: str def isInt(x): ... class AllOrNothing: def __init__(self, ciphermodule, mode: Optional[Any] = ..., IV: Optional[Any] = ...) -> None: ... def digest(self, text): ... def undigest(self, blocks): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Protocol/Chaffing.pyi0000644€tŠÔÚ€2›s®0000000023613215007212031264 0ustar jukkaDROPBOX\Domain Users00000000000000__revision__ = ... # type: str class Chaff: def __init__(self, factor: float = ..., blocksper: int = ...) -> None: ... def chaff(self, blocks): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Protocol/KDF.pyi0000644€tŠÔÚ€2›s®0000000043513215007212030164 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.Hash import SHA as SHA1 __revision__ = ... # type: str def PBKDF1(password, salt, dkLen, count: int = ..., hashAlgo: Optional[Any] = ...): ... def PBKDF2(password, salt, dkLen: int = ..., count: int = ..., prf: Optional[Any] = ...): ... mypy-0.560/typeshed/third_party/2and3/Crypto/PublicKey/0000755€tŠÔÚ€2›s®0000000000013215007244027126 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/Crypto/PublicKey/__init__.pyi0000644€tŠÔÚ€2›s®0000000010313215007212031375 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # DSA # ElGamal # RSA mypy-0.560/typeshed/third_party/2and3/Crypto/PublicKey/DSA.pyi0000644€tŠÔÚ€2›s®0000000150713215007212030256 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from .pubkey import pubkey class _DSAobj(pubkey): keydata = ... # type: Any implementation = ... # type: Any key = ... # type: Any def __init__(self, implementation, key) -> None: ... def __getattr__(self, attrname): ... def sign(self, M, K): ... def verify(self, M, signature): ... def has_private(self): ... def size(self): ... def can_blind(self): ... def can_encrypt(self): ... def can_sign(self): ... def publickey(self): ... class DSAImplementation: error = ... # type: Any def __init__(self, **kwargs) -> None: ... def generate(self, bits, randfunc: Optional[Any] = ..., progress_func: Optional[Any] = ...): ... def construct(self, tup): ... generate = ... # type: Any construct = ... 
# type: Any error = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/PublicKey/ElGamal.pyi0000644€tŠÔÚ€2›s®0000000104113215007212031142 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from Crypto.PublicKey.pubkey import pubkey from Crypto.PublicKey.pubkey import * # noqa: F403 class error(Exception): ... def generate(bits, randfunc, progress_func: Optional[Any] = ...): ... def construct(tup): ... class ElGamalobj(pubkey): keydata = ... # type: Any def encrypt(self, plaintext, K): ... def decrypt(self, ciphertext): ... def sign(self, M, K): ... def verify(self, M, signature): ... def size(self): ... def has_private(self): ... def publickey(self): ... mypy-0.560/typeshed/third_party/2and3/Crypto/PublicKey/pubkey.pyi0000644€tŠÔÚ€2›s®0000000116113215007212031142 0ustar jukkaDROPBOX\Domain Users00000000000000from Crypto.Util.number import * # noqa: F403 __revision__ = ... # type: str class pubkey: def __init__(self) -> None: ... def encrypt(self, plaintext, K): ... def decrypt(self, ciphertext): ... def sign(self, M, K): ... def verify(self, M, signature): ... def validate(self, M, signature): ... def blind(self, M, B): ... def unblind(self, M, B): ... def can_sign(self): ... def can_encrypt(self): ... def can_blind(self): ... def size(self): ... def has_private(self): ... def publickey(self): ... def __eq__(self, other): ... def __ne__(self, other): ... mypy-0.560/typeshed/third_party/2and3/Crypto/PublicKey/RSA.pyi0000644€tŠÔÚ€2›s®0000000226413215007212030275 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Union, Text from .pubkey import pubkey class _RSAobj(pubkey): keydata = ... # type: Any implementation = ... # type: Any key = ... # type: Any def __init__(self, implementation, key, randfunc: Optional[Any] = ...) -> None: ... def __getattr__(self, attrname): ... def encrypt(self, plaintext, K): ... def decrypt(self, ciphertext): ... def sign(self, M, K): ... def verify(self, M, signature): ... def has_private(self): ... def size(self): ... def can_blind(self): ... def can_encrypt(self): ... def can_sign(self): ... def publickey(self): ... def exportKey(self, format: str = ..., passphrase: Optional[Any] = ..., pkcs: int = ...): ... class RSAImplementation: error = ... # type: Any def __init__(self, **kwargs) -> None: ... def generate(self, bits, randfunc: Optional[Any] = ..., progress_func: Optional[Any] = ..., e: int = ...): ... def construct(self, tup): ... def importKey(self, externKey: Any, passphrase: Union[None, bytes, Text] = ...) -> _RSAobj: ... generate = ... # type: Any construct = ... # type: Any importKey = ... # type: Any error = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Random/0000755€tŠÔÚ€2›s®0000000000013215007244026457 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/Crypto/Random/__init__.pyi0000644€tŠÔÚ€2›s®0000000003613215007212030733 0ustar jukkaDROPBOX\Domain Users00000000000000def new(*args, **kwargs): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Random/Fortuna/0000755€tŠÔÚ€2›s®0000000000013215007244030075 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/Crypto/Random/Fortuna/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212032340 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/Crypto/Random/Fortuna/FortunaAccumulator.pyi0000644€tŠÔÚ€2›s®0000000127013215007212034431 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any __revision__ = ... 
# type: str class FortunaPool: digest_size = ... # type: Any def __init__(self) -> None: ... def append(self, data): ... def digest(self): ... def hexdigest(self): ... length = ... # type: int def reset(self): ... def which_pools(r): ... class FortunaAccumulator: min_pool_size = ... # type: int reseed_interval = ... # type: float reseed_count = ... # type: int generator = ... # type: Any last_reseed = ... # type: Any pools = ... # type: Any def __init__(self) -> None: ... def random_data(self, bytes): ... def add_random_event(self, source_number, pool_number, data): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Random/Fortuna/FortunaGenerator.pyi0000644€tŠÔÚ€2›s®0000000074313215007212034104 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any __revision__ = ... # type: str class AESGenerator: block_size = ... # type: Any key_size = ... # type: int max_blocks_per_request = ... # type: Any counter = ... # type: Any key = ... # type: Any block_size_shift = ... # type: Any blocks_per_key = ... # type: Any max_bytes_per_request = ... # type: Any def __init__(self) -> None: ... def reseed(self, seed): ... def pseudo_random_data(self, bytes): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Random/Fortuna/SHAd256.pyi0000644€tŠÔÚ€2›s®0000000052613215007212031632 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class _SHAd256: digest_size = ... # type: Any def __init__(self, internal_api_check, sha256_hash_obj) -> None: ... def copy(self): ... def digest(self): ... def hexdigest(self): ... def update(self, data): ... digest_size = ... # type: Any def new(data: Optional[Any] = ...): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Random/OSRNG/0000755€tŠÔÚ€2›s®0000000000013215007244027347 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/Crypto/Random/OSRNG/__init__.pyi0000644€tŠÔÚ€2›s®0000000004013215007212031616 0ustar jukkaDROPBOX\Domain Users00000000000000__revision__ = ... # type: str mypy-0.560/typeshed/third_party/2and3/Crypto/Random/OSRNG/fallback.pyi0000644€tŠÔÚ€2›s®0000000020213215007212031616 0ustar jukkaDROPBOX\Domain Users00000000000000from .rng_base import BaseRNG class PythonOSURandomRNG(BaseRNG): name = ... # type: str def __init__(self) -> None: ... mypy-0.560/typeshed/third_party/2and3/Crypto/Random/OSRNG/posix.pyi0000644€tŠÔÚ€2›s®0000000027413215007212031232 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from .rng_base import BaseRNG class DevURandomRNG(BaseRNG): name = ... # type: str def __init__(self, devname: Optional[Any] = ...) -> None: ... mypy-0.560/typeshed/third_party/2and3/Crypto/Random/OSRNG/rng_base.pyi0000644€tŠÔÚ€2›s®0000000043713215007212031651 0ustar jukkaDROPBOX\Domain Users00000000000000__revision__ = ... # type: str class BaseRNG: closed = ... # type: bool def __init__(self) -> None: ... def __del__(self): ... def __enter__(self): ... def __exit__(self): ... def close(self): ... def flush(self): ... def read(self, N: int = ...): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Random/random.pyi0000644€tŠÔÚ€2›s®0000000100713215007212030453 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class StrongRandom: def __init__(self, rng: Optional[Any] = ..., randfunc: Optional[Any] = ...) -> None: ... def getrandbits(self, k): ... def randrange(self, *args): ... def randint(self, a, b): ... def choice(self, seq): ... def shuffle(self, x): ... def sample(self, population, k): ... getrandbits = ... # type: Any randrange = ... 
# type: Any randint = ... # type: Any choice = ... # type: Any shuffle = ... # type: Any sample = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Signature/0000755€tŠÔÚ€2›s®0000000000013215007244027200 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/Crypto/Signature/__init__.pyi0000644€tŠÔÚ€2›s®0000000010413215007212031450 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # PKCS1_PSS # PKCS1_v1_5 mypy-0.560/typeshed/third_party/2and3/Crypto/Signature/PKCS1_PSS.pyi0000644€tŠÔÚ€2›s®0000000043613215007212031267 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class PSS_SigScheme: def __init__(self, key, mgfunc, saltLen) -> None: ... def can_sign(self): ... def sign(self, mhash): ... def verify(self, mhash, S): ... def new(key, mgfunc: Optional[Any] = ..., saltLen: Optional[Any] = ...): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Signature/PKCS1_v1_5.pyi0000644€tŠÔÚ€2›s®0000000026413215007212031373 0ustar jukkaDROPBOX\Domain Users00000000000000class PKCS115_SigScheme: def __init__(self, key) -> None: ... def can_sign(self): ... def sign(self, mhash): ... def verify(self, mhash, S): ... def new(key): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Util/0000755€tŠÔÚ€2›s®0000000000013215007244026154 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/Crypto/Util/__init__.pyi0000644€tŠÔÚ€2›s®0000000013713215007212030432 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # RFC1751 # asn1 # number # randpool # strxor mypy-0.560/typeshed/third_party/2and3/Crypto/Util/asn1.pyi0000644€tŠÔÚ€2›s®0000000274613215007212027545 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class DerObject: typeTags = ... # type: Any typeTag = ... # type: Any payload = ... # type: Any def __init__(self, ASN1Type: Optional[Any] = ..., payload: Any = ...) -> None: ... def isType(self, ASN1Type): ... def encode(self): ... def decode(self, derEle, noLeftOvers: int = ...): ... class DerInteger(DerObject): value = ... # type: Any def __init__(self, value: int = ...) -> None: ... payload = ... # type: Any def encode(self): ... def decode(self, derEle, noLeftOvers: int = ...): ... class DerSequence(DerObject): def __init__(self, startSeq: Optional[Any] = ...) -> None: ... def __delitem__(self, n): ... def __getitem__(self, n): ... def __setitem__(self, key, value): ... def __setslice__(self, i, j, sequence): ... def __delslice__(self, i, j): ... def __getslice__(self, i, j): ... def __len__(self): ... def append(self, item): ... def hasInts(self): ... def hasOnlyInts(self): ... payload = ... # type: Any def encode(self): ... def decode(self, derEle, noLeftOvers: int = ...): ... class DerOctetString(DerObject): payload = ... # type: Any def __init__(self, value: Any = ...) -> None: ... def decode(self, derEle, noLeftOvers: int = ...): ... class DerNull(DerObject): def __init__(self) -> None: ... class DerObjectId(DerObject): def __init__(self) -> None: ... def decode(self, derEle, noLeftOvers: int = ...): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Util/Counter.pyi0000644€tŠÔÚ€2›s®0000000033013215007212030305 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def new(nbits, prefix: Any = ..., suffix: Any = ..., initial_value: int = ..., overflow: int = ..., little_endian: bool = ..., allow_wraparound: bool = ..., disable_shortcut: bool = ...): ... 
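# Illustrative usage sketch (an assumption, based on the PyCrypto API covered
# by these stubs): Counter.new() above is normally paired with a CTR-mode
# block cipher such as AES.
from Crypto.Cipher import AES
from Crypto.Util import Counter

key = b"0123456789abcdef"                      # 16 bytes -> AES-128
cipher = AES.new(key, AES.MODE_CTR, counter=Counter.new(128))
ciphertext = cipher.encrypt(b"attack at dawn")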
mypy-0.560/typeshed/third_party/2and3/Crypto/Util/number.pyi0000644€tŠÔÚ€2›s®0000000151313215007212030162 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from warnings import warn as _warn __revision__ = ... # type: str bignum = ... # type: Any def size(N): ... def getRandomNumber(N, randfunc: Optional[Any] = ...): ... def getRandomInteger(N, randfunc: Optional[Any] = ...): ... def getRandomRange(a, b, randfunc: Optional[Any] = ...): ... def getRandomNBitInteger(N, randfunc: Optional[Any] = ...): ... def GCD(x, y): ... def inverse(u, v): ... def getPrime(N, randfunc: Optional[Any] = ...): ... def getStrongPrime(N, e: int = ..., false_positive_prob: float = ..., randfunc: Optional[Any] = ...): ... def isPrime(N, false_positive_prob: float = ..., randfunc: Optional[Any] = ...): ... def long_to_bytes(n, blocksize: int = ...): ... def bytes_to_long(s): ... def long2str(n, blocksize: int = ...): ... def str2long(s): ... sieve_base = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Util/randpool.pyi0000644€tŠÔÚ€2›s®0000000111513215007212030506 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional __revision__ = ... # type: str class RandomPool: bytes = ... # type: Any bits = ... # type: Any entropy = ... # type: Any def __init__(self, numbytes: int = ..., cipher: Optional[Any] = ..., hash: Optional[Any] = ..., file: Optional[Any] = ...) -> None: ... def get_bytes(self, N): ... def randomize(self, N: int = ...): ... def stir(self, s: str = ...): ... def stir_n(self, N: int = ...): ... def add_event(self, s: str = ...): ... def getBytes(self, N): ... def addEvent(self, event, s: str = ...): ... mypy-0.560/typeshed/third_party/2and3/Crypto/Util/RFC1751.pyi0000644€tŠÔÚ€2›s®0000000025013215007212027617 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any __revision__ = ... # type: str binary = ... # type: Any def key_to_english(key): ... def english_to_key(s): ... wordlist = ... # type: Any mypy-0.560/typeshed/third_party/2and3/Crypto/Util/strxor.pyi0000644€tŠÔÚ€2›s®0000000010413215007212030226 0ustar jukkaDROPBOX\Domain Users00000000000000def strxor(*args, **kwargs): ... def strxor_c(*args, **kwargs): ... mypy-0.560/typeshed/third_party/2and3/emoji.pyi0000644€tŠÔÚ€2›s®0000000060713215007212025603 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Tuple, Pattern, List, Dict, Union _DEFAULT_DELIMITER = ... # type: str def emojize( string: str, use_aliases: bool=..., delimiters: Tuple[str, str]=... ) -> str: ... def demojize( string: str, delimiters: Tuple[str, str]=... ) -> str: ... def get_emoji_regexp() -> Pattern: ... def emoji_lis(string: str) -> List[Dict[str, Union[int, str]]]: ... 
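# Illustrative usage sketch (an assumption, not part of the stub): a round
# trip through the emoji helpers declared above.  The exact short-codes
# accepted depend on the emoji data shipped with the library.
import emoji

decorated = emoji.emojize("Python is :snake:", use_aliases=True)
plain = emoji.demojize(decorated)              # back to the ":snake:" form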
mypy-0.560/typeshed/third_party/2and3/jinja2/0000755€tŠÔÚ€2›s®0000000000013215007244025134 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/jinja2/__init__.pyi0000644€tŠÔÚ€2›s®0000000253313215007212027414 0ustar jukkaDROPBOX\Domain Users00000000000000from jinja2.environment import Environment as Environment, Template as Template from jinja2.loaders import BaseLoader as BaseLoader, FileSystemLoader as FileSystemLoader, PackageLoader as PackageLoader, DictLoader as DictLoader, FunctionLoader as FunctionLoader, PrefixLoader as PrefixLoader, ChoiceLoader as ChoiceLoader, ModuleLoader as ModuleLoader from jinja2.bccache import BytecodeCache as BytecodeCache, FileSystemBytecodeCache as FileSystemBytecodeCache, MemcachedBytecodeCache as MemcachedBytecodeCache from jinja2.runtime import Undefined as Undefined, DebugUndefined as DebugUndefined, StrictUndefined as StrictUndefined, make_logging_undefined as make_logging_undefined from jinja2.exceptions import TemplateError as TemplateError, UndefinedError as UndefinedError, TemplateNotFound as TemplateNotFound, TemplatesNotFound as TemplatesNotFound, TemplateSyntaxError as TemplateSyntaxError, TemplateAssertionError as TemplateAssertionError from jinja2.filters import environmentfilter as environmentfilter, contextfilter as contextfilter, evalcontextfilter as evalcontextfilter from jinja2.utils import Markup as Markup, escape as escape, clear_caches as clear_caches, environmentfunction as environmentfunction, evalcontextfunction as evalcontextfunction, contextfunction as contextfunction, is_undefined as is_undefined, select_autoescape as select_autoescape mypy-0.560/typeshed/third_party/2and3/jinja2/_compat.pyi0000644€tŠÔÚ€2›s®0000000161613215007212027300 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional import sys if sys.version_info[0] >= 3: from io import BytesIO from urllib.parse import quote_from_bytes as url_quote else: from cStringIO import StringIO as BytesIO from urllib import quote as url_quote PY2 = ... # type: Any PYPY = ... # type: Any unichr = ... # type: Any range_type = ... # type: Any text_type = ... # type: Any string_types = ... # type: Any integer_types = ... # type: Any iterkeys = ... # type: Any itervalues = ... # type: Any iteritems = ... # type: Any NativeStringIO = ... # type: Any def reraise(tp, value, tb: Optional[Any] = ...): ... ifilter = ... # type: Any imap = ... # type: Any izip = ... # type: Any intern = ... # type: Any implements_iterator = ... # type: Any implements_to_string = ... # type: Any encode_filename = ... # type: Any get_next = ... # type: Any def with_metaclass(meta, *bases): ... mypy-0.560/typeshed/third_party/2and3/jinja2/_stringdefs.pyi0000644€tŠÔÚ€2›s®0000000146613215007212030170 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any Cc = ... # type: str Cf = ... # type: str Cn = ... # type: str Co = ... # type: str Cs = ... # type: Any Ll = ... # type: str Lm = ... # type: str Lo = ... # type: str Lt = ... # type: str Lu = ... # type: str Mc = ... # type: str Me = ... # type: str Mn = ... # type: str Nd = ... # type: str Nl = ... # type: str No = ... # type: str Pc = ... # type: str Pd = ... # type: str Pe = ... # type: str Pf = ... # type: str Pi = ... # type: str Po = ... # type: str Ps = ... # type: str Sc = ... # type: str Sk = ... # type: str Sm = ... # type: str So = ... # type: str Zl = ... # type: str Zp = ... # type: str Zs = ... # type: str cats = ... # type: Any def combine(*args): ... xid_start = ... 
# type: str xid_continue = ... # type: str def allexcept(*args): ... mypy-0.560/typeshed/third_party/2and3/jinja2/bccache.pyi0000644€tŠÔÚ€2›s®0000000307013215007212027222 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional marshal_dump = ... # type: Any marshal_load = ... # type: Any bc_version = ... # type: int bc_magic = ... # type: Any class Bucket: environment = ... # type: Any key = ... # type: Any checksum = ... # type: Any def __init__(self, environment, key, checksum) -> None: ... code = ... # type: Any def reset(self): ... def load_bytecode(self, f): ... def write_bytecode(self, f): ... def bytecode_from_string(self, string): ... def bytecode_to_string(self): ... class BytecodeCache: def load_bytecode(self, bucket): ... def dump_bytecode(self, bucket): ... def clear(self): ... def get_cache_key(self, name, filename: Optional[Any] = ...): ... def get_source_checksum(self, source): ... def get_bucket(self, environment, name, filename, source): ... def set_bucket(self, bucket): ... class FileSystemBytecodeCache(BytecodeCache): directory = ... # type: Any pattern = ... # type: Any def __init__(self, directory: Optional[Any] = ..., pattern: str = ...) -> None: ... def load_bytecode(self, bucket): ... def dump_bytecode(self, bucket): ... def clear(self): ... class MemcachedBytecodeCache(BytecodeCache): client = ... # type: Any prefix = ... # type: Any timeout = ... # type: Any ignore_memcache_errors = ... # type: Any def __init__(self, client, prefix: str = ..., timeout: Optional[Any] = ..., ignore_memcache_errors: bool = ...) -> None: ... def load_bytecode(self, bucket): ... def dump_bytecode(self, bucket): ... mypy-0.560/typeshed/third_party/2and3/jinja2/compiler.pyi0000644€tŠÔÚ€2›s®0000001562013215007212027470 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from keyword import iskeyword as is_python_keyword from jinja2.visitor import NodeVisitor operators = ... # type: Any dict_item_iter = ... # type: str unoptimize_before_dead_code = ... # type: bool def generate(node, environment, name, filename, stream: Optional[Any] = ..., defer_init: bool = ...): ... def has_safe_repr(value): ... def find_undeclared(nodes, names): ... class Identifiers: declared = ... # type: Any outer_undeclared = ... # type: Any undeclared = ... # type: Any declared_locally = ... # type: Any declared_parameter = ... # type: Any def __init__(self) -> None: ... def add_special(self, name): ... def is_declared(self, name): ... def copy(self): ... class Frame: eval_ctx = ... # type: Any identifiers = ... # type: Any toplevel = ... # type: bool rootlevel = ... # type: bool require_output_check = ... # type: Any buffer = ... # type: Any block = ... # type: Any assigned_names = ... # type: Any parent = ... # type: Any def __init__(self, eval_ctx, parent: Optional[Any] = ...) -> None: ... def copy(self): ... def inspect(self, nodes): ... def find_shadowed(self, extra: Any = ...): ... def inner(self): ... def soft(self): ... __copy__ = ... # type: Any class VisitorExit(RuntimeError): ... class DependencyFinderVisitor(NodeVisitor): filters = ... # type: Any tests = ... # type: Any def __init__(self) -> None: ... def visit_Filter(self, node): ... def visit_Test(self, node): ... def visit_Block(self, node): ... class UndeclaredNameVisitor(NodeVisitor): names = ... # type: Any undeclared = ... # type: Any def __init__(self, names) -> None: ... def visit_Name(self, node): ... def visit_Block(self, node): ... 
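# Illustrative usage sketch (an assumption, not part of the stub): the
# visitors above back find_undeclared(), which reports which of the given
# names a parsed template would pull from its render context.
from jinja2 import Environment
from jinja2.compiler import find_undeclared

env = Environment()
ast = env.parse("Hello {{ user }}!")
needed = find_undeclared(ast.body, ("user", "something_else"))  # likely {'user'}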
class FrameIdentifierVisitor(NodeVisitor): identifiers = ... # type: Any def __init__(self, identifiers) -> None: ... def visit_Name(self, node): ... def visit_If(self, node): ... def visit_Macro(self, node): ... def visit_Import(self, node): ... def visit_FromImport(self, node): ... def visit_Assign(self, node): ... def visit_For(self, node): ... def visit_CallBlock(self, node): ... def visit_FilterBlock(self, node): ... def visit_AssignBlock(self, node): ... def visit_Scope(self, node): ... def visit_Block(self, node): ... class CompilerExit(Exception): ... class CodeGenerator(NodeVisitor): environment = ... # type: Any name = ... # type: Any filename = ... # type: Any stream = ... # type: Any created_block_context = ... # type: bool defer_init = ... # type: Any import_aliases = ... # type: Any blocks = ... # type: Any extends_so_far = ... # type: int has_known_extends = ... # type: bool code_lineno = ... # type: int tests = ... # type: Any filters = ... # type: Any debug_info = ... # type: Any def __init__(self, environment, name, filename, stream: Optional[Any] = ..., defer_init: bool = ...) -> None: ... def fail(self, msg, lineno): ... def temporary_identifier(self): ... def buffer(self, frame): ... def return_buffer_contents(self, frame): ... def indent(self): ... def outdent(self, step: int = ...): ... def start_write(self, frame, node: Optional[Any] = ...): ... def end_write(self, frame): ... def simple_write(self, s, frame, node: Optional[Any] = ...): ... def blockvisit(self, nodes, frame): ... def write(self, x): ... def writeline(self, x, node: Optional[Any] = ..., extra: int = ...): ... def newline(self, node: Optional[Any] = ..., extra: int = ...): ... def signature(self, node, frame, extra_kwargs: Optional[Any] = ...): ... def pull_locals(self, frame): ... def pull_dependencies(self, nodes): ... def unoptimize_scope(self, frame): ... def push_scope(self, frame, extra_vars: Any = ...): ... def pop_scope(self, aliases, frame): ... def function_scoping(self, node, frame, children: Optional[Any] = ..., find_special: bool = ...): ... def macro_body(self, node, frame, children: Optional[Any] = ...): ... def macro_def(self, node, frame): ... def position(self, node): ... def visit_Template(self, node, frame: Optional[Any] = ...): ... def visit_Block(self, node, frame): ... def visit_Extends(self, node, frame): ... def visit_Include(self, node, frame): ... def visit_Import(self, node, frame): ... def visit_FromImport(self, node, frame): ... def visit_For(self, node, frame): ... def visit_If(self, node, frame): ... def visit_Macro(self, node, frame): ... def visit_CallBlock(self, node, frame): ... def visit_FilterBlock(self, node, frame): ... def visit_ExprStmt(self, node, frame): ... def visit_Output(self, node, frame): ... def make_assignment_frame(self, frame): ... def export_assigned_vars(self, frame, assignment_frame): ... def visit_Assign(self, node, frame): ... def visit_AssignBlock(self, node, frame): ... def visit_Name(self, node, frame): ... def visit_Const(self, node, frame): ... def visit_TemplateData(self, node, frame): ... def visit_Tuple(self, node, frame): ... def visit_List(self, node, frame): ... def visit_Dict(self, node, frame): ... def binop(operator, interceptable: bool = ...): ... def uaop(operator, interceptable: bool = ...): ... visit_Add = ... # type: Any visit_Sub = ... # type: Any visit_Mul = ... # type: Any visit_Div = ... # type: Any visit_FloorDiv = ... # type: Any visit_Pow = ... # type: Any visit_Mod = ... # type: Any visit_And = ... 
# type: Any visit_Or = ... # type: Any visit_Pos = ... # type: Any visit_Neg = ... # type: Any visit_Not = ... # type: Any def visit_Concat(self, node, frame): ... def visit_Compare(self, node, frame): ... def visit_Operand(self, node, frame): ... def visit_Getattr(self, node, frame): ... def visit_Getitem(self, node, frame): ... def visit_Slice(self, node, frame): ... def visit_Filter(self, node, frame): ... def visit_Test(self, node, frame): ... def visit_CondExpr(self, node, frame): ... def visit_Call(self, node, frame, forward_caller: bool = ...): ... def visit_Keyword(self, node, frame): ... def visit_MarkSafe(self, node, frame): ... def visit_MarkSafeIfAutoescape(self, node, frame): ... def visit_EnvironmentAttribute(self, node, frame): ... def visit_ExtensionAttribute(self, node, frame): ... def visit_ImportedName(self, node, frame): ... def visit_InternalName(self, node, frame): ... def visit_ContextReference(self, node, frame): ... def visit_Continue(self, node, frame): ... def visit_Break(self, node, frame): ... def visit_Scope(self, node, frame): ... def visit_EvalContextModifier(self, node, frame): ... def visit_ScopedEvalContextModifier(self, node, frame): ... mypy-0.560/typeshed/third_party/2and3/jinja2/constants.pyi0000644€tŠÔÚ€2›s®0000000004513215007212027665 0ustar jukkaDROPBOX\Domain Users00000000000000LOREM_IPSUM_WORDS = ... # type: str mypy-0.560/typeshed/third_party/2and3/jinja2/debug.pyi0000644€tŠÔÚ€2›s®0000000213413215007212026740 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional tproxy = ... # type: Any raise_helper = ... # type: str class TracebackFrameProxy: tb = ... # type: Any def __init__(self, tb) -> None: ... @property def tb_next(self): ... def set_next(self, next): ... @property def is_jinja_frame(self): ... def __getattr__(self, name): ... def make_frame_proxy(frame): ... class ProcessedTraceback: exc_type = ... # type: Any exc_value = ... # type: Any frames = ... # type: Any def __init__(self, exc_type, exc_value, frames) -> None: ... def render_as_text(self, limit: Optional[Any] = ...): ... def render_as_html(self, full: bool = ...): ... @property def is_template_syntax_error(self): ... @property def exc_info(self): ... @property def standard_exc_info(self): ... def make_traceback(exc_info, source_hint: Optional[Any] = ...): ... def translate_syntax_error(error, source: Optional[Any] = ...): ... def translate_exception(exc_info, initial_skip: int = ...): ... def fake_exc_info(exc_info, filename, lineno): ... tb_set_next = ... # type: Any mypy-0.560/typeshed/third_party/2and3/jinja2/defaults.pyi0000644€tŠÔÚ€2›s®0000000127113215007212027462 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from jinja2.filters import FILTERS as DEFAULT_FILTERS from jinja2.tests import TESTS as DEFAULT_TESTS BLOCK_START_STRING = ... # type: str BLOCK_END_STRING = ... # type: str VARIABLE_START_STRING = ... # type: str VARIABLE_END_STRING = ... # type: str COMMENT_START_STRING = ... # type: str COMMENT_END_STRING = ... # type: str LINE_STATEMENT_PREFIX = ... # type: Any LINE_COMMENT_PREFIX = ... # type: Any TRIM_BLOCKS = ... # type: bool LSTRIP_BLOCKS = ... # type: bool NEWLINE_SEQUENCE = ... # type: str KEEP_TRAILING_NEWLINE = ... # type: bool DEFAULT_NAMESPACE = ... 
# type: Any # Names in __all__ with no definition: # DEFAULT_FILTERS # DEFAULT_TESTS mypy-0.560/typeshed/third_party/2and3/jinja2/environment.pyi0000644€tŠÔÚ€2›s®0000001711213215007212030220 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, Iterator, List, Optional, Text, Type, Union from .bccache import BytecodeCache from .loaders import BaseLoader from .runtime import Context, Undefined def get_spontaneous_environment(*args): ... def create_cache(size): ... def copy_cache(cache): ... def load_extensions(environment, extensions): ... class Environment: sandboxed = ... # type: bool overlayed = ... # type: bool linked_to = ... # type: Any shared = ... # type: bool exception_handler = ... # type: Any exception_formatter = ... # type: Any code_generator_class = ... # type: Any context_class = ... # type: Any block_start_string = ... # type: Text block_end_string = ... # type: Text variable_start_string = ... # type: Text variable_end_string = ... # type: Text comment_start_string = ... # type: Text comment_end_string = ... # type: Text line_statement_prefix = ... # type: Text line_comment_prefix = ... # type: Text trim_blocks = ... # type: bool lstrip_blocks = ... # type: Any newline_sequence = ... # type: Text keep_trailing_newline = ... # type: bool undefined = ... # type: Type[Undefined] optimized = ... # type: bool finalize = ... # type: Callable autoescape = ... # type: Any filters = ... # type: Any tests = ... # type: Any globals = ... # type: Dict[str, Any] loader = ... # type: BaseLoader cache = ... # type: Any bytecode_cache = ... # type: BytecodeCache auto_reload = ... # type: bool extensions = ... # type: List def __init__(self, block_start_string: Text = ..., block_end_string: Text = ..., variable_start_string: Text = ..., variable_end_string: Text = ..., comment_start_string: Any = ..., comment_end_string: Text = ..., line_statement_prefix: Text = ..., line_comment_prefix: Text = ..., trim_blocks: bool = ..., lstrip_blocks: bool = ..., newline_sequence: Text = ..., keep_trailing_newline: bool = ..., extensions: List = ..., optimized: bool = ..., undefined: Type[Undefined] = ..., finalize: Optional[Callable] = ..., autoescape: Union[bool, Callable[[str], bool]] = ..., loader: Optional[BaseLoader] = ..., cache_size: int = ..., auto_reload: bool = ..., bytecode_cache: Optional[BytecodeCache] = ...) -> None: ... def add_extension(self, extension): ... def extend(self, **attributes): ... def overlay(self, block_start_string: Text = ..., block_end_string: Text = ..., variable_start_string: Text = ..., variable_end_string: Text = ..., comment_start_string: Any = ..., comment_end_string: Text = ..., line_statement_prefix: Text = ..., line_comment_prefix: Text = ..., trim_blocks: bool = ..., lstrip_blocks: bool = ..., extensions: List = ..., optimized: bool = ..., undefined: Type[Undefined] = ..., finalize: Callable = ..., autoescape: bool = ..., loader: Optional[BaseLoader] = ..., cache_size: int = ..., auto_reload: bool = ..., bytecode_cache: Optional[BytecodeCache] = ...): ... lexer = ... # type: Any def iter_extensions(self): ... def getitem(self, obj, argument): ... def getattr(self, obj, attribute): ... def call_filter(self, name, value, args: Optional[Any] = ..., kwargs: Optional[Any] = ..., context: Optional[Any] = ..., eval_ctx: Optional[Any] = ...): ... def call_test(self, name, value, args: Optional[Any] = ..., kwargs: Optional[Any] = ...): ... def parse(self, source, name: Optional[Any] = ..., filename: Optional[Any] = ...): ... 
def lex(self, source, name: Optional[Any] = ..., filename: Optional[Any] = ...): ... def preprocess(self, source: Text, name: Optional[Any] = ..., filename: Optional[Any] = ...): ... def compile(self, source, name: Optional[Any] = ..., filename: Optional[Any] = ..., raw: bool = ..., defer_init: bool = ...): ... def compile_expression(self, source: Text, undefined_to_none: bool = ...): ... def compile_templates(self, target, extensions: Optional[Any] = ..., filter_func: Optional[Any] = ..., zip: str = ..., log_function: Optional[Any] = ..., ignore_errors: bool = ..., py_compile: bool = ...): ... def list_templates(self, extensions: Optional[Any] = ..., filter_func: Optional[Any] = ...): ... def handle_exception(self, exc_info: Optional[Any] = ..., rendered: bool = ..., source_hint: Optional[Any] = ...): ... def join_path(self, template: Union[Template, Text], parent: Text) -> Text: ... def get_template(self, name: Union[Template, Text], parent: Optional[Text] = ..., globals: Optional[Any] = ...) -> Template: ... def select_template(self, names: List[Union[Template, Text]], parent: Optional[Text] = ..., globals: Optional[Dict[str, Any]] = ...) -> Template: ... def get_or_select_template(self, template_name_or_list: Union[Union[Template, Text], List[Union[Template, Text]]], parent: Optional[Text] = ..., globals: Optional[Dict[str, Any]] = ...) -> Template: ... def from_string(self, source: Text, globals: Optional[Dict[str, Any]] = ..., template_class: Optional[Type[Template]] = ...) -> Template: ... def make_globals(self, d: Optional[Dict[str, Any]]) -> Dict[str, Any]: ... # Frequently added extensions are included here: # from InternationalizationExtension: def install_gettext_translations(self, translations: Any, newstyle: Optional[bool]): ... def install_null_translations(self, newstyle: Optional[bool]): ... def install_gettext_callables(self, gettext: Callable, ngettext: Callable, newstyle: Optional[bool]): ... def uninstall_gettext_translations(self, translations: Any): ... def extract_translations(self, source: Any, gettext_functions: Any): ... newstyle_gettext = ... # type: bool class Template: def __new__(cls, source, block_start_string: Any = ..., block_end_string: Any = ..., variable_start_string: Any = ..., variable_end_string: Any = ..., comment_start_string: Any = ..., comment_end_string: Any = ..., line_statement_prefix: Any = ..., line_comment_prefix: Any = ..., trim_blocks: Any = ..., lstrip_blocks: Any = ..., newline_sequence: Any = ..., keep_trailing_newline: Any = ..., extensions: Any = ..., optimized: bool = ..., undefined: Any = ..., finalize: Optional[Any] = ..., autoescape: bool = ...): ... @classmethod def from_code(cls, environment, code, globals, uptodate: Optional[Any] = ...): ... @classmethod def from_module_dict(cls, environment, module_dict, globals): ... def render(self, *args, **kwargs) -> Text: ... def stream(self, *args, **kwargs) -> TemplateStream: ... def generate(self, *args, **kwargs) -> Iterator[Text]: ... def new_context(self, vars: Optional[Dict[str, Any]] = ..., shared: bool = ..., locals: Optional[Dict[str, Any]] = ...) -> Context: ... def make_module(self, vars: Optional[Dict[str, Any]] = ..., shared: bool = ..., locals: Optional[Dict[str, Any]] = ...) -> Context: ... @property def module(self) -> Any: ... def get_corresponding_lineno(self, lineno): ... @property def is_up_to_date(self) -> bool: ... @property def debug_info(self): ... class TemplateModule: __name__ = ... # type: Any def __init__(self, template, context) -> None: ... 
def __html__(self): ... class TemplateExpression: def __init__(self, template, undefined_to_none) -> None: ... def __call__(self, *args, **kwargs): ... class TemplateStream: def __init__(self, gen) -> None: ... def dump(self, fp, encoding: Optional[Text] = ..., errors: Text = ...): ... buffered = ... # type: bool def disable_buffering(self) -> None: ... def enable_buffering(self, size: int = ...) -> None: ... def __iter__(self): ... def __next__(self): ... mypy-0.560/typeshed/third_party/2and3/jinja2/exceptions.pyi0000644€tŠÔÚ€2›s®0000000230113215007212030027 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Text class TemplateError(Exception): def __init__(self, message: Optional[Text] = ...) -> None: ... @property def message(self): ... def __unicode__(self): ... @property def message(self): ... class TemplateNotFound(IOError, LookupError, TemplateError): message = ... # type: Any name = ... # type: Any templates = ... # type: Any def __init__(self, name, message: Optional[Text] = ...) -> None: ... class TemplatesNotFound(TemplateNotFound): templates = ... # type: Any def __init__(self, names: Any = ..., message: Optional[Text] = ...) -> None: ... class TemplateSyntaxError(TemplateError): lineno = ... # type: int name = ... # type: Text filename = ... # type: Text source = ... # type: Text translated = ... # type: bool def __init__(self, message: Text, lineno: int, name: Optional[Text] = ..., filename: Optional[Text] = ...) -> None: ... class TemplateAssertionError(TemplateSyntaxError): ... class TemplateRuntimeError(TemplateError): ... class UndefinedError(TemplateRuntimeError): ... class SecurityError(TemplateRuntimeError): ... class FilterArgumentError(TemplateRuntimeError): ... mypy-0.560/typeshed/third_party/2and3/jinja2/ext.pyi0000644€tŠÔÚ€2›s®0000000352113215007212026453 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional GETTEXT_FUNCTIONS = ... # type: Any class ExtensionRegistry(type): def __new__(cls, name, bases, d): ... class Extension: tags = ... # type: Any priority = ... # type: int environment = ... # type: Any def __init__(self, environment) -> None: ... def bind(self, environment): ... def preprocess(self, source, name, filename: Optional[Any] = ...): ... def filter_stream(self, stream): ... def parse(self, parser): ... def attr(self, name, lineno: Optional[Any] = ...): ... def call_method(self, name, args: Optional[Any] = ..., kwargs: Optional[Any] = ..., dyn_args: Optional[Any] = ..., dyn_kwargs: Optional[Any] = ..., lineno: Optional[Any] = ...): ... class InternationalizationExtension(Extension): tags = ... # type: Any def __init__(self, environment) -> None: ... def parse(self, parser): ... class ExprStmtExtension(Extension): tags = ... # type: Any def parse(self, parser): ... class LoopControlExtension(Extension): tags = ... # type: Any def parse(self, parser): ... class WithExtension(Extension): tags = ... # type: Any def parse(self, parser): ... class AutoEscapeExtension(Extension): tags = ... # type: Any def parse(self, parser): ... def extract_from_ast(node, gettext_functions: Any = ..., babel_style: bool = ...): ... class _CommentFinder: tokens = ... # type: Any comment_tags = ... # type: Any offset = ... # type: int last_lineno = ... # type: int def __init__(self, tokens, comment_tags) -> None: ... def find_backwards(self, offset): ... def find_comments(self, lineno): ... def babel_extract(fileobj, keywords, comment_tags, options): ... i18n = ... # type: Any do = ... # type: Any loopcontrols = ... 
# type: Any with_ = ... # type: Any autoescape = ... # type: Any mypy-0.560/typeshed/third_party/2and3/jinja2/filters.pyi0000644€tŠÔÚ€2›s®0000000467713215007212027340 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional def contextfilter(f): ... def evalcontextfilter(f): ... def environmentfilter(f): ... def make_attrgetter(environment, attribute): ... def do_forceescape(value): ... def do_urlencode(value): ... def do_replace(eval_ctx, s, old, new, count: Optional[Any] = ...): ... def do_upper(s): ... def do_lower(s): ... def do_xmlattr(_eval_ctx, d, autospace: bool = ...): ... def do_capitalize(s): ... def do_title(s): ... def do_dictsort(value, case_sensitive: bool = ..., by: str = ...): ... def do_sort(environment, value, reverse: bool = ..., case_sensitive: bool = ..., attribute: Optional[Any] = ...): ... def do_default(value, default_value: str = ..., boolean: bool = ...): ... def do_join(eval_ctx, value, d: str = ..., attribute: Optional[Any] = ...): ... def do_center(value, width: int = ...): ... def do_first(environment, seq): ... def do_last(environment, seq): ... def do_random(environment, seq): ... def do_filesizeformat(value, binary: bool = ...): ... def do_pprint(value, verbose: bool = ...): ... def do_urlize(eval_ctx, value, trim_url_limit: Optional[Any] = ..., nofollow: bool = ..., target: Optional[Any] = ...): ... def do_indent(s, width: int = ..., indentfirst: bool = ...): ... def do_truncate(s, length: int = ..., killwords: bool = ..., end: str = ...): ... def do_wordwrap(environment, s, width: int = ..., break_long_words: bool = ..., wrapstring: Optional[Any] = ...): ... def do_wordcount(s): ... def do_int(value, default: int = ..., base: int = ...): ... def do_float(value, default: float = ...): ... def do_format(value, *args, **kwargs): ... def do_trim(value): ... def do_striptags(value): ... def do_slice(value, slices, fill_with: Optional[Any] = ...): ... def do_batch(value, linecount, fill_with: Optional[Any] = ...): ... def do_round(value, precision: int = ..., method: str = ...): ... def do_groupby(environment, value, attribute): ... class _GroupTuple(tuple): grouper = ... # type: Any list = ... # type: Any def __new__(cls, xxx_todo_changeme): ... def do_sum(environment, iterable, attribute: Optional[Any] = ..., start: int = ...): ... def do_list(value): ... def do_mark_safe(value): ... def do_mark_unsafe(value): ... def do_reverse(value): ... def do_attr(environment, obj, name): ... def do_map(*args, **kwargs): ... def do_select(*args, **kwargs): ... def do_reject(*args, **kwargs): ... def do_selectattr(*args, **kwargs): ... def do_rejectattr(*args, **kwargs): ... FILTERS = ... # type: Any mypy-0.560/typeshed/third_party/2and3/jinja2/lexer.pyi0000644€tŠÔÚ€2›s®0000000732013215007212026773 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional whitespace_re = ... # type: Any string_re = ... # type: Any integer_re = ... # type: Any name_re = ... # type: Any float_re = ... # type: Any newline_re = ... # type: Any TOKEN_ADD = ... # type: Any TOKEN_ASSIGN = ... # type: Any TOKEN_COLON = ... # type: Any TOKEN_COMMA = ... # type: Any TOKEN_DIV = ... # type: Any TOKEN_DOT = ... # type: Any TOKEN_EQ = ... # type: Any TOKEN_FLOORDIV = ... # type: Any TOKEN_GT = ... # type: Any TOKEN_GTEQ = ... # type: Any TOKEN_LBRACE = ... # type: Any TOKEN_LBRACKET = ... # type: Any TOKEN_LPAREN = ... # type: Any TOKEN_LT = ... # type: Any TOKEN_LTEQ = ... # type: Any TOKEN_MOD = ... # type: Any TOKEN_MUL = ... # type: Any TOKEN_NE = ... 
# type: Any TOKEN_PIPE = ... # type: Any TOKEN_POW = ... # type: Any TOKEN_RBRACE = ... # type: Any TOKEN_RBRACKET = ... # type: Any TOKEN_RPAREN = ... # type: Any TOKEN_SEMICOLON = ... # type: Any TOKEN_SUB = ... # type: Any TOKEN_TILDE = ... # type: Any TOKEN_WHITESPACE = ... # type: Any TOKEN_FLOAT = ... # type: Any TOKEN_INTEGER = ... # type: Any TOKEN_NAME = ... # type: Any TOKEN_STRING = ... # type: Any TOKEN_OPERATOR = ... # type: Any TOKEN_BLOCK_BEGIN = ... # type: Any TOKEN_BLOCK_END = ... # type: Any TOKEN_VARIABLE_BEGIN = ... # type: Any TOKEN_VARIABLE_END = ... # type: Any TOKEN_RAW_BEGIN = ... # type: Any TOKEN_RAW_END = ... # type: Any TOKEN_COMMENT_BEGIN = ... # type: Any TOKEN_COMMENT_END = ... # type: Any TOKEN_COMMENT = ... # type: Any TOKEN_LINESTATEMENT_BEGIN = ... # type: Any TOKEN_LINESTATEMENT_END = ... # type: Any TOKEN_LINECOMMENT_BEGIN = ... # type: Any TOKEN_LINECOMMENT_END = ... # type: Any TOKEN_LINECOMMENT = ... # type: Any TOKEN_DATA = ... # type: Any TOKEN_INITIAL = ... # type: Any TOKEN_EOF = ... # type: Any operators = ... # type: Any reverse_operators = ... # type: Any operator_re = ... # type: Any ignored_tokens = ... # type: Any ignore_if_empty = ... # type: Any def describe_token(token): ... def describe_token_expr(expr): ... def count_newlines(value): ... def compile_rules(environment): ... class Failure: message = ... # type: Any error_class = ... # type: Any def __init__(self, message, cls: Any = ...) -> None: ... def __call__(self, lineno, filename): ... class Token(tuple): lineno = ... # type: Any type = ... # type: Any value = ... # type: Any def __new__(cls, lineno, type, value): ... def test(self, expr): ... def test_any(self, *iterable): ... class TokenStreamIterator: stream = ... # type: Any def __init__(self, stream) -> None: ... def __iter__(self): ... def __next__(self): ... class TokenStream: name = ... # type: Any filename = ... # type: Any closed = ... # type: bool current = ... # type: Any def __init__(self, generator, name, filename) -> None: ... def __iter__(self): ... def __bool__(self): ... __nonzero__ = ... # type: Any eos = ... # type: Any def push(self, token): ... def look(self): ... def skip(self, n: int = ...): ... def next_if(self, expr): ... def skip_if(self, expr): ... def __next__(self): ... def close(self): ... def expect(self, expr): ... def get_lexer(environment): ... class Lexer: newline_sequence = ... # type: Any keep_trailing_newline = ... # type: Any rules = ... # type: Any def __init__(self, environment) -> None: ... def tokenize(self, source, name: Optional[Any] = ..., filename: Optional[Any] = ..., state: Optional[Any] = ...): ... def wrap(self, stream, name: Optional[Any] = ..., filename: Optional[Any] = ...): ... def tokeniter(self, source, name, filename: Optional[Any] = ..., state: Optional[Any] = ...): ... mypy-0.560/typeshed/third_party/2and3/jinja2/loaders.pyi0000644€tŠÔÚ€2›s®0000000542413215007212027310 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, List, Optional, Text, Tuple from types import ModuleType from .environment import Environment def split_template_path(template: Text) -> List[Text]: ... class BaseLoader: has_source_access = ... # type: bool def get_source(self, environment, template): ... def list_templates(self): ... def load(self, environment, name, globals: Optional[Any] = ...): ... class FileSystemLoader(BaseLoader): searchpath = ... # type: Text encoding = ... # type: Any followlinks = ... 
# type: Any def __init__(self, searchpath: Text, encoding: Text = ..., followlinks: bool = ...) -> None: ... def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable]: ... def list_templates(self): ... class PackageLoader(BaseLoader): encoding = ... # type: Text manager = ... # type: Any filesystem_bound = ... # type: Any provider = ... # type: Any package_path = ... # type: Any def __init__(self, package_name: Text, package_path: Text = ..., encoding: Text = ...) -> None: ... def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable]: ... def list_templates(self): ... class DictLoader(BaseLoader): mapping = ... # type: Any def __init__(self, mapping) -> None: ... def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable]: ... def list_templates(self): ... class FunctionLoader(BaseLoader): load_func = ... # type: Any def __init__(self, load_func) -> None: ... def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable]: ... class PrefixLoader(BaseLoader): mapping = ... # type: Any delimiter = ... # type: Any def __init__(self, mapping, delimiter: str = ...) -> None: ... def get_loader(self, template): ... def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable]: ... def load(self, environment, name, globals: Optional[Any] = ...): ... def list_templates(self): ... class ChoiceLoader(BaseLoader): loaders = ... # type: Any def __init__(self, loaders) -> None: ... def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable]: ... def load(self, environment, name, globals: Optional[Any] = ...): ... def list_templates(self): ... class _TemplateModule(ModuleType): ... class ModuleLoader(BaseLoader): has_source_access = ... # type: bool module = ... # type: Any package_name = ... # type: Any def __init__(self, path) -> None: ... @staticmethod def get_template_key(name): ... @staticmethod def get_module_filename(name): ... def load(self, environment, name, globals: Optional[Any] = ...): ... mypy-0.560/typeshed/third_party/2and3/jinja2/meta.pyi0000644€tŠÔÚ€2›s®0000000054013215007212026577 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from jinja2.compiler import CodeGenerator class TrackingCodeGenerator(CodeGenerator): undeclared_identifiers = ... # type: Any def __init__(self, environment) -> None: ... def write(self, x): ... def pull_locals(self, frame): ... def find_undeclared_variables(ast): ... def find_referenced_templates(ast): ... mypy-0.560/typeshed/third_party/2and3/jinja2/nodes.pyi0000644€tŠÔÚ€2›s®0000001401013215007212026756 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Impossible(Exception): ... class NodeType(type): def __new__(cls, name, bases, d): ... class EvalContext: environment = ... # type: Any autoescape = ... # type: Any volatile = ... # type: bool def __init__(self, environment, template_name: Optional[Any] = ...) -> None: ... def save(self): ... def revert(self, old): ... def get_eval_context(node, ctx): ... class Node: fields = ... # type: Any attributes = ... # type: Any abstract = ... # type: bool def __init__(self, *fields, **attributes) -> None: ... def iter_fields(self, exclude: Optional[Any] = ..., only: Optional[Any] = ...): ... def iter_child_nodes(self, exclude: Optional[Any] = ..., only: Optional[Any] = ...): ... def find(self, node_type): ... def find_all(self, node_type): ... 
def set_ctx(self, ctx): ... def set_lineno(self, lineno, override: bool = ...): ... def set_environment(self, environment): ... def __eq__(self, other): ... def __ne__(self, other): ... __hash__ = ... # type: Any class Stmt(Node): abstract = ... # type: bool class Helper(Node): abstract = ... # type: bool class Template(Node): fields = ... # type: Any class Output(Stmt): fields = ... # type: Any class Extends(Stmt): fields = ... # type: Any class For(Stmt): fields = ... # type: Any class If(Stmt): fields = ... # type: Any class Macro(Stmt): fields = ... # type: Any class CallBlock(Stmt): fields = ... # type: Any class FilterBlock(Stmt): fields = ... # type: Any class Block(Stmt): fields = ... # type: Any class Include(Stmt): fields = ... # type: Any class Import(Stmt): fields = ... # type: Any class FromImport(Stmt): fields = ... # type: Any class ExprStmt(Stmt): fields = ... # type: Any class Assign(Stmt): fields = ... # type: Any class AssignBlock(Stmt): fields = ... # type: Any class Expr(Node): abstract = ... # type: bool def as_const(self, eval_ctx: Optional[Any] = ...): ... def can_assign(self): ... class BinExpr(Expr): fields = ... # type: Any operator = ... # type: Any abstract = ... # type: bool def as_const(self, eval_ctx: Optional[Any] = ...): ... class UnaryExpr(Expr): fields = ... # type: Any operator = ... # type: Any abstract = ... # type: bool def as_const(self, eval_ctx: Optional[Any] = ...): ... class Name(Expr): fields = ... # type: Any def can_assign(self): ... class Literal(Expr): abstract = ... # type: bool class Const(Literal): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... @classmethod def from_untrusted(cls, value, lineno: Optional[Any] = ..., environment: Optional[Any] = ...): ... class TemplateData(Literal): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Tuple(Literal): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... def can_assign(self): ... class List(Literal): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Dict(Literal): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Pair(Helper): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Keyword(Helper): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class CondExpr(Expr): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Filter(Expr): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Test(Expr): fields = ... # type: Any class Call(Expr): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Getitem(Expr): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... def can_assign(self): ... class Getattr(Expr): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... def can_assign(self): ... class Slice(Expr): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Concat(Expr): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Compare(Expr): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class Operand(Helper): fields = ... # type: Any class Mul(BinExpr): operator = ... # type: str class Div(BinExpr): operator = ... # type: str class FloorDiv(BinExpr): operator = ... # type: str class Add(BinExpr): operator = ... 
# type: str class Sub(BinExpr): operator = ... # type: str class Mod(BinExpr): operator = ... # type: str class Pow(BinExpr): operator = ... # type: str class And(BinExpr): operator = ... # type: str def as_const(self, eval_ctx: Optional[Any] = ...): ... class Or(BinExpr): operator = ... # type: str def as_const(self, eval_ctx: Optional[Any] = ...): ... class Not(UnaryExpr): operator = ... # type: str class Neg(UnaryExpr): operator = ... # type: str class Pos(UnaryExpr): operator = ... # type: str class EnvironmentAttribute(Expr): fields = ... # type: Any class ExtensionAttribute(Expr): fields = ... # type: Any class ImportedName(Expr): fields = ... # type: Any class InternalName(Expr): fields = ... # type: Any def __init__(self) -> None: ... class MarkSafe(Expr): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class MarkSafeIfAutoescape(Expr): fields = ... # type: Any def as_const(self, eval_ctx: Optional[Any] = ...): ... class ContextReference(Expr): ... class Continue(Stmt): ... class Break(Stmt): ... class Scope(Stmt): fields = ... # type: Any class EvalContextModifier(Stmt): fields = ... # type: Any class ScopedEvalContextModifier(EvalContextModifier): fields = ... # type: Any mypy-0.560/typeshed/third_party/2and3/jinja2/optimizer.pyi0000644€tŠÔÚ€2›s®0000000165413215007212027702 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from jinja2.visitor import NodeTransformer def optimize(node, environment): ... class Optimizer(NodeTransformer): environment = ... # type: Any def __init__(self, environment) -> None: ... def visit_If(self, node): ... def fold(self, node): ... visit_Add = ... # type: Any visit_Sub = ... # type: Any visit_Mul = ... # type: Any visit_Div = ... # type: Any visit_FloorDiv = ... # type: Any visit_Pow = ... # type: Any visit_Mod = ... # type: Any visit_And = ... # type: Any visit_Or = ... # type: Any visit_Pos = ... # type: Any visit_Neg = ... # type: Any visit_Not = ... # type: Any visit_Compare = ... # type: Any visit_Getitem = ... # type: Any visit_Getattr = ... # type: Any visit_Call = ... # type: Any visit_Filter = ... # type: Any visit_Test = ... # type: Any visit_CondExpr = ... # type: Any mypy-0.560/typeshed/third_party/2and3/jinja2/parser.pyi0000644€tŠÔÚ€2›s®0000000504713215007212027154 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class Parser: environment = ... # type: Any stream = ... # type: Any name = ... # type: Any filename = ... # type: Any closed = ... # type: bool extensions = ... # type: Any def __init__(self, environment, source, name: Optional[Any] = ..., filename: Optional[Any] = ..., state: Optional[Any] = ...) -> None: ... def fail(self, msg, lineno: Optional[Any] = ..., exc: Any = ...): ... def fail_unknown_tag(self, name, lineno: Optional[Any] = ...): ... def fail_eof(self, end_tokens: Optional[Any] = ..., lineno: Optional[Any] = ...): ... def is_tuple_end(self, extra_end_rules: Optional[Any] = ...): ... def free_identifier(self, lineno: Optional[Any] = ...): ... def parse_statement(self): ... def parse_statements(self, end_tokens, drop_needle: bool = ...): ... def parse_set(self): ... def parse_for(self): ... def parse_if(self): ... def parse_block(self): ... def parse_extends(self): ... def parse_import_context(self, node, default): ... def parse_include(self): ... def parse_import(self): ... def parse_from(self): ... def parse_signature(self, node): ... def parse_call_block(self): ... def parse_filter_block(self): ... def parse_macro(self): ... 
def parse_print(self): ... def parse_assign_target(self, with_tuple: bool = ..., name_only: bool = ..., extra_end_rules: Optional[Any] = ...): ... def parse_expression(self, with_condexpr: bool = ...): ... def parse_condexpr(self): ... def parse_or(self): ... def parse_and(self): ... def parse_not(self): ... def parse_compare(self): ... def parse_add(self): ... def parse_sub(self): ... def parse_concat(self): ... def parse_mul(self): ... def parse_div(self): ... def parse_floordiv(self): ... def parse_mod(self): ... def parse_pow(self): ... def parse_unary(self, with_filter: bool = ...): ... def parse_primary(self): ... def parse_tuple(self, simplified: bool = ..., with_condexpr: bool = ..., extra_end_rules: Optional[Any] = ..., explicit_parentheses: bool = ...): ... def parse_list(self): ... def parse_dict(self): ... def parse_postfix(self, node): ... def parse_filter_expr(self, node): ... def parse_subscript(self, node): ... def parse_subscribed(self): ... def parse_call(self, node): ... def parse_filter(self, node, start_inline: bool = ...): ... def parse_test(self, node): ... def subparse(self, end_tokens: Optional[Any] = ...): ... def parse(self): ... mypy-0.560/typeshed/third_party/2and3/jinja2/runtime.pyi0000644€tŠÔÚ€2›s®0000001040713215007212027337 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Optional, Text, Union from jinja2.utils import Markup as Markup, escape as escape, missing as missing, concat as concat from jinja2.exceptions import TemplateRuntimeError as TemplateRuntimeError, TemplateNotFound as TemplateNotFound from jinja2.environment import Environment to_string = ... # type: Any identity = ... # type: Any def markup_join(seq): ... def unicode_join(seq): ... class TemplateReference: def __init__(self, context) -> None: ... def __getitem__(self, name): ... class Context: parent = ... # type: Union[Context, Dict[str, Any]] vars = ... # type: Dict[str, Any] environment = ... # type: Environment eval_ctx = ... # type: Any exported_vars = ... # type: Any name = ... # type: Text blocks = ... # type: Dict[str, Any] def __init__(self, environment: Environment, parent: Union[Context, Dict[str, Any]], name: Text, blocks: Dict[str, Any]) -> None: ... def super(self, name, current): ... def get(self, key, default: Optional[Any] = ...): ... def resolve(self, key): ... def get_exported(self): ... def get_all(self): ... def call(__self, __obj, *args, **kwargs): ... def derived(self, locals: Optional[Any] = ...): ... keys = ... # type: Any values = ... # type: Any items = ... # type: Any iterkeys = ... # type: Any itervalues = ... # type: Any iteritems = ... # type: Any def __contains__(self, name): ... def __getitem__(self, key): ... class BlockReference: name = ... # type: Any def __init__(self, name, context, stack, depth) -> None: ... @property def super(self): ... def __call__(self): ... class LoopContext: index0 = ... # type: int depth0 = ... # type: Any def __init__(self, iterable, recurse: Optional[Any] = ..., depth0: int = ...) -> None: ... def cycle(self, *args): ... first = ... # type: Any last = ... # type: Any index = ... # type: Any revindex = ... # type: Any revindex0 = ... # type: Any depth = ... # type: Any def __len__(self): ... def __iter__(self): ... def loop(self, iterable): ... __call__ = ... # type: Any @property def length(self): ... class LoopContextIterator: context = ... # type: Any def __init__(self, context) -> None: ... def __iter__(self): ... def __next__(self): ... class Macro: name = ... # type: Any arguments = ... 
# type: Any defaults = ... # type: Any catch_kwargs = ... # type: Any catch_varargs = ... # type: Any caller = ... # type: Any def __init__(self, environment, func, name, arguments, defaults, catch_kwargs, catch_varargs, caller) -> None: ... def __call__(self, *args, **kwargs): ... class Undefined: def __init__(self, hint: Optional[Any] = ..., obj: Any = ..., name: Optional[Any] = ..., exc: Any = ...) -> None: ... def __getattr__(self, name): ... __add__ = ... # type: Any __radd__ = ... # type: Any __mul__ = ... # type: Any __rmul__ = ... # type: Any __div__ = ... # type: Any __rdiv__ = ... # type: Any __truediv__ = ... # type: Any __rtruediv__ = ... # type: Any __floordiv__ = ... # type: Any __rfloordiv__ = ... # type: Any __mod__ = ... # type: Any __rmod__ = ... # type: Any __pos__ = ... # type: Any __neg__ = ... # type: Any __call__ = ... # type: Any __getitem__ = ... # type: Any __lt__ = ... # type: Any __le__ = ... # type: Any __gt__ = ... # type: Any __ge__ = ... # type: Any __int__ = ... # type: Any __float__ = ... # type: Any __complex__ = ... # type: Any __pow__ = ... # type: Any __rpow__ = ... # type: Any def __eq__(self, other): ... def __ne__(self, other): ... def __hash__(self): ... def __len__(self): ... def __iter__(self): ... def __nonzero__(self): ... __bool__ = ... # type: Any def make_logging_undefined(logger: Optional[Any] = ..., base: Optional[Any] = ...): ... class DebugUndefined(Undefined): ... class StrictUndefined(Undefined): __iter__ = ... # type: Any __len__ = ... # type: Any __nonzero__ = ... # type: Any __eq__ = ... # type: Any __ne__ = ... # type: Any __bool__ = ... # type: Any __hash__ = ... # type: Any mypy-0.560/typeshed/third_party/2and3/jinja2/sandbox.pyi0000644€tŠÔÚ€2›s®0000000242313215007212027311 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from jinja2.environment import Environment MAX_RANGE = ... # type: int UNSAFE_FUNCTION_ATTRIBUTES = ... # type: Any UNSAFE_METHOD_ATTRIBUTES = ... # type: Any UNSAFE_GENERATOR_ATTRIBUTES = ... # type: Any def safe_range(*args): ... def unsafe(f): ... def is_internal_attribute(obj, attr): ... def modifies_known_mutable(obj, attr): ... class SandboxedEnvironment(Environment): sandboxed = ... # type: bool default_binop_table = ... # type: Any default_unop_table = ... # type: Any intercepted_binops = ... # type: Any intercepted_unops = ... # type: Any def intercept_unop(self, operator): ... binop_table = ... # type: Any unop_table = ... # type: Any def __init__(self, *args, **kwargs) -> None: ... def is_safe_attribute(self, obj, attr, value): ... def is_safe_callable(self, obj): ... def call_binop(self, context, operator, left, right): ... def call_unop(self, context, operator, arg): ... def getitem(self, obj, argument): ... def getattr(self, obj, attribute): ... def unsafe_undefined(self, obj, attribute): ... def call(__self, __context, __obj, *args, **kwargs): ... class ImmutableSandboxedEnvironment(SandboxedEnvironment): def is_safe_attribute(self, obj, attr, value): ... mypy-0.560/typeshed/third_party/2and3/jinja2/tests.pyi0000644€tŠÔÚ€2›s®0000000115113215007212027012 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any number_re = ... # type: Any regex_type = ... # type: Any test_callable = ... # type: Any def test_odd(value): ... def test_even(value): ... def test_divisibleby(value, num): ... def test_defined(value): ... def test_undefined(value): ... def test_none(value): ... def test_lower(value): ... def test_upper(value): ... def test_string(value): ... 
def test_mapping(value): ... def test_number(value): ... def test_sequence(value): ... def test_equalto(value, other): ... def test_sameas(value, other): ... def test_iterable(value): ... def test_escaped(value): ... TESTS = ... # type: Any mypy-0.560/typeshed/third_party/2and3/jinja2/utils.pyi0000644€tŠÔÚ€2›s®0000000416713215007212027022 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Iterable, Optional from markupsafe import Markup as Markup, escape as escape, soft_unicode as soft_unicode missing = ... # type: Any internal_code = ... # type: Any concat = ... # type: Any def contextfunction(f): ... def evalcontextfunction(f): ... def environmentfunction(f): ... def internalcode(f): ... def is_undefined(obj): ... def select_autoescape(enabled_extensions: Iterable[str] = ..., disabled_extensions: Iterable[str] = ..., default_for_string: bool = ..., default: bool = ...) -> Callable[[str], bool]: ... def consume(iterable): ... def clear_caches(): ... def import_string(import_name, silent: bool = ...): ... def open_if_exists(filename, mode: str = ...): ... def object_type_repr(obj): ... def pformat(obj, verbose: bool = ...): ... def urlize(text, trim_url_limit: Optional[Any] = ..., nofollow: bool = ..., target: Optional[Any] = ...): ... def generate_lorem_ipsum(n: int = ..., html: bool = ..., min: int = ..., max: int = ...): ... def unicode_urlencode(obj, charset: str = ..., for_qs: bool = ...): ... class LRUCache: capacity = ... # type: Any def __init__(self, capacity) -> None: ... def __getnewargs__(self): ... def copy(self): ... def get(self, key, default: Optional[Any] = ...): ... def setdefault(self, key, default: Optional[Any] = ...): ... def clear(self): ... def __contains__(self, key): ... def __len__(self): ... def __getitem__(self, key): ... def __setitem__(self, key, value): ... def __delitem__(self, key): ... def items(self): ... def iteritems(self): ... def values(self): ... def itervalue(self): ... def keys(self): ... def iterkeys(self): ... __iter__ = ... # type: Any def __reversed__(self): ... __copy__ = ... # type: Any class Cycler: items = ... # type: Any def __init__(self, *items) -> None: ... pos = ... # type: int def reset(self): ... @property def current(self): ... def __next__(self): ... class Joiner: sep = ... # type: Any used = ... # type: bool def __init__(self, sep: str = ...) -> None: ... def __call__(self): ... mypy-0.560/typeshed/third_party/2and3/jinja2/visitor.pyi0000644€tŠÔÚ€2›s®0000000046213215007212027353 0ustar jukkaDROPBOX\Domain Users00000000000000class NodeVisitor: def get_visitor(self, node): ... def visit(self, node, *args, **kwargs): ... def generic_visit(self, node, *args, **kwargs): ... class NodeTransformer(NodeVisitor): def generic_visit(self, node, *args, **kwargs): ... def visit_list(self, node, *args, **kwargs): ... 
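The jinja2 stubs above describe the typed public template API (Environment and its constructor options, the loader classes, Template.render returning Text). A minimal sketch of code that mypy would check against these stubs is shown below; the "templates" search path and "hello.html" template name are hypothetical placeholders, and the snippet assumes Jinja2 itself is installed at runtime.

from jinja2 import Environment, FileSystemLoader, select_autoescape

# Environment(loader=..., autoescape=...) matches the stubbed constructor in
# environment.pyi; FileSystemLoader(searchpath, ...) is typed in loaders.pyi and
# select_autoescape(...) -> Callable[[str], bool] in utils.pyi.
env = Environment(
    loader=FileSystemLoader("templates"),           # hypothetical search path
    autoescape=select_autoescape(["html", "xml"]),
)

# get_template() is declared to return Template, and Template.render() returns
# Text, so the checker can verify the result is used as a string.
template = env.get_template("hello.html")           # hypothetical template name
html: str = template.render(name="World")
print(html)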
mypy-0.560/typeshed/third_party/2and3/markupsafe/0000755€tŠÔÚ€2›s®0000000000013215007244026115 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/markupsafe/__init__.pyi0000644€tŠÔÚ€2›s®0000000560113215007212030374 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Text, Tuple, Union from collections import Mapping from markupsafe._compat import text_type import string from markupsafe._speedups import escape as escape, escape_silent as escape_silent, soft_unicode as soft_unicode from markupsafe._native import escape as escape, escape_silent as escape_silent, soft_unicode as soft_unicode class Markup(text_type): def __new__(cls, base: Text = ..., encoding: Optional[Text] = ..., errors: Text = ...) -> Markup: ... def __html__(self) -> Markup: ... def __add__(self, other: text_type) -> Markup: ... def __radd__(self, other: text_type) -> Markup: ... def __mul__(self, num: int) -> Markup: ... def __rmul__(self, num: int) -> Markup: ... def __mod__(self, *args: Any) -> Markup: ... def join(self, seq: Iterable[text_type]): ... def split(self, sep: Optional[text_type] = ..., maxsplit: int = ...) -> List[text_type]: ... def rsplit(self, sep: Optional[text_type] = ..., maxsplit: int = ...) -> List[text_type]: ... def splitlines(self, keepends: bool = ...) -> List[text_type]: ... def unescape(self) -> Text: ... def striptags(self) -> Text: ... @classmethod def escape(cls, s: text_type) -> Markup: ... def partition(self, sep: text_type) -> Tuple[Markup, Markup, Markup]: ... def rpartition(self, sep: text_type) -> Tuple[Markup, Markup, Markup]: ... def format(*args, **kwargs) -> Markup: ... def __html_format__(self, format_spec) -> Markup: ... def __getslice__(self, start: int, stop: int) -> Markup: ... def __getitem__(self, i: Union[int, slice]) -> Markup: ... def capitalize(self) -> Markup: ... def title(self) -> Markup: ... def lower(self) -> Markup: ... def upper(self) -> Markup: ... def swapcase(self) -> Markup: ... def replace(self, old: text_type, new: text_type, count: int = ...) -> Markup: ... def ljust(self, width: int, fillchar: text_type = ...) -> Markup: ... def rjust(self, width: int, fillchar: text_type = ...) -> Markup: ... def lstrip(self, chars: Optional[text_type] = ...) -> Markup: ... def rstrip(self, chars: Optional[text_type] = ...) -> Markup: ... def strip(self, chars: Optional[text_type] = ...) -> Markup: ... def center(self, width: int, fillchar: text_type = ...) -> Markup: ... def zfill(self, width: int) -> Markup: ... def translate(self, table: Union[Mapping[int, Union[int, text_type, None]], Sequence[Union[int, text_type, None]]]) -> Markup: ... def expandtabs(self, tabsize: int = ...) -> Markup: ... class EscapeFormatter(string.Formatter): escape = ... # type: Callable[[text_type], Markup] def __init__(self, escape: Callable[[text_type], Markup]) -> None: ... def format_field(self, value: text_type, format_spec: text_type) -> Markup: ... if sys.version_info[0] >= 3: soft_str = soft_unicode mypy-0.560/typeshed/third_party/2and3/markupsafe/_compat.pyi0000644€tŠÔÚ€2›s®0000000067213215007212030262 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import Any, Iterator, Mapping, Text, Tuple, TypeVar _K = TypeVar('_K') _V = TypeVar('_V') PY2 = ... # type: bool def iteritems(d: Mapping[_K, _V]) -> Iterator[Tuple[_K, _V]]: ... 
if sys.version_info[0] >= 3: text_type = str string_types = str, unichr = chr int_types = int, else: text_type = unicode string_types = (str, unicode) unichr = __builtins__.unichr int_types = (int, long) mypy-0.560/typeshed/third_party/2and3/markupsafe/_constants.pyi0000644€tŠÔÚ€2›s®0000000012113215007212031000 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Text HTML_ENTITIES = ... # type: Dict[Text, int] mypy-0.560/typeshed/third_party/2and3/markupsafe/_native.pyi0000644€tŠÔÚ€2›s®0000000037713215007212030267 0ustar jukkaDROPBOX\Domain Users00000000000000from . import Markup from ._compat import text_type, string_types from typing import Union, Text def escape(s: Union[Markup, Text]) -> Markup: ... def escape_silent(s: Union[None, Markup, Text]) -> Markup: ... def soft_unicode(s: Text) -> text_type: ... mypy-0.560/typeshed/third_party/2and3/markupsafe/_speedups.pyi0000644€tŠÔÚ€2›s®0000000037713215007212030631 0ustar jukkaDROPBOX\Domain Users00000000000000from . import Markup from ._compat import text_type, string_types from typing import Union, Text def escape(s: Union[Markup, Text]) -> Markup: ... def escape_silent(s: Union[None, Markup, Text]) -> Markup: ... def soft_unicode(s: Text) -> text_type: ... mypy-0.560/typeshed/third_party/2and3/mypy_extensions.pyi0000644€tŠÔÚ€2›s®0000000133313215007212027752 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict, Type, TypeVar, Optional, Union _T = TypeVar('_T') def TypedDict(typename: str, fields: Dict[str, Type[_T]], total: bool = ...) -> Type[dict]: ... def Arg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def DefaultArg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def NamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def DefaultNamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ... def VarArg(type: _T = ...) -> _T: ... def KwArg(type: _T = ...) -> _T: ... # Return type that indicates a function does not return. # This type is equivalent to the None type, but the no-op Union is necessary to # distinguish the None type from the None value. NoReturn = Union[None] mypy-0.560/typeshed/third_party/2and3/pymysql/0000755€tŠÔÚ€2›s®0000000000013215007244025475 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/pymysql/__init__.pyi0000644€tŠÔÚ€2›s®0000000276613215007212027765 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Union, Tuple, Callable from .connections import Connection from .constants import FIELD_TYPE as FIELD_TYPE from .converters import escape_dict as escape_dict, escape_sequence as escape_sequence, escape_string as escape_string from .err import Warning as Warning, Error as Error, InterfaceError as InterfaceError, DataError as DataError, DatabaseError as DatabaseError, OperationalError as OperationalError, IntegrityError as IntegrityError, InternalError as InternalError, NotSupportedError as NotSupportedError, ProgrammingError as ProgrammingError, MySQLError as MySQLError from .times import Date as Date, Time as Time, Timestamp as Timestamp, DateFromTicks as DateFromTicks, TimeFromTicks as TimeFromTicks, TimestampFromTicks as TimestampFromTicks threadsafety = ... # type: int apilevel = ... # type: str paramstyle = ... # type: str class DBAPISet(frozenset): def __ne__(self, other) -> bool: ... def __eq__(self, other) -> bool: ... def __hash__(self) -> int: ... STRING = ... # type: DBAPISet BINARY = ... # type: DBAPISet NUMBER = ... # type: DBAPISet DATE = ... # type: DBAPISet TIME = ... 
# type: DBAPISet TIMESTAMP = ... # type: DBAPISet ROWID = ... # type: DBAPISet def Binary(x) -> Union[bytearray, bytes]: ... def Connect(*args, **kwargs) -> Connection: ... def get_client_info() -> str: ... connect = ... # type: Callable[..., Connection] version_info = ... # type: Tuple[int, int, int, str, int] NULL = ... # type: str def install_as_MySQLdb() -> None: ... mypy-0.560/typeshed/third_party/2and3/pymysql/charset.pyi0000644€tŠÔÚ€2›s®0000000054313215007212027646 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any MBLENGTH = ... # type: Any class Charset: is_default = ... # type: Any def __init__(self, id, name, collation, is_default): ... class Charsets: def __init__(self): ... def add(self, c): ... def by_id(self, id): ... def by_name(self, name): ... def charset_by_name(name): ... def charset_by_id(id): ... mypy-0.560/typeshed/third_party/2and3/pymysql/connections.pyi0000644€tŠÔÚ€2›s®0000001225513215007212030542 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Type from .charset import MBLENGTH as MBLENGTH, charset_by_name as charset_by_name, charset_by_id as charset_by_id from .cursors import Cursor as Cursor from .constants import FIELD_TYPE as FIELD_TYPE, FLAG as FLAG from .constants import SERVER_STATUS as SERVER_STATUS from .constants import CLIENT as CLIENT from .constants import COMMAND as COMMAND from .util import join_bytes as join_bytes, byte2int as byte2int, int2byte as int2byte from .converters import escape_item as escape_item, encoders as encoders, decoders as decoders from .err import raise_mysql_exception as raise_mysql_exception, Warning as Warning, Error as Error, InterfaceError as InterfaceError, DataError as DataError, DatabaseError as DatabaseError, OperationalError as OperationalError, IntegrityError as IntegrityError, InternalError as InternalError, NotSupportedError as NotSupportedError, ProgrammingError as ProgrammingError sha_new = ... # type: Any SSL_ENABLED = ... # type: Any DEFAULT_USER = ... # type: Any DEBUG = ... # type: Any NULL_COLUMN = ... # type: Any UNSIGNED_CHAR_COLUMN = ... # type: Any UNSIGNED_SHORT_COLUMN = ... # type: Any UNSIGNED_INT24_COLUMN = ... # type: Any UNSIGNED_INT64_COLUMN = ... # type: Any UNSIGNED_CHAR_LENGTH = ... # type: Any UNSIGNED_SHORT_LENGTH = ... # type: Any UNSIGNED_INT24_LENGTH = ... # type: Any UNSIGNED_INT64_LENGTH = ... # type: Any DEFAULT_CHARSET = ... # type: Any def dump_packet(data): ... SCRAMBLE_LENGTH_323 = ... # type: Any class RandStruct_323: max_value = ... # type: Any seed1 = ... # type: Any seed2 = ... # type: Any def __init__(self, seed1, seed2): ... def my_rnd(self): ... def pack_int24(n): ... def unpack_uint16(n): ... def unpack_int24(n): ... def unpack_int32(n): ... def unpack_int64(n): ... def defaulterrorhandler(connection, cursor, errorclass, errorvalue): ... class MysqlPacket: connection = ... # type: Any def __init__(self, connection): ... def packet_number(self): ... def get_all_data(self): ... def read(self, size): ... def read_all(self): ... def advance(self, length): ... def rewind(self, position=0): ... def peek(self, size): ... def get_bytes(self, position, length=1): ... def read_length_coded_binary(self): ... def read_length_coded_string(self): ... def is_ok_packet(self): ... def is_eof_packet(self): ... def is_resultset_packet(self): ... def is_error_packet(self): ... def check_error(self): ... def dump(self): ... class FieldDescriptorPacket(MysqlPacket): def __init__(self, *args): ... def description(self): ... 
def get_column_length(self): ... class Connection: errorhandler = ... # type: Any ssl = ... # type: Any host = ... # type: Any port = ... # type: Any user = ... # type: Any password = ... # type: Any db = ... # type: Any unix_socket = ... # type: Any charset = ... # type: Any use_unicode = ... # type: Any client_flag = ... # type: Any cursorclass = ... # type: Any connect_timeout = ... # type: Any messages = ... # type: Any encoders = ... # type: Any decoders = ... # type: Any host_info = ... # type: Any def __init__(self, host='', user=None, passwd='', db=None, port=3306, unix_socket=None, charset='', sql_mode=None, read_default_file=None, conv=..., use_unicode=None, client_flag=0, cursorclass=..., init_command=None, connect_timeout=None, ssl=None, read_default_group=None, compress=None, named_pipe=None): ... socket = ... # type: Any rfile = ... # type: Any wfile = ... # type: Any def close(self): ... def autocommit(self, value): ... def commit(self): ... def begin(self) -> None: ... def rollback(self): ... def escape(self, obj): ... def literal(self, obj): ... def cursor(self, cursor: Optional[Type[Cursor]] = ...): ... def __enter__(self): ... def __exit__(self, exc, value, traceback): ... def query(self, sql): ... def next_result(self): ... def affected_rows(self): ... def kill(self, thread_id): ... def ping(self, reconnect=True): ... def set_charset(self, charset): ... def read_packet(self, packet_type=...): ... def insert_id(self): ... def thread_id(self): ... def character_set_name(self): ... def get_host_info(self): ... def get_proto_info(self): ... def get_server_info(self): ... Warning = ... # type: Any Error = ... # type: Any InterfaceError = ... # type: Any DatabaseError = ... # type: Any DataError = ... # type: Any OperationalError = ... # type: Any IntegrityError = ... # type: Any InternalError = ... # type: Any ProgrammingError = ... # type: Any NotSupportedError = ... # type: Any class MySQLResult: connection = ... # type: Any affected_rows = ... # type: Any insert_id = ... # type: Any server_status = ... # type: Any warning_count = ... # type: Any message = ... # type: Any field_count = ... # type: Any description = ... # type: Any rows = ... # type: Any has_next = ... # type: Any def __init__(self, connection): ... first_packet = ... # type: Any def read(self): ... mypy-0.560/typeshed/third_party/2and3/pymysql/constants/0000755€tŠÔÚ€2›s®0000000000013215007244027511 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/pymysql/constants/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212031754 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/pymysql/constants/CLIENT.pyi0000644€tŠÔÚ€2›s®0000000111013215007212031176 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any LONG_PASSWORD = ... # type: Any FOUND_ROWS = ... # type: Any LONG_FLAG = ... # type: Any CONNECT_WITH_DB = ... # type: Any NO_SCHEMA = ... # type: Any COMPRESS = ... # type: Any ODBC = ... # type: Any LOCAL_FILES = ... # type: Any IGNORE_SPACE = ... # type: Any PROTOCOL_41 = ... # type: Any INTERACTIVE = ... # type: Any SSL = ... # type: Any IGNORE_SIGPIPE = ... # type: Any TRANSACTIONS = ... # type: Any SECURE_CONNECTION = ... # type: Any MULTI_STATEMENTS = ... # type: Any MULTI_RESULTS = ... # type: Any CAPABILITIES = ... # type: Any mypy-0.560/typeshed/third_party/2and3/pymysql/constants/COMMAND.pyi0000644€tŠÔÚ€2›s®0000000134313215007212031306 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any COM_SLEEP = ... 
# type: Any COM_QUIT = ... # type: Any COM_INIT_DB = ... # type: Any COM_QUERY = ... # type: Any COM_FIELD_LIST = ... # type: Any COM_CREATE_DB = ... # type: Any COM_DROP_DB = ... # type: Any COM_REFRESH = ... # type: Any COM_SHUTDOWN = ... # type: Any COM_STATISTICS = ... # type: Any COM_PROCESS_INFO = ... # type: Any COM_CONNECT = ... # type: Any COM_PROCESS_KILL = ... # type: Any COM_DEBUG = ... # type: Any COM_PING = ... # type: Any COM_TIME = ... # type: Any COM_DELAYED_INSERT = ... # type: Any COM_CHANGE_USER = ... # type: Any COM_BINLOG_DUMP = ... # type: Any COM_TABLE_DUMP = ... # type: Any COM_CONNECT_OUT = ... # type: Any COM_REGISTER_SLAVE = ... # type: Any mypy-0.560/typeshed/third_party/2and3/pymysql/constants/ER.pyi0000644€tŠÔÚ€2›s®0000004275213215007212030547 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any ERROR_FIRST = ... # type: Any HASHCHK = ... # type: Any NISAMCHK = ... # type: Any NO = ... # type: Any YES = ... # type: Any CANT_CREATE_FILE = ... # type: Any CANT_CREATE_TABLE = ... # type: Any CANT_CREATE_DB = ... # type: Any DB_CREATE_EXISTS = ... # type: Any DB_DROP_EXISTS = ... # type: Any DB_DROP_DELETE = ... # type: Any DB_DROP_RMDIR = ... # type: Any CANT_DELETE_FILE = ... # type: Any CANT_FIND_SYSTEM_REC = ... # type: Any CANT_GET_STAT = ... # type: Any CANT_GET_WD = ... # type: Any CANT_LOCK = ... # type: Any CANT_OPEN_FILE = ... # type: Any FILE_NOT_FOUND = ... # type: Any CANT_READ_DIR = ... # type: Any CANT_SET_WD = ... # type: Any CHECKREAD = ... # type: Any DISK_FULL = ... # type: Any DUP_KEY = ... # type: Any ERROR_ON_CLOSE = ... # type: Any ERROR_ON_READ = ... # type: Any ERROR_ON_RENAME = ... # type: Any ERROR_ON_WRITE = ... # type: Any FILE_USED = ... # type: Any FILSORT_ABORT = ... # type: Any FORM_NOT_FOUND = ... # type: Any GET_ERRNO = ... # type: Any ILLEGAL_HA = ... # type: Any KEY_NOT_FOUND = ... # type: Any NOT_FORM_FILE = ... # type: Any NOT_KEYFILE = ... # type: Any OLD_KEYFILE = ... # type: Any OPEN_AS_READONLY = ... # type: Any OUTOFMEMORY = ... # type: Any OUT_OF_SORTMEMORY = ... # type: Any UNEXPECTED_EOF = ... # type: Any CON_COUNT_ERROR = ... # type: Any OUT_OF_RESOURCES = ... # type: Any BAD_HOST_ERROR = ... # type: Any HANDSHAKE_ERROR = ... # type: Any DBACCESS_DENIED_ERROR = ... # type: Any ACCESS_DENIED_ERROR = ... # type: Any NO_DB_ERROR = ... # type: Any UNKNOWN_COM_ERROR = ... # type: Any BAD_NULL_ERROR = ... # type: Any BAD_DB_ERROR = ... # type: Any TABLE_EXISTS_ERROR = ... # type: Any BAD_TABLE_ERROR = ... # type: Any NON_UNIQ_ERROR = ... # type: Any SERVER_SHUTDOWN = ... # type: Any BAD_FIELD_ERROR = ... # type: Any WRONG_FIELD_WITH_GROUP = ... # type: Any WRONG_GROUP_FIELD = ... # type: Any WRONG_SUM_SELECT = ... # type: Any WRONG_VALUE_COUNT = ... # type: Any TOO_LONG_IDENT = ... # type: Any DUP_FIELDNAME = ... # type: Any DUP_KEYNAME = ... # type: Any DUP_ENTRY = ... # type: Any WRONG_FIELD_SPEC = ... # type: Any PARSE_ERROR = ... # type: Any EMPTY_QUERY = ... # type: Any NONUNIQ_TABLE = ... # type: Any INVALID_DEFAULT = ... # type: Any MULTIPLE_PRI_KEY = ... # type: Any TOO_MANY_KEYS = ... # type: Any TOO_MANY_KEY_PARTS = ... # type: Any TOO_LONG_KEY = ... # type: Any KEY_COLUMN_DOES_NOT_EXITS = ... # type: Any BLOB_USED_AS_KEY = ... # type: Any TOO_BIG_FIELDLENGTH = ... # type: Any WRONG_AUTO_KEY = ... # type: Any READY = ... # type: Any NORMAL_SHUTDOWN = ... # type: Any GOT_SIGNAL = ... # type: Any SHUTDOWN_COMPLETE = ... # type: Any FORCING_CLOSE = ... # type: Any IPSOCK_ERROR = ... 
# type: Any NO_SUCH_INDEX = ... # type: Any WRONG_FIELD_TERMINATORS = ... # type: Any BLOBS_AND_NO_TERMINATED = ... # type: Any TEXTFILE_NOT_READABLE = ... # type: Any FILE_EXISTS_ERROR = ... # type: Any LOAD_INFO = ... # type: Any ALTER_INFO = ... # type: Any WRONG_SUB_KEY = ... # type: Any CANT_REMOVE_ALL_FIELDS = ... # type: Any CANT_DROP_FIELD_OR_KEY = ... # type: Any INSERT_INFO = ... # type: Any UPDATE_TABLE_USED = ... # type: Any NO_SUCH_THREAD = ... # type: Any KILL_DENIED_ERROR = ... # type: Any NO_TABLES_USED = ... # type: Any TOO_BIG_SET = ... # type: Any NO_UNIQUE_LOGFILE = ... # type: Any TABLE_NOT_LOCKED_FOR_WRITE = ... # type: Any TABLE_NOT_LOCKED = ... # type: Any BLOB_CANT_HAVE_DEFAULT = ... # type: Any WRONG_DB_NAME = ... # type: Any WRONG_TABLE_NAME = ... # type: Any TOO_BIG_SELECT = ... # type: Any UNKNOWN_ERROR = ... # type: Any UNKNOWN_PROCEDURE = ... # type: Any WRONG_PARAMCOUNT_TO_PROCEDURE = ... # type: Any WRONG_PARAMETERS_TO_PROCEDURE = ... # type: Any UNKNOWN_TABLE = ... # type: Any FIELD_SPECIFIED_TWICE = ... # type: Any INVALID_GROUP_FUNC_USE = ... # type: Any UNSUPPORTED_EXTENSION = ... # type: Any TABLE_MUST_HAVE_COLUMNS = ... # type: Any RECORD_FILE_FULL = ... # type: Any UNKNOWN_CHARACTER_SET = ... # type: Any TOO_MANY_TABLES = ... # type: Any TOO_MANY_FIELDS = ... # type: Any TOO_BIG_ROWSIZE = ... # type: Any STACK_OVERRUN = ... # type: Any WRONG_OUTER_JOIN = ... # type: Any NULL_COLUMN_IN_INDEX = ... # type: Any CANT_FIND_UDF = ... # type: Any CANT_INITIALIZE_UDF = ... # type: Any UDF_NO_PATHS = ... # type: Any UDF_EXISTS = ... # type: Any CANT_OPEN_LIBRARY = ... # type: Any CANT_FIND_DL_ENTRY = ... # type: Any FUNCTION_NOT_DEFINED = ... # type: Any HOST_IS_BLOCKED = ... # type: Any HOST_NOT_PRIVILEGED = ... # type: Any PASSWORD_ANONYMOUS_USER = ... # type: Any PASSWORD_NOT_ALLOWED = ... # type: Any PASSWORD_NO_MATCH = ... # type: Any UPDATE_INFO = ... # type: Any CANT_CREATE_THREAD = ... # type: Any WRONG_VALUE_COUNT_ON_ROW = ... # type: Any CANT_REOPEN_TABLE = ... # type: Any INVALID_USE_OF_NULL = ... # type: Any REGEXP_ERROR = ... # type: Any MIX_OF_GROUP_FUNC_AND_FIELDS = ... # type: Any NONEXISTING_GRANT = ... # type: Any TABLEACCESS_DENIED_ERROR = ... # type: Any COLUMNACCESS_DENIED_ERROR = ... # type: Any ILLEGAL_GRANT_FOR_TABLE = ... # type: Any GRANT_WRONG_HOST_OR_USER = ... # type: Any NO_SUCH_TABLE = ... # type: Any NONEXISTING_TABLE_GRANT = ... # type: Any NOT_ALLOWED_COMMAND = ... # type: Any SYNTAX_ERROR = ... # type: Any DELAYED_CANT_CHANGE_LOCK = ... # type: Any TOO_MANY_DELAYED_THREADS = ... # type: Any ABORTING_CONNECTION = ... # type: Any NET_PACKET_TOO_LARGE = ... # type: Any NET_READ_ERROR_FROM_PIPE = ... # type: Any NET_FCNTL_ERROR = ... # type: Any NET_PACKETS_OUT_OF_ORDER = ... # type: Any NET_UNCOMPRESS_ERROR = ... # type: Any NET_READ_ERROR = ... # type: Any NET_READ_INTERRUPTED = ... # type: Any NET_ERROR_ON_WRITE = ... # type: Any NET_WRITE_INTERRUPTED = ... # type: Any TOO_LONG_STRING = ... # type: Any TABLE_CANT_HANDLE_BLOB = ... # type: Any TABLE_CANT_HANDLE_AUTO_INCREMENT = ... # type: Any DELAYED_INSERT_TABLE_LOCKED = ... # type: Any WRONG_COLUMN_NAME = ... # type: Any WRONG_KEY_COLUMN = ... # type: Any WRONG_MRG_TABLE = ... # type: Any DUP_UNIQUE = ... # type: Any BLOB_KEY_WITHOUT_LENGTH = ... # type: Any PRIMARY_CANT_HAVE_NULL = ... # type: Any TOO_MANY_ROWS = ... # type: Any REQUIRES_PRIMARY_KEY = ... # type: Any NO_RAID_COMPILED = ... # type: Any UPDATE_WITHOUT_KEY_IN_SAFE_MODE = ... 
# type: Any KEY_DOES_NOT_EXITS = ... # type: Any CHECK_NO_SUCH_TABLE = ... # type: Any CHECK_NOT_IMPLEMENTED = ... # type: Any CANT_DO_THIS_DURING_AN_TRANSACTION = ... # type: Any ERROR_DURING_COMMIT = ... # type: Any ERROR_DURING_ROLLBACK = ... # type: Any ERROR_DURING_FLUSH_LOGS = ... # type: Any ERROR_DURING_CHECKPOINT = ... # type: Any NEW_ABORTING_CONNECTION = ... # type: Any DUMP_NOT_IMPLEMENTED = ... # type: Any FLUSH_MASTER_BINLOG_CLOSED = ... # type: Any INDEX_REBUILD = ... # type: Any MASTER = ... # type: Any MASTER_NET_READ = ... # type: Any MASTER_NET_WRITE = ... # type: Any FT_MATCHING_KEY_NOT_FOUND = ... # type: Any LOCK_OR_ACTIVE_TRANSACTION = ... # type: Any UNKNOWN_SYSTEM_VARIABLE = ... # type: Any CRASHED_ON_USAGE = ... # type: Any CRASHED_ON_REPAIR = ... # type: Any WARNING_NOT_COMPLETE_ROLLBACK = ... # type: Any TRANS_CACHE_FULL = ... # type: Any SLAVE_MUST_STOP = ... # type: Any SLAVE_NOT_RUNNING = ... # type: Any BAD_SLAVE = ... # type: Any MASTER_INFO = ... # type: Any SLAVE_THREAD = ... # type: Any TOO_MANY_USER_CONNECTIONS = ... # type: Any SET_CONSTANTS_ONLY = ... # type: Any LOCK_WAIT_TIMEOUT = ... # type: Any LOCK_TABLE_FULL = ... # type: Any READ_ONLY_TRANSACTION = ... # type: Any DROP_DB_WITH_READ_LOCK = ... # type: Any CREATE_DB_WITH_READ_LOCK = ... # type: Any WRONG_ARGUMENTS = ... # type: Any NO_PERMISSION_TO_CREATE_USER = ... # type: Any UNION_TABLES_IN_DIFFERENT_DIR = ... # type: Any LOCK_DEADLOCK = ... # type: Any TABLE_CANT_HANDLE_FT = ... # type: Any CANNOT_ADD_FOREIGN = ... # type: Any NO_REFERENCED_ROW = ... # type: Any ROW_IS_REFERENCED = ... # type: Any CONNECT_TO_MASTER = ... # type: Any QUERY_ON_MASTER = ... # type: Any ERROR_WHEN_EXECUTING_COMMAND = ... # type: Any WRONG_USAGE = ... # type: Any WRONG_NUMBER_OF_COLUMNS_IN_SELECT = ... # type: Any CANT_UPDATE_WITH_READLOCK = ... # type: Any MIXING_NOT_ALLOWED = ... # type: Any DUP_ARGUMENT = ... # type: Any USER_LIMIT_REACHED = ... # type: Any SPECIFIC_ACCESS_DENIED_ERROR = ... # type: Any LOCAL_VARIABLE = ... # type: Any GLOBAL_VARIABLE = ... # type: Any NO_DEFAULT = ... # type: Any WRONG_VALUE_FOR_VAR = ... # type: Any WRONG_TYPE_FOR_VAR = ... # type: Any VAR_CANT_BE_READ = ... # type: Any CANT_USE_OPTION_HERE = ... # type: Any NOT_SUPPORTED_YET = ... # type: Any MASTER_FATAL_ERROR_READING_BINLOG = ... # type: Any SLAVE_IGNORED_TABLE = ... # type: Any INCORRECT_GLOBAL_LOCAL_VAR = ... # type: Any WRONG_FK_DEF = ... # type: Any KEY_REF_DO_NOT_MATCH_TABLE_REF = ... # type: Any OPERAND_COLUMNS = ... # type: Any SUBQUERY_NO_1_ROW = ... # type: Any UNKNOWN_STMT_HANDLER = ... # type: Any CORRUPT_HELP_DB = ... # type: Any CYCLIC_REFERENCE = ... # type: Any AUTO_CONVERT = ... # type: Any ILLEGAL_REFERENCE = ... # type: Any DERIVED_MUST_HAVE_ALIAS = ... # type: Any SELECT_REDUCED = ... # type: Any TABLENAME_NOT_ALLOWED_HERE = ... # type: Any NOT_SUPPORTED_AUTH_MODE = ... # type: Any SPATIAL_CANT_HAVE_NULL = ... # type: Any COLLATION_CHARSET_MISMATCH = ... # type: Any SLAVE_WAS_RUNNING = ... # type: Any SLAVE_WAS_NOT_RUNNING = ... # type: Any TOO_BIG_FOR_UNCOMPRESS = ... # type: Any ZLIB_Z_MEM_ERROR = ... # type: Any ZLIB_Z_BUF_ERROR = ... # type: Any ZLIB_Z_DATA_ERROR = ... # type: Any CUT_VALUE_GROUP_CONCAT = ... # type: Any WARN_TOO_FEW_RECORDS = ... # type: Any WARN_TOO_MANY_RECORDS = ... # type: Any WARN_NULL_TO_NOTNULL = ... # type: Any WARN_DATA_OUT_OF_RANGE = ... # type: Any WARN_DATA_TRUNCATED = ... # type: Any WARN_USING_OTHER_HANDLER = ... # type: Any CANT_AGGREGATE_2COLLATIONS = ... 
# type: Any DROP_USER = ... # type: Any REVOKE_GRANTS = ... # type: Any CANT_AGGREGATE_3COLLATIONS = ... # type: Any CANT_AGGREGATE_NCOLLATIONS = ... # type: Any VARIABLE_IS_NOT_STRUCT = ... # type: Any UNKNOWN_COLLATION = ... # type: Any SLAVE_IGNORED_SSL_PARAMS = ... # type: Any SERVER_IS_IN_SECURE_AUTH_MODE = ... # type: Any WARN_FIELD_RESOLVED = ... # type: Any BAD_SLAVE_UNTIL_COND = ... # type: Any MISSING_SKIP_SLAVE = ... # type: Any UNTIL_COND_IGNORED = ... # type: Any WRONG_NAME_FOR_INDEX = ... # type: Any WRONG_NAME_FOR_CATALOG = ... # type: Any WARN_QC_RESIZE = ... # type: Any BAD_FT_COLUMN = ... # type: Any UNKNOWN_KEY_CACHE = ... # type: Any WARN_HOSTNAME_WONT_WORK = ... # type: Any UNKNOWN_STORAGE_ENGINE = ... # type: Any WARN_DEPRECATED_SYNTAX = ... # type: Any NON_UPDATABLE_TABLE = ... # type: Any FEATURE_DISABLED = ... # type: Any OPTION_PREVENTS_STATEMENT = ... # type: Any DUPLICATED_VALUE_IN_TYPE = ... # type: Any TRUNCATED_WRONG_VALUE = ... # type: Any TOO_MUCH_AUTO_TIMESTAMP_COLS = ... # type: Any INVALID_ON_UPDATE = ... # type: Any UNSUPPORTED_PS = ... # type: Any GET_ERRMSG = ... # type: Any GET_TEMPORARY_ERRMSG = ... # type: Any UNKNOWN_TIME_ZONE = ... # type: Any WARN_INVALID_TIMESTAMP = ... # type: Any INVALID_CHARACTER_STRING = ... # type: Any WARN_ALLOWED_PACKET_OVERFLOWED = ... # type: Any CONFLICTING_DECLARATIONS = ... # type: Any SP_NO_RECURSIVE_CREATE = ... # type: Any SP_ALREADY_EXISTS = ... # type: Any SP_DOES_NOT_EXIST = ... # type: Any SP_DROP_FAILED = ... # type: Any SP_STORE_FAILED = ... # type: Any SP_LILABEL_MISMATCH = ... # type: Any SP_LABEL_REDEFINE = ... # type: Any SP_LABEL_MISMATCH = ... # type: Any SP_UNINIT_VAR = ... # type: Any SP_BADSELECT = ... # type: Any SP_BADRETURN = ... # type: Any SP_BADSTATEMENT = ... # type: Any UPDATE_LOG_DEPRECATED_IGNORED = ... # type: Any UPDATE_LOG_DEPRECATED_TRANSLATED = ... # type: Any QUERY_INTERRUPTED = ... # type: Any SP_WRONG_NO_OF_ARGS = ... # type: Any SP_COND_MISMATCH = ... # type: Any SP_NORETURN = ... # type: Any SP_NORETURNEND = ... # type: Any SP_BAD_CURSOR_QUERY = ... # type: Any SP_BAD_CURSOR_SELECT = ... # type: Any SP_CURSOR_MISMATCH = ... # type: Any SP_CURSOR_ALREADY_OPEN = ... # type: Any SP_CURSOR_NOT_OPEN = ... # type: Any SP_UNDECLARED_VAR = ... # type: Any SP_WRONG_NO_OF_FETCH_ARGS = ... # type: Any SP_FETCH_NO_DATA = ... # type: Any SP_DUP_PARAM = ... # type: Any SP_DUP_VAR = ... # type: Any SP_DUP_COND = ... # type: Any SP_DUP_CURS = ... # type: Any SP_CANT_ALTER = ... # type: Any SP_SUBSELECT_NYI = ... # type: Any STMT_NOT_ALLOWED_IN_SF_OR_TRG = ... # type: Any SP_VARCOND_AFTER_CURSHNDLR = ... # type: Any SP_CURSOR_AFTER_HANDLER = ... # type: Any SP_CASE_NOT_FOUND = ... # type: Any FPARSER_TOO_BIG_FILE = ... # type: Any FPARSER_BAD_HEADER = ... # type: Any FPARSER_EOF_IN_COMMENT = ... # type: Any FPARSER_ERROR_IN_PARAMETER = ... # type: Any FPARSER_EOF_IN_UNKNOWN_PARAMETER = ... # type: Any VIEW_NO_EXPLAIN = ... # type: Any FRM_UNKNOWN_TYPE = ... # type: Any WRONG_OBJECT = ... # type: Any NONUPDATEABLE_COLUMN = ... # type: Any VIEW_SELECT_DERIVED = ... # type: Any VIEW_SELECT_CLAUSE = ... # type: Any VIEW_SELECT_VARIABLE = ... # type: Any VIEW_SELECT_TMPTABLE = ... # type: Any VIEW_WRONG_LIST = ... # type: Any WARN_VIEW_MERGE = ... # type: Any WARN_VIEW_WITHOUT_KEY = ... # type: Any VIEW_INVALID = ... # type: Any SP_NO_DROP_SP = ... # type: Any SP_GOTO_IN_HNDLR = ... # type: Any TRG_ALREADY_EXISTS = ... # type: Any TRG_DOES_NOT_EXIST = ... # type: Any TRG_ON_VIEW_OR_TEMP_TABLE = ... 
# type: Any TRG_CANT_CHANGE_ROW = ... # type: Any TRG_NO_SUCH_ROW_IN_TRG = ... # type: Any NO_DEFAULT_FOR_FIELD = ... # type: Any DIVISION_BY_ZERO = ... # type: Any TRUNCATED_WRONG_VALUE_FOR_FIELD = ... # type: Any ILLEGAL_VALUE_FOR_TYPE = ... # type: Any VIEW_NONUPD_CHECK = ... # type: Any VIEW_CHECK_FAILED = ... # type: Any PROCACCESS_DENIED_ERROR = ... # type: Any RELAY_LOG_FAIL = ... # type: Any PASSWD_LENGTH = ... # type: Any UNKNOWN_TARGET_BINLOG = ... # type: Any IO_ERR_LOG_INDEX_READ = ... # type: Any BINLOG_PURGE_PROHIBITED = ... # type: Any FSEEK_FAIL = ... # type: Any BINLOG_PURGE_FATAL_ERR = ... # type: Any LOG_IN_USE = ... # type: Any LOG_PURGE_UNKNOWN_ERR = ... # type: Any RELAY_LOG_INIT = ... # type: Any NO_BINARY_LOGGING = ... # type: Any RESERVED_SYNTAX = ... # type: Any WSAS_FAILED = ... # type: Any DIFF_GROUPS_PROC = ... # type: Any NO_GROUP_FOR_PROC = ... # type: Any ORDER_WITH_PROC = ... # type: Any LOGGING_PROHIBIT_CHANGING_OF = ... # type: Any NO_FILE_MAPPING = ... # type: Any WRONG_MAGIC = ... # type: Any PS_MANY_PARAM = ... # type: Any KEY_PART_0 = ... # type: Any VIEW_CHECKSUM = ... # type: Any VIEW_MULTIUPDATE = ... # type: Any VIEW_NO_INSERT_FIELD_LIST = ... # type: Any VIEW_DELETE_MERGE_VIEW = ... # type: Any CANNOT_USER = ... # type: Any XAER_NOTA = ... # type: Any XAER_INVAL = ... # type: Any XAER_RMFAIL = ... # type: Any XAER_OUTSIDE = ... # type: Any XAER_RMERR = ... # type: Any XA_RBROLLBACK = ... # type: Any NONEXISTING_PROC_GRANT = ... # type: Any PROC_AUTO_GRANT_FAIL = ... # type: Any PROC_AUTO_REVOKE_FAIL = ... # type: Any DATA_TOO_LONG = ... # type: Any SP_BAD_SQLSTATE = ... # type: Any STARTUP = ... # type: Any LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = ... # type: Any CANT_CREATE_USER_WITH_GRANT = ... # type: Any WRONG_VALUE_FOR_TYPE = ... # type: Any TABLE_DEF_CHANGED = ... # type: Any SP_DUP_HANDLER = ... # type: Any SP_NOT_VAR_ARG = ... # type: Any SP_NO_RETSET = ... # type: Any CANT_CREATE_GEOMETRY_OBJECT = ... # type: Any FAILED_ROUTINE_BREAK_BINLOG = ... # type: Any BINLOG_UNSAFE_ROUTINE = ... # type: Any BINLOG_CREATE_ROUTINE_NEED_SUPER = ... # type: Any EXEC_STMT_WITH_OPEN_CURSOR = ... # type: Any STMT_HAS_NO_OPEN_CURSOR = ... # type: Any COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = ... # type: Any NO_DEFAULT_FOR_VIEW_FIELD = ... # type: Any SP_NO_RECURSION = ... # type: Any TOO_BIG_SCALE = ... # type: Any TOO_BIG_PRECISION = ... # type: Any M_BIGGER_THAN_D = ... # type: Any WRONG_LOCK_OF_SYSTEM_TABLE = ... # type: Any CONNECT_TO_FOREIGN_DATA_SOURCE = ... # type: Any QUERY_ON_FOREIGN_DATA_SOURCE = ... # type: Any FOREIGN_DATA_SOURCE_DOESNT_EXIST = ... # type: Any FOREIGN_DATA_STRING_INVALID_CANT_CREATE = ... # type: Any FOREIGN_DATA_STRING_INVALID = ... # type: Any CANT_CREATE_FEDERATED_TABLE = ... # type: Any TRG_IN_WRONG_SCHEMA = ... # type: Any STACK_OVERRUN_NEED_MORE = ... # type: Any TOO_LONG_BODY = ... # type: Any WARN_CANT_DROP_DEFAULT_KEYCACHE = ... # type: Any TOO_BIG_DISPLAYWIDTH = ... # type: Any XAER_DUPID = ... # type: Any DATETIME_FUNCTION_OVERFLOW = ... # type: Any CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = ... # type: Any VIEW_PREVENT_UPDATE = ... # type: Any PS_NO_RECURSION = ... # type: Any SP_CANT_SET_AUTOCOMMIT = ... # type: Any MALFORMED_DEFINER = ... # type: Any VIEW_FRM_NO_USER = ... # type: Any VIEW_OTHER_USER = ... # type: Any NO_SUCH_USER = ... # type: Any FORBID_SCHEMA_CHANGE = ... # type: Any ROW_IS_REFERENCED_2 = ... # type: Any NO_REFERENCED_ROW_2 = ... # type: Any SP_BAD_VAR_SHADOW = ... # type: Any TRG_NO_DEFINER = ... 
# type: Any OLD_FILE_FORMAT = ... # type: Any SP_RECURSION_LIMIT = ... # type: Any SP_PROC_TABLE_CORRUPT = ... # type: Any SP_WRONG_NAME = ... # type: Any TABLE_NEEDS_UPGRADE = ... # type: Any SP_NO_AGGREGATE = ... # type: Any MAX_PREPARED_STMT_COUNT_REACHED = ... # type: Any VIEW_RECURSIVE = ... # type: Any NON_GROUPING_FIELD_USED = ... # type: Any TABLE_CANT_HANDLE_SPKEYS = ... # type: Any NO_TRIGGERS_ON_SYSTEM_SCHEMA = ... # type: Any USERNAME = ... # type: Any HOSTNAME = ... # type: Any WRONG_STRING_LENGTH = ... # type: Any ERROR_LAST = ... # type: Any mypy-0.560/typeshed/third_party/2and3/pymysql/constants/FIELD_TYPE.pyi0000644€tŠÔÚ€2›s®0000000142013215007212031710 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any DECIMAL = ... # type: Any TINY = ... # type: Any SHORT = ... # type: Any LONG = ... # type: Any FLOAT = ... # type: Any DOUBLE = ... # type: Any NULL = ... # type: Any TIMESTAMP = ... # type: Any LONGLONG = ... # type: Any INT24 = ... # type: Any DATE = ... # type: Any TIME = ... # type: Any DATETIME = ... # type: Any YEAR = ... # type: Any NEWDATE = ... # type: Any VARCHAR = ... # type: Any BIT = ... # type: Any NEWDECIMAL = ... # type: Any ENUM = ... # type: Any SET = ... # type: Any TINY_BLOB = ... # type: Any MEDIUM_BLOB = ... # type: Any LONG_BLOB = ... # type: Any BLOB = ... # type: Any VAR_STRING = ... # type: Any STRING = ... # type: Any GEOMETRY = ... # type: Any CHAR = ... # type: Any INTERVAL = ... # type: Any mypy-0.560/typeshed/third_party/2and3/pymysql/constants/FLAG.pyi0000644€tŠÔÚ€2›s®0000000066413215007212030746 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any NOT_NULL = ... # type: Any PRI_KEY = ... # type: Any UNIQUE_KEY = ... # type: Any MULTIPLE_KEY = ... # type: Any BLOB = ... # type: Any UNSIGNED = ... # type: Any ZEROFILL = ... # type: Any BINARY = ... # type: Any ENUM = ... # type: Any AUTO_INCREMENT = ... # type: Any TIMESTAMP = ... # type: Any SET = ... # type: Any PART_KEY = ... # type: Any GROUP = ... # type: Any UNIQUE = ... # type: Any mypy-0.560/typeshed/third_party/2and3/pymysql/constants/SERVER_STATUS.pyi0000644€tŠÔÚ€2›s®0000000075713215007212032351 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any SERVER_STATUS_IN_TRANS = ... # type: Any SERVER_STATUS_AUTOCOMMIT = ... # type: Any SERVER_MORE_RESULTS_EXISTS = ... # type: Any SERVER_QUERY_NO_GOOD_INDEX_USED = ... # type: Any SERVER_QUERY_NO_INDEX_USED = ... # type: Any SERVER_STATUS_CURSOR_EXISTS = ... # type: Any SERVER_STATUS_LAST_ROW_SENT = ... # type: Any SERVER_STATUS_DB_DROPPED = ... # type: Any SERVER_STATUS_NO_BACKSLASH_ESCAPES = ... # type: Any SERVER_STATUS_METADATA_CHANGED = ... # type: Any mypy-0.560/typeshed/third_party/2and3/pymysql/converters.pyi0000644€tŠÔÚ€2›s®0000000264213215007212030411 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .constants import FIELD_TYPE as FIELD_TYPE, FLAG as FLAG from .charset import charset_by_id as charset_by_id PYTHON3 = ... # type: Any ESCAPE_REGEX = ... # type: Any ESCAPE_MAP = ... # type: Any def escape_item(val, charset): ... def escape_dict(val, charset): ... def escape_sequence(val, charset): ... def escape_set(val, charset): ... def escape_bool(value): ... def escape_object(value): ... escape_int = ... # type: Any escape_long = ... # type: Any def escape_float(value): ... def escape_string(value): ... def escape_unicode(value): ... def escape_None(value): ... def escape_timedelta(obj): ... def escape_time(obj): ... def escape_datetime(obj): ... 
def escape_date(obj): ... def escape_struct_time(obj): ... def convert_datetime(connection, field, obj): ... def convert_timedelta(connection, field, obj): ... def convert_time(connection, field, obj): ... def convert_date(connection, field, obj): ... def convert_mysql_timestamp(connection, field, timestamp): ... def convert_set(s): ... def convert_bit(connection, field, b): ... def convert_characters(connection, field, data): ... def convert_int(connection, field, data): ... def convert_long(connection, field, data): ... def convert_float(connection, field, data): ... encoders = ... # type: Any decoders = ... # type: Any conversions = ... # type: Any def convert_decimal(connection, field, data): ... def escape_decimal(obj): ... mypy-0.560/typeshed/third_party/2and3/pymysql/cursors.pyi0000644€tŠÔÚ€2›s®0000000244513215007212027720 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Union, Tuple, Any, Dict, Optional, Text from .connections import Connection Gen = Union[Tuple[Any, ...], Dict[str, Any]] class Cursor: connection = ... # type: Connection description = ... # type: Tuple[Text, ...] rownumber = ... # type: int rowcount = ... # type: int arraysize = ... # type: int messages = ... # type: Any errorhandler = ... # type: Any lastrowid = ... # type: int def __init__(self, connection: Connection) -> None: ... def __del__(self) -> None: ... def close(self) -> None: ... def setinputsizes(self, *args): ... def setoutputsizes(self, *args): ... def nextset(self): ... def execute(self, query: str, args=None) -> int: ... def executemany(self, query: str, args) -> int: ... def callproc(self, procname, args=...): ... def fetchone(self) -> Optional[Gen]: ... def fetchmany(self, size: Optional[int] = ...) -> Optional[Gen]: ... def fetchall(self) -> Optional[Tuple[Gen, ...]]: ... def scroll(self, value, mode=''): ... def __iter__(self): ... class DictCursor(Cursor): def fetchone(self) -> Optional[Dict[str, Any]]: ... def fetchmany(self, size=None) -> Optional[Tuple[Dict[str, Any], ...]]: ... def fetchall(self) -> Optional[Tuple[Dict[str, Any], ...]]: ... mypy-0.560/typeshed/third_party/2and3/pymysql/err.pyi0000644€tŠÔÚ€2›s®0000000103313215007212027000 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Dict from .constants import ER as ER class MySQLError(Exception): ... class Warning(MySQLError): ... class Error(MySQLError): ... class InterfaceError(Error): ... class DatabaseError(Error): ... class DataError(DatabaseError): ... class OperationalError(DatabaseError): ... class IntegrityError(DatabaseError): ... class InternalError(DatabaseError): ... class ProgrammingError(DatabaseError): ... class NotSupportedError(DatabaseError): ... error_map = ... # type: Dict def raise_mysql_exception(data) -> None: ... mypy-0.560/typeshed/third_party/2and3/pymysql/times.pyi0000644€tŠÔÚ€2›s®0000000034213215007212027333 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any Date = ... # type: Any Time = ... # type: Any TimeDelta = ... # type: Any Timestamp = ... # type: Any def DateFromTicks(ticks): ... def TimeFromTicks(ticks): ... def TimestampFromTicks(ticks): ... mypy-0.560/typeshed/third_party/2and3/pymysql/util.pyi0000644€tŠÔÚ€2›s®0000000010213215007212027161 0ustar jukkaDROPBOX\Domain Users00000000000000def byte2int(b): ... def int2byte(i): ... def join_bytes(bs): ... 
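The pymysql stubs above cover the DB-API surface: Connection (connections.pyi), Cursor and DictCursor (cursors.pyi), and the exception hierarchy (err.pyi). A minimal usage sketch under these stubs follows; it is illustrative only, the host/user/password/db values are placeholders, and a reachable MySQL server is assumed.

import pymysql

# Connection parameters are placeholders for this sketch.
conn = pymysql.connect(host="localhost", user="app", password="secret", db="test")
try:
    cur = conn.cursor()                          # Cursor, as declared in cursors.pyi
    affected = cur.execute("SELECT VERSION()")   # execute(...) -> int per the stub
    row = cur.fetchone()                         # Optional[Tuple[Any, ...]]
    print(affected, row)
except pymysql.err.OperationalError as exc:      # error classes from err.pyi
    print("query failed:", exc)
finally:
    conn.close()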
mypy-0.560/typeshed/third_party/2and3/pynamodb/0000755€tŠÔÚ€2›s®0000000000013215007244025570 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/pynamodb/__init__.pyi0000644€tŠÔÚ€2›s®0000000002113215007212030036 0ustar jukkaDROPBOX\Domain Users00000000000000__license__: str mypy-0.560/typeshed/third_party/2and3/pynamodb/attributes.pyi0000644€tŠÔÚ€2›s®0000000705713215007212030505 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Dict, Generic, Iterable, List, Mapping, Optional, Text, Type, TypeVar, Union, Set from datetime import datetime _T = TypeVar('_T') _KT = TypeVar('_KT') _VT = TypeVar('_VT') _MT = TypeVar('_MT', bound='MapAttribute') Number = Union[int, float] class Attribute(Generic[_T]): attr_name: Optional[Text] attr_type: Text null: bool default: Any is_hash_key: bool is_range_key: bool def __init__(self, hash_key: bool = ..., range_key: bool = ..., null: Optional[bool] = ..., default: Optional[Union[_T, Callable[..., _T]]] = ..., attr_name: Optional[Text] = ...) -> None: ... def __set__(self, instance: Any, value: Optional[_T]) -> None: ... def serialize(self, value: Any) -> Any: ... def deserialize(self, value: Any) -> Any: ... def get_value(self, value: Any) -> Any: ... class SetMixin(object): def serialize(self, value): ... def deserialize(self, value): ... class BinaryAttribute(Attribute[bytes]): def __get__(self, instance: Any, owner: Any) -> bytes: ... class BinarySetAttribute(SetMixin, Attribute[Set[bytes]]): def __get__(self, instance: Any, owner: Any) -> Set[bytes]: ... class UnicodeSetAttribute(SetMixin, Attribute[Set[Text]]): def element_serialize(self, value: Any) -> Any: ... def element_deserialize(self, value: Any) -> Any: ... def __get__(self, instance: Any, owner: Any) -> Set[Text]: ... class UnicodeAttribute(Attribute[Text]): def __get__(self, instance: Any, owner: Any) -> Text: ... class JSONAttribute(Attribute[Any]): def __get__(self, instance: Any, owner: Any) -> Any: ... class LegacyBooleanAttribute(Attribute[bool]): def __get__(self, instance: Any, owner: Any) -> bool: ... class BooleanAttribute(Attribute[bool]): def __get__(self, instance: Any, owner: Any) -> bool: ... class NumberSetAttribute(SetMixin, Attribute[Set[Number]]): def __get__(self, instance: Any, owner: Any) -> Set[Number]: ... class NumberAttribute(Attribute[Number]): def __get__(self, instance: Any, owner: Any) -> Number: ... class UTCDateTimeAttribute(Attribute[datetime]): def __get__(self, instance: Any, owner: Any) -> datetime: ... class NullAttribute(Attribute[None]): def __get__(self, instance: Any, owner: Any) -> None: ... class MapAttributeMeta(type): def __init__(cls, name, bases, attrs) -> None: ... class MapAttribute(Generic[_KT, _VT], Attribute[Mapping[_KT, _VT]], metaclass=MapAttributeMeta): attribute_values: Any def __init__(self, hash_key: bool = ..., range_key: bool = ..., null: Optional[bool] = ..., default: Optional[Union[Any, Callable[..., Any]]] = ..., attr_name: Optional[Text] = ..., **attrs) -> None: ... def __iter__(self) -> Iterable[_VT]: ... def __getattr__(self, attr: str) -> _VT: ... def __getitem__(self, item: _KT) -> _VT: ... def __set__(self, instance: Any, value: Union[None, MapAttribute[_KT, _VT], Mapping[_KT, _VT]]) -> None: ... def __get__(self: _MT, instance: Any, owner: Any) -> _MT: ... def is_type_safe(self, key: Any, value: Any) -> bool: ... def validate(self) -> bool: ... 
class ListAttribute(Generic[_T], Attribute[List[_T]]): element_type: Any def __init__(self, hash_key: bool = ..., range_key: bool = ..., null: Optional[bool] = ..., default: Optional[Union[Any, Callable[..., Any]]] = ..., attr_name: Optional[Text] = ..., of: Optional[Type[_T]] = ...) -> None: ... def __get__(self, instance: Any, owner: Any) -> List[_T]: ... DESERIALIZE_CLASS_MAP: Dict[Text, Attribute] SERIALIZE_CLASS_MAP: Dict[Type, Attribute] SERIALIZE_KEY_MAP: Dict[Type, Text] mypy-0.560/typeshed/third_party/2and3/pynamodb/connection/0000755€tŠÔÚ€2›s®0000000000013215007244027727 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/pynamodb/connection/__init__.pyi0000644€tŠÔÚ€2›s®0000000014613215007212032205 0ustar jukkaDROPBOX\Domain Users00000000000000from pynamodb.connection.base import Connection from pynamodb.connection.table import TableConnection mypy-0.560/typeshed/third_party/2and3/pynamodb/connection/base.pyi0000644€tŠÔÚ€2›s®0000001221013215007212031353 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Dict, Optional, Text BOTOCORE_EXCEPTIONS: Any log: Any class MetaTable: data: Dict def __init__(self, data: Dict) -> None: ... @property def range_keyname(self) -> Optional[Text]: ... @property def hash_keyname(self) -> Text: ... def get_index_hash_keyname(self, index_name: Text) -> Optional[Text]: ... def get_item_attribute_map(self, attributes, item_key: Any = ..., pythonic_key: bool = ...): ... def get_attribute_type(self, attribute_name, value: Optional[Any] = ...): ... def get_identifier_map(self, hash_key, range_key: Optional[Any] = ..., key: Any = ...): ... def get_exclusive_start_key_map(self, exclusive_start_key): ... class Connection: host: Any region: Any session_cls: Any def __init__(self, region: Optional[Any] = ..., host: Optional[Any] = ..., session_cls: Optional[Any] = ..., request_timeout_seconds: Optional[Any] = ..., max_retry_attempts: Optional[Any] = ..., base_backoff_ms: Optional[Any] = ...) -> None: ... def dispatch(self, operation_name, operation_kwargs): ... @property def session(self): ... @property def requests_session(self): ... @property def client(self): ... def get_meta_table(self, table_name: Text, refresh: bool = ...): ... def create_table(self, table_name: Text, attribute_definitions: Optional[Any] = ..., key_schema: Optional[Any] = ..., read_capacity_units: Optional[Any] = ..., write_capacity_units: Optional[Any] = ..., global_secondary_indexes: Optional[Any] = ..., local_secondary_indexes: Optional[Any] = ..., stream_specification: Optional[Any] = ...): ... def delete_table(self, table_name: Text): ... def update_table(self, table_name: Text, read_capacity_units: Optional[Any] = ..., write_capacity_units: Optional[Any] = ..., global_secondary_index_updates: Optional[Any] = ...): ... def list_tables(self, exclusive_start_table_name: Optional[Any] = ..., limit: Optional[Any] = ...): ... def describe_table(self, table_name: Text): ... def get_conditional_operator(self, operator): ... def get_item_attribute_map(self, table_name: Text, attributes, item_key: Any = ..., pythonic_key: bool = ...): ... def get_expected_map(self, table_name: Text, expected): ... def parse_attribute(self, attribute, return_type: bool = ...): ... def get_attribute_type(self, table_name: Text, attribute_name, value: Optional[Any] = ...): ... def get_identifier_map(self, table_name: Text, hash_key, range_key: Optional[Any] = ..., key: Any = ...): ... def get_query_filter_map(self, table_name: Text, query_filters): ... 
def get_consumed_capacity_map(self, return_consumed_capacity): ... def get_return_values_map(self, return_values): ... def get_item_collection_map(self, return_item_collection_metrics): ... def get_exclusive_start_key_map(self, table_name: Text, exclusive_start_key): ... def delete_item(self, table_name: Text, hash_key, range_key: Optional[Any] = ..., expected: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_values: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ... def update_item(self, table_name: Text, hash_key, range_key: Optional[Any] = ..., attribute_updates: Optional[Any] = ..., expected: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ..., return_values: Optional[Any] = ...): ... def put_item(self, table_name: Text, hash_key, range_key: Optional[Any] = ..., attributes: Optional[Any] = ..., expected: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_values: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ... def batch_write_item(self, table_name: Text, put_items: Optional[Any] = ..., delete_items: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ... def batch_get_item(self, table_name: Text, keys, consistent_read: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., attributes_to_get: Optional[Any] = ...): ... def get_item(self, table_name: Text, hash_key, range_key: Optional[Any] = ..., consistent_read: bool = ..., attributes_to_get: Optional[Any] = ...): ... def scan(self, table_name: Text, attributes_to_get: Optional[Any] = ..., limit: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., scan_filter: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., exclusive_start_key: Optional[Any] = ..., segment: Optional[Any] = ..., total_segments: Optional[Any] = ...): ... def query(self, table_name: Text, hash_key, attributes_to_get: Optional[Any] = ..., consistent_read: bool = ..., exclusive_start_key: Optional[Any] = ..., index_name: Optional[Any] = ..., key_conditions: Optional[Any] = ..., query_filters: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., limit: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., scan_index_forward: Optional[Any] = ..., select: Optional[Any] = ...): ... mypy-0.560/typeshed/third_party/2and3/pynamodb/connection/table.pyi0000644€tŠÔÚ€2›s®0000000605513215007212031542 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class TableConnection: table_name: Any connection: Any def __init__(self, table_name, region: Optional[Any] = ..., host: Optional[Any] = ..., session_cls: Optional[Any] = ..., request_timeout_seconds: Optional[Any] = ..., max_retry_attempts: Optional[Any] = ..., base_backoff_ms: Optional[Any] = ...) -> None: ... def delete_item(self, hash_key, range_key: Optional[Any] = ..., expected: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_values: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ... 
def update_item(self, hash_key, range_key: Optional[Any] = ..., attribute_updates: Optional[Any] = ..., expected: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ..., return_values: Optional[Any] = ...): ... def put_item(self, hash_key, range_key: Optional[Any] = ..., attributes: Optional[Any] = ..., expected: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_values: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ... def batch_write_item(self, put_items: Optional[Any] = ..., delete_items: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ... def batch_get_item(self, keys, consistent_read: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., attributes_to_get: Optional[Any] = ...): ... def get_item(self, hash_key, range_key: Optional[Any] = ..., consistent_read: bool = ..., attributes_to_get: Optional[Any] = ...): ... def scan(self, attributes_to_get: Optional[Any] = ..., limit: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., scan_filter: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., segment: Optional[Any] = ..., total_segments: Optional[Any] = ..., exclusive_start_key: Optional[Any] = ...): ... def query(self, hash_key, attributes_to_get: Optional[Any] = ..., consistent_read: bool = ..., exclusive_start_key: Optional[Any] = ..., index_name: Optional[Any] = ..., key_conditions: Optional[Any] = ..., query_filters: Optional[Any] = ..., limit: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., scan_index_forward: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., select: Optional[Any] = ...): ... def describe_table(self): ... def delete_table(self): ... def update_table(self, read_capacity_units: Optional[Any] = ..., write_capacity_units: Optional[Any] = ..., global_secondary_index_updates: Optional[Any] = ...): ... def create_table(self, attribute_definitions: Optional[Any] = ..., key_schema: Optional[Any] = ..., read_capacity_units: Optional[Any] = ..., write_capacity_units: Optional[Any] = ..., global_secondary_indexes: Optional[Any] = ..., local_secondary_indexes: Optional[Any] = ..., stream_specification: Optional[Any] = ...): ... mypy-0.560/typeshed/third_party/2and3/pynamodb/connection/util.pyi0000644€tŠÔÚ€2›s®0000000010313215007212031414 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Text def pythonic(var_name: Text) -> Text: ... 
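The connection subpackage stubs above (base.pyi, table.pyi, util.pyi) describe pynamodb's low-level DynamoDB client. A minimal sketch based only on the signatures declared here is shown below; the region and table name are placeholders, and configured AWS credentials plus an existing table are assumed.

from pynamodb.connection import Connection, TableConnection

# Region and table name are placeholders; both constructors match the stubs above.
conn = Connection(region="us-east-1")
print(conn.list_tables(limit=10))                    # ListTables call, per base.pyi

table = TableConnection("Thread", region="us-east-1")
item = table.get_item("hash-key-value", consistent_read=True)   # GetItem, per table.pyi
print(item)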
mypy-0.560/typeshed/third_party/2and3/pynamodb/constants.pyi0000644€tŠÔÚ€2›s®0000000573613215007212030335 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any BATCH_WRITE_ITEM: str DESCRIBE_TABLE: str BATCH_GET_ITEM: str CREATE_TABLE: str UPDATE_TABLE: str DELETE_TABLE: str LIST_TABLES: str UPDATE_ITEM: str DELETE_ITEM: str GET_ITEM: str PUT_ITEM: str QUERY: str SCAN: str GLOBAL_SECONDARY_INDEX_UPDATES: str RETURN_ITEM_COLL_METRICS: str EXCLUSIVE_START_TABLE_NAME: str RETURN_CONSUMED_CAPACITY: str COMPARISON_OPERATOR: str SCAN_INDEX_FORWARD: str ATTR_DEFINITIONS: str ATTR_VALUE_LIST: str TABLE_DESCRIPTION: str UNPROCESSED_KEYS: str UNPROCESSED_ITEMS: str CONSISTENT_READ: str DELETE_REQUEST: str RETURN_VALUES: str REQUEST_ITEMS: str ATTRS_TO_GET: str ATTR_UPDATES: str TABLE_STATUS: str SCAN_FILTER: str TABLE_NAME: str KEY_SCHEMA: str ATTR_NAME: str ATTR_TYPE: str ITEM_COUNT: str CAMEL_COUNT: str PUT_REQUEST: str INDEX_NAME: str ATTRIBUTES: str TABLE_KEY: str RESPONSES: str RANGE_KEY: str KEY_TYPE: str ACTION: str UPDATE: str EXISTS: str SELECT: str ACTIVE: str LIMIT: str ITEMS: str ITEM: str KEYS: str UTC: str KEY: str DEFAULT_ENCODING: str DEFAULT_REGION: str DATETIME_FORMAT: str SERVICE_NAME: str HTTP_OK: int HTTP_BAD_REQUEST: int PROVISIONED_THROUGHPUT: str READ_CAPACITY_UNITS: str WRITE_CAPACITY_UNITS: str STRING_SHORT: str STRING_SET_SHORT: str NUMBER_SHORT: str NUMBER_SET_SHORT: str BINARY_SHORT: str BINARY_SET_SHORT: str MAP_SHORT: str LIST_SHORT: str BOOLEAN: str BOOLEAN_SHORT: str STRING: str STRING_SET: str NUMBER: str NUMBER_SET: str BINARY: str BINARY_SET: str MAP: str LIST: str SHORT_ATTR_TYPES: Any ATTR_TYPE_MAP: Any LOCAL_SECONDARY_INDEX: str LOCAL_SECONDARY_INDEXES: str GLOBAL_SECONDARY_INDEX: str GLOBAL_SECONDARY_INDEXES: str PROJECTION: str PROJECTION_TYPE: str NON_KEY_ATTRIBUTES: str KEYS_ONLY: str ALL: str INCLUDE: str STREAM_VIEW_TYPE: str STREAM_SPECIFICATION: str STREAM_ENABLED: str STREAM_NEW_IMAGE: str STREAM_OLD_IMAGE: str STREAM_NEW_AND_OLD_IMAGE: str STREAM_KEYS_ONLY: str EXCLUSIVE_START_KEY: str LAST_EVALUATED_KEY: str QUERY_FILTER: str BEGINS_WITH: str BETWEEN: str EQ: str NE: str LE: str LT: str GE: str GT: str IN: str KEY_CONDITIONS: str COMPARISON_OPERATOR_VALUES: Any QUERY_OPERATOR_MAP: Any NOT_NULL: str NULL: str CONTAINS: str NOT_CONTAINS: str ALL_ATTRIBUTES: str ALL_PROJECTED_ATTRIBUTES: str SPECIFIC_ATTRIBUTES: str COUNT: str SELECT_VALUES: Any SCAN_OPERATOR_MAP: Any QUERY_FILTER_OPERATOR_MAP: Any DELETE_FILTER_OPERATOR_MAP: Any UPDATE_FILTER_OPERATOR_MAP: Any PUT_FILTER_OPERATOR_MAP: Any SEGMENT: str TOTAL_SEGMENTS: str SCAN_FILTER_VALUES: Any QUERY_FILTER_VALUES: Any DELETE_FILTER_VALUES: Any VALUE: str EXPECTED: str CONSUMED_CAPACITY: str CAPACITY_UNITS: str INDEXES: str TOTAL: str NONE: str RETURN_CONSUMED_CAPACITY_VALUES: Any SIZE: str RETURN_ITEM_COLL_METRICS_VALUES: Any ALL_OLD: str UPDATED_OLD: str ALL_NEW: str UPDATED_NEW: str RETURN_VALUES_VALUES: Any PUT: str DELETE: str ADD: str ATTR_UPDATE_ACTIONS: Any BATCH_GET_PAGE_LIMIT: int BATCH_WRITE_PAGE_LIMIT: int META_CLASS_NAME: str REGION: str HOST: str CONDITIONAL_OPERATOR: str AND: str OR: str CONDITIONAL_OPERATORS: Any mypy-0.560/typeshed/third_party/2and3/pynamodb/exceptions.pyi0000644€tŠÔÚ€2›s®0000000165413215007212030475 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Text class PynamoDBException(Exception): msg: str cause: Any def __init__(self, msg: Optional[Text] = ..., cause: Optional[Exception] = ...) -> None: ... 
class PynamoDBConnectionError(PynamoDBException): pass class DeleteError(PynamoDBConnectionError): pass class QueryError(PynamoDBConnectionError): pass class ScanError(PynamoDBConnectionError): pass class PutError(PynamoDBConnectionError): pass class UpdateError(PynamoDBConnectionError): pass class GetError(PynamoDBConnectionError): pass class TableError(PynamoDBConnectionError): pass class DoesNotExist(PynamoDBException): pass class TableDoesNotExist(PynamoDBException): def __init__(self, table_name) -> None: ... class VerboseClientError(Exception): MSG_TEMPLATE: Any def __init__(self, error_response, operation_name, verbose_properties: Optional[Any] = ...) -> None: ... mypy-0.560/typeshed/third_party/2and3/pynamodb/indexes.pyi0000644€tŠÔÚ€2›s®0000000172513215007212027752 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional class IndexMeta(type): def __init__(cls, name, bases, attrs) -> None: ... class Index(metaclass=IndexMeta): Meta: Any def __init__(self) -> None: ... @classmethod def count(cls, hash_key, consistent_read: bool = ..., **filters) -> int: ... @classmethod def query(self, hash_key, scan_index_forward: Optional[Any] = ..., consistent_read: bool = ..., limit: Optional[Any] = ..., last_evaluated_key: Optional[Any] = ..., attributes_to_get: Optional[Any] = ..., **filters): ... class GlobalSecondaryIndex(Index): ... class LocalSecondaryIndex(Index): ... class Projection(object): projection_type: Any non_key_attributes: Any class KeysOnlyProjection(Projection): projection_type: Any class IncludeProjection(Projection): projection_type: Any non_key_attributes: Any def __init__(self, non_attr_keys: Optional[Any] = ...) -> None: ... class AllProjection(Projection): projection_type: Any mypy-0.560/typeshed/third_party/2and3/pynamodb/models.pyi0000644€tŠÔÚ€2›s®0000001102613215007212027571 0ustar jukkaDROPBOX\Domain Users00000000000000from .exceptions import DoesNotExist from typing import Any, Dict, Generic, Iterable, Iterator, List, Optional, Sequence, Tuple, Type, TypeVar, Text, Union log: Any class DefaultMeta: ... class ResultSet(Iterable): results: Any operation: Any arguments: Any def __init__(self, results, operation, arguments) -> None: ... def __iter__(self): ... class MetaModel(type): def __init__(self, name: Text, bases: Tuple[type, ...], attrs: Dict[Any, Any]) -> None: ... _T = TypeVar('_T', bound='Model') KeyType = Union[Text, bytes, float, int, Tuple] class Model(metaclass=MetaModel): DoesNotExist = DoesNotExist attribute_values: Dict[Text, Any] def __init__(self, hash_key: Optional[KeyType] = ..., range_key: Optional[Any] = ..., **attrs) -> None: ... @classmethod def has_map_or_list_attributes(cls: Type[_T]) -> bool: ... @classmethod def batch_get(cls: Type[_T], items: Iterable[Union[KeyType, Iterable[KeyType]]], consistent_read: Optional[bool] = ..., attributes_to_get: Optional[Sequence[Text]] = ...) -> Iterator[_T]: ... @classmethod def batch_write(cls: Type[_T], auto_commit: bool = ...) -> BatchWrite[_T]: ... def delete(self, conditional_operator: Optional[Text] = ..., **expected_values) -> Any: ... def update(self, attributes: Dict[Text, Dict[Text, Any]], conditional_operator: Optional[Text] = ..., **expected_values) -> Any: ... def update_item(self, attribute: Text, value: Optional[Any] = ..., action: Optional[Text] = ..., conditional_operator: Optional[Text] = ..., **expected_values): ... def save(self, conditional_operator: Optional[Text] = ..., **expected_values) -> Dict[str, Any]: ... def refresh(self, consistent_read: bool = ...): ... 
@classmethod def get(cls: Type[_T], hash_key: KeyType, range_key: Optional[KeyType] = ..., consistent_read: bool = ...) -> _T: ... @classmethod def from_raw_data(cls: Type[_T], data) -> _T: ... @classmethod def count(cls: Type[_T], hash_key: Optional[KeyType] = ..., consistent_read: bool = ..., index_name: Optional[Text] = ..., limit: Optional[int] = ..., **filters) -> int: ... @classmethod def query(cls: Type[_T], hash_key: KeyType, consistent_read: bool = ..., index_name: Optional[Text] = ..., scan_index_forward: Optional[Any] = ..., conditional_operator: Optional[Text] = ..., limit: Optional[int] = ..., last_evaluated_key: Optional[Any] = ..., attributes_to_get: Optional[Iterable[Text]] = ..., page_size: Optional[int] = ..., **filters) -> Iterator[_T]: ... @classmethod def rate_limited_scan(cls: Type[_T], attributes_to_get: Optional[Sequence[Text]], segment: Optional[int] = ..., total_segments: Optional[int] = ..., limit: Optional[int] = ..., conditional_operator: Optional[Text] = ..., last_evaluated_key: Optional[Any] = ..., page_size: Optional[int] = ..., timeout_seconds: Optional[int] = ..., read_capacity_to_consume_per_second: int = ..., max_sleep_between_retry: int = ..., max_consecutive_exceptions: int = ..., **filters: Any): ... @classmethod def scan(cls: Type[_T], segment: Optional[int] = ..., total_segments: Optional[int] = ..., limit: Optional[int] = ..., conditional_operator: Optional[Text] = ..., last_evaluated_key: Optional[Any] = ..., page_size: Optional[int] = ..., **filters) -> Iterator[_T]: ... @classmethod def exists(cls: Type[_T]) -> bool: ... @classmethod def delete_table(cls): ... @classmethod def describe_table(cls): ... @classmethod def create_table(cls: Type[_T], wait: bool = ..., read_capacity_units: Optional[Any] = ..., write_capacity_units: Optional[Any] = ...): ... @classmethod def dumps(cls): ... @classmethod def dump(cls, filename): ... @classmethod def loads(cls, data): ... @classmethod def load(cls, filename): ... @classmethod def add_throttle_record(cls, records): ... @classmethod def get_throttle(cls): ... @classmethod def _get_attributes(cls) -> Dict[str, Any]: ... class ModelContextManager(Generic[_T]): model: Type[_T] auto_commit: bool max_operations: int pending_operations: List[Dict[Text, Any]] def __init__(self, model: Type[_T], auto_commit: bool = ...) -> None: ... def __enter__(self) -> ModelContextManager[_T]: ... class BatchWrite(Generic[_T], ModelContextManager[_T]): def save(self, put_item: _T) -> None: ... def delete(self, del_item: _T) -> None: ... def __enter__(self) -> BatchWrite[_T]: ... def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... pending_operations: Any def commit(self) -> None: ... mypy-0.560/typeshed/third_party/2and3/pynamodb/settings.pyi0000644€tŠÔÚ€2›s®0000000022113215007212030141 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any log: Any default_settings_dict: Any OVERRIDE_SETTINGS_PATH: Any override_settings: Any def get_settings_value(key): ... mypy-0.560/typeshed/third_party/2and3/pynamodb/throttle.pyi0000644€tŠÔÚ€2›s®0000000073013215007212030153 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional log: Any class ThrottleBase: capacity: Any window: Any records: Any sleep_interval: Any def __init__(self, capacity, window: int = ..., initial_sleep: Optional[Any] = ...) -> None: ... def add_record(self, record): ... def throttle(self): ... class NoThrottle(ThrottleBase): def __init__(self) -> None: ... def add_record(self, record): ... 
class Throttle(ThrottleBase): def throttle(self): ... mypy-0.560/typeshed/third_party/2and3/pynamodb/types.pyi0000644€tŠÔÚ€2›s®0000000007113215007212027450 0ustar jukkaDROPBOX\Domain Users00000000000000STRING: str NUMBER: str BINARY: str HASH: str RANGE: str mypy-0.560/typeshed/third_party/2and3/pytz/0000755€tŠÔÚ€2›s®0000000000013215007244024765 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/pytz/__init__.pyi0000644€tŠÔÚ€2›s®0000000262113215007212027243 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pytz (Python 3.5) import datetime from typing import Optional, List, Set, Dict, Union all_timezones = ... # type: List all_timezones_set = ... # type: Set common_timezones = ... # type: List common_timezones_set = ... # type: Set country_timezones = ... # type: Dict country_names = ... # type: Dict class _UTCclass(datetime.tzinfo): zone = ... # type: str def fromutc(self, dt: datetime.datetime) -> datetime.datetime: ... def utcoffset(self, dt: Optional[datetime.datetime]) -> datetime.timedelta: ... def tzname(self, dt: Optional[datetime.datetime]) -> str: ... def dst(self, dt: Optional[datetime.datetime]) -> datetime.timedelta: ... def localize(self, dt: datetime.datetime, is_dst: bool = ...) -> datetime.datetime: ... def normalize(self, dt: datetime.datetime, is_dst: bool = ...) -> datetime.datetime: ... utc = ... # type: _UTCclass UTC = ... # type: _UTCclass class _BaseTzInfo(datetime.tzinfo): zone = ... # type: str def fromutc(self, dt: datetime.datetime) -> datetime.datetime: ... def localize(self, dt: datetime.datetime, is_dst: Optional[bool] = ...) -> datetime.datetime: ... def normalize(self, dt: datetime.datetime) -> datetime.datetime: ... class _StaticTzInfo(_BaseTzInfo): def normalize(self, dt: datetime.datetime, is_dst: Optional[bool] = ...) -> datetime.datetime: ... def timezone(zone: str) -> _BaseTzInfo: ... mypy-0.560/typeshed/third_party/2and3/pytz/lazy.pyi0000644€tŠÔÚ€2›s®0000000055413215007212026466 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Iterator, List, Set, TypeVar from collections import Mapping _T = TypeVar('_T') _KT = TypeVar('_KT') _VT = TypeVar('_VT') class LazyDict(Mapping[_KT, _VT]): def __getitem__(self, key: _KT) -> _VT: ... def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... class LazyList(List[_T]): ... class LazySet(Set[_T]): ... mypy-0.560/typeshed/third_party/2and3/requests/0000755€tŠÔÚ€2›s®0000000000013215007244025632 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/requests/__init__.pyi0000644€tŠÔÚ€2›s®0000000176213215007212030115 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests (based on version 2.6.0, Python 3) from typing import Any from . import models from . import api from . import sessions from . import status_codes from . import exceptions from . import packages import logging __title__ = ... # type: Any __build__ = ... # type: Any __license__ = ... # type: Any __copyright__ = ... # type: Any __version__ = ... 
# type: Any Request = models.Request Response = models.Response PreparedRequest = models.PreparedRequest request = api.request get = api.get head = api.head post = api.post patch = api.patch put = api.put delete = api.delete options = api.options session = sessions.session Session = sessions.Session codes = status_codes.codes RequestException = exceptions.RequestException Timeout = exceptions.Timeout URLRequired = exceptions.URLRequired TooManyRedirects = exceptions.TooManyRedirects HTTPError = exceptions.HTTPError ConnectionError = exceptions.ConnectionError class NullHandler(logging.Handler): def emit(self, record): ... mypy-0.560/typeshed/third_party/2and3/requests/adapters.pyi0000644€tŠÔÚ€2›s®0000000560513215007212030161 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.adapters (Python 3) from typing import Any, Container, Union, Text, Tuple from . import models from .packages.urllib3 import poolmanager from .packages.urllib3 import response from .packages.urllib3.util import retry from . import compat from . import utils from . import structures from .packages.urllib3 import exceptions as urllib3_exceptions from . import cookies from . import exceptions from . import auth PreparedRequest = models.PreparedRequest Response = models.Response PoolManager = poolmanager.PoolManager proxy_from_url = poolmanager.proxy_from_url HTTPResponse = response.HTTPResponse Retry = retry.Retry DEFAULT_CA_BUNDLE_PATH = utils.DEFAULT_CA_BUNDLE_PATH get_encoding_from_headers = utils.get_encoding_from_headers prepend_scheme_if_needed = utils.prepend_scheme_if_needed get_auth_from_url = utils.get_auth_from_url urldefragauth = utils.urldefragauth CaseInsensitiveDict = structures.CaseInsensitiveDict ConnectTimeoutError = urllib3_exceptions.ConnectTimeoutError MaxRetryError = urllib3_exceptions.MaxRetryError ProtocolError = urllib3_exceptions.ProtocolError ReadTimeoutError = urllib3_exceptions.ReadTimeoutError ResponseError = urllib3_exceptions.ResponseError extract_cookies_to_jar = cookies.extract_cookies_to_jar ConnectionError = exceptions.ConnectionError ConnectTimeout = exceptions.ConnectTimeout ReadTimeout = exceptions.ReadTimeout SSLError = exceptions.SSLError ProxyError = exceptions.ProxyError RetryError = exceptions.RetryError DEFAULT_POOLBLOCK = ... # type: Any DEFAULT_POOLSIZE = ... # type: Any DEFAULT_RETRIES = ... # type: Any class BaseAdapter: def __init__(self) -> None: ... def send(self, request: PreparedRequest, stream: bool = ..., timeout: Union[None, float, Tuple[float, float]] = ..., verify: bool = ..., cert: Union[None, Union[bytes, Text], Container[Union[bytes, Text]]] = ... ) -> Response: ... def close(self) -> None: ... class HTTPAdapter(BaseAdapter): __attrs__ = ... # type: Any max_retries = ... # type: Any config = ... # type: Any proxy_manager = ... # type: Any def __init__(self, pool_connections=..., pool_maxsize=..., max_retries=..., pool_block=...) -> None: ... poolmanager = ... # type: Any def init_poolmanager(self, connections, maxsize, block=..., **pool_kwargs): ... def proxy_manager_for(self, proxy, **proxy_kwargs): ... def cert_verify(self, conn, url, verify, cert): ... def build_response(self, req, resp): ... def get_connection(self, url, proxies=...): ... def close(self): ... def request_url(self, request, proxies): ... def add_headers(self, request, **kwargs): ... def proxy_headers(self, proxy): ... # TODO: "request" is not actually optional, modified to please mypy. 
def send(self, request=..., stream=..., timeout=..., verify=..., cert=..., proxies=...): ... mypy-0.560/typeshed/third_party/2and3/requests/api.pyi0000644€tŠÔÚ€2›s®0000000255513215007212027130 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.api (Python 3) from typing import Optional, Union, Any, Iterable, Mapping, MutableMapping, Tuple, IO, Text from .models import Response _ParamsMappingValueType = Union[Text, bytes, int, float, Iterable[Union[Text, bytes, int, float]]] _Data = Union[None, bytes, MutableMapping[Text, Text], Iterable[Tuple[Text, Text]], IO] def request(method: str, url: str, **kwargs) -> Response: ... def get(url: Union[Text, bytes], params: Optional[ Union[Mapping[Union[Text, bytes, int, float], _ParamsMappingValueType], Union[Text, bytes], Tuple[Union[Text, bytes, int, float], _ParamsMappingValueType], Mapping[Text, _ParamsMappingValueType], Mapping[bytes, _ParamsMappingValueType], Mapping[int, _ParamsMappingValueType], Mapping[float, _ParamsMappingValueType]]] = ..., **kwargs) -> Response: ... def options(url: Union[str, Text], **kwargs) -> Response: ... def head(url: Union[str, Text], **kwargs) -> Response: ... def post(url: Union[str, Text], data: _Data=..., json=..., **kwargs) -> Response: ... def put(url: Union[str, Text], data: _Data=..., json=..., **kwargs) -> Response: ... def patch(url: Union[str, Text], data: _Data=..., json=..., **kwargs) -> Response: ... def delete(url: Union[str, Text], **kwargs) -> Response: ... mypy-0.560/typeshed/third_party/2and3/requests/auth.pyi0000644€tŠÔÚ€2›s®0000000243213215007212027312 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.auth (Python 3) from typing import Any, Text, Union from . import compat from . import cookies from . import utils from . import status_codes extract_cookies_to_jar = cookies.extract_cookies_to_jar parse_dict_header = utils.parse_dict_header to_native_string = utils.to_native_string codes = status_codes.codes CONTENT_TYPE_FORM_URLENCODED = ... # type: Any CONTENT_TYPE_MULTI_PART = ... # type: Any def _basic_auth_str(username: Union[bytes, Text], password: Union[bytes, Text]) -> str: ... class AuthBase: def __call__(self, r): ... class HTTPBasicAuth(AuthBase): username = ... # type: Any password = ... # type: Any def __init__(self, username, password) -> None: ... def __call__(self, r): ... class HTTPProxyAuth(HTTPBasicAuth): def __call__(self, r): ... class HTTPDigestAuth(AuthBase): username = ... # type: Any password = ... # type: Any last_nonce = ... # type: Any nonce_count = ... # type: Any chal = ... # type: Any pos = ... # type: Any num_401_calls = ... # type: Any def __init__(self, username, password) -> None: ... def build_digest_header(self, method, url): ... def handle_redirect(self, r, **kwargs): ... def handle_401(self, r, **kwargs): ... def __call__(self, r): ... mypy-0.560/typeshed/third_party/2and3/requests/compat.pyi0000644€tŠÔÚ€2›s®0000000017313215007212027634 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.compat (Python 3.4) from typing import Any import collections OrderedDict = collections.OrderedDict mypy-0.560/typeshed/third_party/2and3/requests/cookies.pyi0000644€tŠÔÚ€2›s®0000000405013215007212030003 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.cookies (Python 3) import sys from typing import Any, MutableMapping import collections from . import compat if sys.version_info < (3, 0): from cookielib import CookieJar else: from http.cookiejar import CookieJar class MockRequest: type = ... 
# type: Any def __init__(self, request) -> None: ... def get_type(self): ... def get_host(self): ... def get_origin_req_host(self): ... def get_full_url(self): ... def is_unverifiable(self): ... def has_header(self, name): ... def get_header(self, name, default=...): ... def add_header(self, key, val): ... def add_unredirected_header(self, name, value): ... def get_new_headers(self): ... @property def unverifiable(self): ... @property def origin_req_host(self): ... @property def host(self): ... class MockResponse: def __init__(self, headers) -> None: ... def info(self): ... def getheaders(self, name): ... def extract_cookies_to_jar(jar, request, response): ... def get_cookie_header(jar, request): ... def remove_cookie_by_name(cookiejar, name, domain=..., path=...): ... class CookieConflictError(RuntimeError): ... class RequestsCookieJar(CookieJar, MutableMapping): def get(self, name, default=..., domain=..., path=...): ... def set(self, name, value, **kwargs): ... def iterkeys(self): ... def keys(self): ... def itervalues(self): ... def values(self): ... def iteritems(self): ... def items(self): ... def list_domains(self): ... def list_paths(self): ... def multiple_domains(self): ... def get_dict(self, domain=..., path=...): ... def __getitem__(self, name): ... def __setitem__(self, name, value): ... def __delitem__(self, name): ... def set_cookie(self, cookie, *args, **kwargs): ... def update(self, other): ... def copy(self): ... def create_cookie(name, value, **kwargs): ... def morsel_to_cookie(morsel): ... def cookiejar_from_dict(cookie_dict, cookiejar=..., overwrite=...): ... def merge_cookies(cookiejar, cookies): ... mypy-0.560/typeshed/third_party/2and3/requests/exceptions.pyi0000644€tŠÔÚ€2›s®0000000200713215007212030530 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.exceptions (Python 3) from typing import Any from .packages.urllib3.exceptions import HTTPError as BaseHTTPError class RequestException(IOError): response = ... # type: Any request = ... # type: Any def __init__(self, *args, **kwargs) -> None: ... class HTTPError(RequestException): ... class ConnectionError(RequestException): ... class ProxyError(ConnectionError): ... class SSLError(ConnectionError): ... class Timeout(RequestException): ... class ConnectTimeout(ConnectionError, Timeout): ... class ReadTimeout(Timeout): ... class URLRequired(RequestException): ... class TooManyRedirects(RequestException): ... class MissingSchema(RequestException, ValueError): ... class InvalidSchema(RequestException, ValueError): ... class InvalidURL(RequestException, ValueError): ... class ChunkedEncodingError(RequestException): ... class ContentDecodingError(RequestException, BaseHTTPError): ... class StreamConsumedError(RequestException, TypeError): ... class RetryError(RequestException): ... mypy-0.560/typeshed/third_party/2and3/requests/hooks.pyi0000644€tŠÔÚ€2›s®0000000025213215007212027472 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.hooks (Python 3) from typing import Any HOOKS = ... # type: Any def default_hooks(): ... def dispatch_hook(key, hooks, hook_data, **kwargs): ... mypy-0.560/typeshed/third_party/2and3/requests/models.pyi0000644€tŠÔÚ€2›s®0000001161213215007212027634 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.models (Python 3) from typing import Any, List, MutableMapping, Iterator, Dict, Text import datetime import types from . import hooks from . import structures from . import auth from . 
import cookies from .cookies import RequestsCookieJar from .packages.urllib3 import fields from .packages.urllib3 import filepost from .packages.urllib3 import util from .packages.urllib3 import exceptions as urllib3_exceptions from . import exceptions from . import utils from . import compat from . import status_codes from typing import Optional, Union default_hooks = hooks.default_hooks CaseInsensitiveDict = structures.CaseInsensitiveDict HTTPBasicAuth = auth.HTTPBasicAuth cookiejar_from_dict = cookies.cookiejar_from_dict get_cookie_header = cookies.get_cookie_header RequestField = fields.RequestField encode_multipart_formdata = filepost.encode_multipart_formdata parse_url = util.parse_url DecodeError = urllib3_exceptions.DecodeError ReadTimeoutError = urllib3_exceptions.ReadTimeoutError ProtocolError = urllib3_exceptions.ProtocolError LocationParseError = urllib3_exceptions.LocationParseError HTTPError = exceptions.HTTPError MissingSchema = exceptions.MissingSchema InvalidURL = exceptions.InvalidURL ChunkedEncodingError = exceptions.ChunkedEncodingError ContentDecodingError = exceptions.ContentDecodingError ConnectionError = exceptions.ConnectionError StreamConsumedError = exceptions.StreamConsumedError guess_filename = utils.guess_filename get_auth_from_url = utils.get_auth_from_url requote_uri = utils.requote_uri stream_decode_response_unicode = utils.stream_decode_response_unicode to_key_val_list = utils.to_key_val_list parse_header_links = utils.parse_header_links iter_slices = utils.iter_slices guess_json_utf = utils.guess_json_utf super_len = utils.super_len to_native_string = utils.to_native_string codes = status_codes.codes REDIRECT_STATI = ... # type: Any DEFAULT_REDIRECT_LIMIT = ... # type: Any CONTENT_CHUNK_SIZE = ... # type: Any ITER_CHUNK_SIZE = ... # type: Any json_dumps = ... # type: Any class RequestEncodingMixin: @property def path_url(self): ... class RequestHooksMixin: def register_hook(self, event, hook): ... def deregister_hook(self, event, hook): ... class Request(RequestHooksMixin): hooks = ... # type: Any method = ... # type: Any url = ... # type: Any headers = ... # type: Any files = ... # type: Any data = ... # type: Any json = ... # type: Any params = ... # type: Any auth = ... # type: Any cookies = ... # type: Any def __init__(self, method=..., url=..., headers=..., files=..., data=..., params=..., auth=..., cookies=..., hooks=..., json=...) -> None: ... def prepare(self): ... class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): method = ... # type: Optional[Union[str, Text]] url = ... # type: Optional[Union[str, Text]] headers = ... # type: CaseInsensitiveDict body = ... # type: Optional[Union[bytes, Text]] hooks = ... # type: Any def __init__(self) -> None: ... def prepare(self, method=..., url=..., headers=..., files=..., data=..., params=..., auth=..., cookies=..., hooks=..., json=...): ... def copy(self): ... def prepare_method(self, method): ... def prepare_url(self, url, params): ... def prepare_headers(self, headers): ... def prepare_body(self, data, files, json=...): ... def prepare_content_length(self, body): ... def prepare_auth(self, auth, url=...): ... def prepare_cookies(self, cookies): ... def prepare_hooks(self, hooks): ... class Response: __attrs__ = ... # type: Any status_code = ... # type: int headers = ... # type: MutableMapping[str, str] raw = ... # type: Any url = ... # type: str encoding = ... # type: str history = ... # type: List[Response] reason = ... # type: str cookies = ... # type: RequestsCookieJar elapsed = ... 
# type: datetime.timedelta request = ... # type: PreparedRequest def __init__(self) -> None: ... def __bool__(self) -> bool: ... def __nonzero__(self) -> bool: ... def __iter__(self) -> Iterator[bytes]: ... def __enter__(self) -> Response: ... def __exit__(self, *args: Any) -> None: ... @property def ok(self) -> bool: ... @property def is_redirect(self) -> bool: ... @property def is_permanent_redirect(self) -> bool: ... @property def apparent_encoding(self) -> str: ... def iter_content(self, chunk_size: int = ..., decode_unicode: bool = ...) -> Iterator[Any]: ... def iter_lines(self, chunk_size=..., decode_unicode=..., delimiter=...): ... @property def content(self) -> bytes: ... @property def text(self) -> str: ... def json(self, **kwargs) -> Any: ... @property def links(self) -> Dict[Any, Any]: ... def raise_for_status(self) -> None: ... def close(self) -> None: ... mypy-0.560/typeshed/third_party/2and3/requests/packages/0000755€tŠÔÚ€2›s®0000000000013215007244027410 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/requests/packages/__init__.pyi0000644€tŠÔÚ€2›s®0000000023613215007212031666 0ustar jukkaDROPBOX\Domain Users00000000000000class VendorAlias: def __init__(self, package_names) -> None: ... def find_module(self, fullname, path=...): ... def load_module(self, name): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/0000755€tŠÔÚ€2›s®0000000000013215007244030764 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/__init__.pyi0000644€tŠÔÚ€2›s®0000000164013215007212033242 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from . import connectionpool from . import filepost from . import poolmanager from . import response from .util import request as _request from .util import url from .util import timeout from .util import retry import logging __license__ = ... # type: Any HTTPConnectionPool = connectionpool.HTTPConnectionPool HTTPSConnectionPool = connectionpool.HTTPSConnectionPool connection_from_url = connectionpool.connection_from_url encode_multipart_formdata = filepost.encode_multipart_formdata PoolManager = poolmanager.PoolManager ProxyManager = poolmanager.ProxyManager proxy_from_url = poolmanager.proxy_from_url HTTPResponse = response.HTTPResponse make_headers = _request.make_headers get_host = url.get_host Timeout = timeout.Timeout Retry = retry.Retry class NullHandler(logging.Handler): def emit(self, record): ... def add_stderr_logger(level=...): ... def disable_warnings(category=...): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/_collections.pyi0000644€tŠÔÚ€2›s®0000000275513215007212034170 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from collections import MutableMapping class RLock: def __enter__(self): ... def __exit__(self, exc_type, exc_value, traceback): ... class RecentlyUsedContainer(MutableMapping): ContainerCls = ... # type: Any dispose_func = ... # type: Any lock = ... # type: Any def __init__(self, maxsize=..., dispose_func=...) -> None: ... def __getitem__(self, key): ... def __setitem__(self, key, value): ... def __delitem__(self, key): ... def __len__(self): ... def __iter__(self): ... def clear(self): ... def keys(self): ... class HTTPHeaderDict(dict): def __init__(self, headers=..., **kwargs) -> None: ... def __setitem__(self, key, val): ... def __getitem__(self, key): ... def __delitem__(self, key): ... def __contains__(self, key): ... def __eq__(self, other): ... 
def __ne__(self, other): ... values = ... # type: Any get = ... # type: Any update = ... # type: Any iterkeys = ... # type: Any itervalues = ... # type: Any def pop(self, key, default=...): ... def discard(self, key): ... def add(self, key, val): ... def extend(*args, **kwargs): ... def getlist(self, key): ... getheaders = ... # type: Any getallmatchingheaders = ... # type: Any iget = ... # type: Any def copy(self): ... def iteritems(self): ... def itermerged(self): ... def items(self): ... @classmethod def from_httplib(cls, message, duplicates=...): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/connection.pyi0000644€tŠÔÚ€2›s®0000000445613215007212033652 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.packages.urllib3.connection (Python 3.4) import sys from typing import Any from . import packages import ssl from . import exceptions from .packages import ssl_match_hostname from .util import ssl_ from . import util if sys.version_info < (3, 0): from httplib import HTTPConnection as _HTTPConnection from httplib import HTTPException as HTTPException class ConnectionError(Exception): ... else: from http.client import HTTPConnection as _HTTPConnection from http.client import HTTPException as HTTPException from builtins import ConnectionError as ConnectionError class DummyConnection: ... BaseSSLError = ssl.SSLError ConnectTimeoutError = exceptions.ConnectTimeoutError SystemTimeWarning = exceptions.SystemTimeWarning SecurityWarning = exceptions.SecurityWarning match_hostname = ssl_match_hostname.match_hostname resolve_cert_reqs = ssl_.resolve_cert_reqs resolve_ssl_version = ssl_.resolve_ssl_version ssl_wrap_socket = ssl_.ssl_wrap_socket assert_fingerprint = ssl_.assert_fingerprint connection = util.connection port_by_scheme = ... # type: Any RECENT_DATE = ... # type: Any class HTTPConnection(_HTTPConnection): default_port = ... # type: Any default_socket_options = ... # type: Any is_verified = ... # type: Any source_address = ... # type: Any socket_options = ... # type: Any def __init__(self, *args, **kw) -> None: ... def connect(self): ... class HTTPSConnection(HTTPConnection): default_port = ... # type: Any key_file = ... # type: Any cert_file = ... # type: Any def __init__(self, host, port=..., key_file=..., cert_file=..., strict=..., timeout=..., **kw) -> None: ... sock = ... # type: Any def connect(self): ... class VerifiedHTTPSConnection(HTTPSConnection): cert_reqs = ... # type: Any ca_certs = ... # type: Any ssl_version = ... # type: Any assert_fingerprint = ... # type: Any key_file = ... # type: Any cert_file = ... # type: Any assert_hostname = ... # type: Any def set_cert(self, key_file=..., cert_file=..., cert_reqs=..., ca_certs=..., assert_hostname=..., assert_fingerprint=...): ... sock = ... # type: Any auto_open = ... # type: Any is_verified = ... # type: Any def connect(self): ... UnverifiedHTTPSConnection = HTTPSConnection mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/connectionpool.pyi0000644€tŠÔÚ€2›s®0000000636013215007212034540 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from . import exceptions from .packages import ssl_match_hostname from . import packages from .connection import ( HTTPException as HTTPException, BaseSSLError as BaseSSLError, ConnectionError as ConnectionError, ) from . import request from . import response from . 
import connection from .util import connection as _connection from .util import retry from .util import timeout from .util import url ClosedPoolError = exceptions.ClosedPoolError ProtocolError = exceptions.ProtocolError EmptyPoolError = exceptions.EmptyPoolError HostChangedError = exceptions.HostChangedError LocationValueError = exceptions.LocationValueError MaxRetryError = exceptions.MaxRetryError ProxyError = exceptions.ProxyError ReadTimeoutError = exceptions.ReadTimeoutError SSLError = exceptions.SSLError TimeoutError = exceptions.TimeoutError InsecureRequestWarning = exceptions.InsecureRequestWarning CertificateError = ssl_match_hostname.CertificateError port_by_scheme = connection.port_by_scheme DummyConnection = connection.DummyConnection HTTPConnection = connection.HTTPConnection HTTPSConnection = connection.HTTPSConnection VerifiedHTTPSConnection = connection.VerifiedHTTPSConnection RequestMethods = request.RequestMethods HTTPResponse = response.HTTPResponse is_connection_dropped = _connection.is_connection_dropped Retry = retry.Retry Timeout = timeout.Timeout get_host = url.get_host xrange = ... # type: Any log = ... # type: Any class ConnectionPool: scheme = ... # type: Any QueueCls = ... # type: Any host = ... # type: Any port = ... # type: Any def __init__(self, host, port=...) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_val, exc_tb): ... def close(self): ... class HTTPConnectionPool(ConnectionPool, RequestMethods): scheme = ... # type: Any ConnectionCls = ... # type: Any strict = ... # type: Any timeout = ... # type: Any retries = ... # type: Any pool = ... # type: Any block = ... # type: Any proxy = ... # type: Any proxy_headers = ... # type: Any num_connections = ... # type: Any num_requests = ... # type: Any conn_kw = ... # type: Any def __init__(self, host, port=..., strict=..., timeout=..., maxsize=..., block=..., headers=..., retries=..., _proxy=..., _proxy_headers=..., **conn_kw) -> None: ... def close(self): ... def is_same_host(self, url): ... def urlopen(self, method, url, body=..., headers=..., retries=..., redirect=..., assert_same_host=..., timeout=..., pool_timeout=..., release_conn=..., **response_kw): ... class HTTPSConnectionPool(HTTPConnectionPool): scheme = ... # type: Any ConnectionCls = ... # type: Any key_file = ... # type: Any cert_file = ... # type: Any cert_reqs = ... # type: Any ca_certs = ... # type: Any ssl_version = ... # type: Any assert_hostname = ... # type: Any assert_fingerprint = ... # type: Any def __init__(self, host, port=..., strict=..., timeout=..., maxsize=..., block=..., headers=..., retries=..., _proxy=..., _proxy_headers=..., key_file=..., cert_file=..., cert_reqs=..., ca_certs=..., ssl_version=..., assert_hostname=..., assert_fingerprint=..., **conn_kw) -> None: ... def connection_from_url(url, **kw): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/contrib/0000755€tŠÔÚ€2›s®0000000000013215007244032424 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/contrib/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212034667 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/exceptions.pyi0000644€tŠÔÚ€2›s®0000000276513215007212033675 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class HTTPError(Exception): ... class HTTPWarning(Warning): ... class PoolError(HTTPError): pool = ... # type: Any def __init__(self, pool, message) -> None: ... 
def __reduce__(self): ... class RequestError(PoolError): url = ... # type: Any def __init__(self, pool, url, message) -> None: ... def __reduce__(self): ... class SSLError(HTTPError): ... class ProxyError(HTTPError): ... class DecodeError(HTTPError): ... class ProtocolError(HTTPError): ... ConnectionError = ... # type: Any class MaxRetryError(RequestError): reason = ... # type: Any def __init__(self, pool, url, reason=...) -> None: ... class HostChangedError(RequestError): retries = ... # type: Any def __init__(self, pool, url, retries=...) -> None: ... class TimeoutStateError(HTTPError): ... class TimeoutError(HTTPError): ... class ReadTimeoutError(TimeoutError, RequestError): ... class ConnectTimeoutError(TimeoutError): ... class EmptyPoolError(PoolError): ... class ClosedPoolError(PoolError): ... class LocationValueError(ValueError, HTTPError): ... class LocationParseError(LocationValueError): location = ... # type: Any def __init__(self, location) -> None: ... class ResponseError(HTTPError): GENERIC_ERROR = ... # type: Any SPECIFIC_ERROR = ... # type: Any class SecurityWarning(HTTPWarning): ... class InsecureRequestWarning(SecurityWarning): ... class SystemTimeWarning(SecurityWarning): ... class InsecurePlatformWarning(SecurityWarning): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/fields.pyi0000644€tŠÔÚ€2›s®0000000105013215007212032744 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.packages.urllib3.fields (Python 3.4) from typing import Any from . import packages def guess_content_type(filename, default=...): ... def format_header_param(name, value): ... class RequestField: data = ... # type: Any headers = ... # type: Any def __init__(self, name, data, filename=..., headers=...) -> None: ... @classmethod def from_tuples(cls, fieldname, value): ... def render_headers(self): ... def make_multipart(self, content_disposition=..., content_type=..., content_location=...): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/filepost.pyi0000644€tŠÔÚ€2›s®0000000052513215007212033331 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from . import packages # from .packages import six from . import fields # six = packages.six # b = six.b RequestField = fields.RequestField writer = ... # type: Any def choose_boundary(): ... def iter_field_objects(fields): ... def iter_fields(fields): ... def encode_multipart_formdata(fields, boundary=...): ... 
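# --- Editor's illustrative sketch (not part of the upstream typeshed stub) ---
# A minimal runtime example of encode_multipart_formdata() declared above,
# assuming the vendored requests.packages.urllib3 package is importable. The
# field names and file contents are hypothetical.
from requests.packages.urllib3.filepost import encode_multipart_formdata

fields = [
    ("token", "abc123"),                          # hypothetical plain form field
    ("upload", ("report.txt", b"hello world")),   # hypothetical file field: (filename, data)
]
body, content_type = encode_multipart_formdata(fields)
# `body` holds the encoded multipart payload; `content_type` is the matching
# "multipart/form-data; boundary=..." header value to send alongside it.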
mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/packages/0000755€tŠÔÚ€2›s®0000000000013215007244032542 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/packages/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212035005 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/0000755€tŠÔÚ€2›s®0000000000013215007244036415 5ustar jukkaDROPBOX\Domain Users00000000000000././@LongLink0000000000000000000000000000015100000000000011212 Lustar 00000000000000mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyimypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/__init__0000644€tŠÔÚ€2›s®0000000013013215007212040064 0ustar jukkaDROPBOX\Domain Users00000000000000import ssl CertificateError = ssl.CertificateError match_hostname = ssl.match_hostname ././@LongLink0000000000000000000000000000016000000000000011212 Lustar 00000000000000mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyimypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/_impleme0000644€tŠÔÚ€2›s®0000000012113215007212040114 0ustar jukkaDROPBOX\Domain Users00000000000000class CertificateError(ValueError): ... def match_hostname(cert, hostname): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/poolmanager.pyi0000644€tŠÔÚ€2›s®0000000254213215007212034011 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .request import RequestMethods class PoolManager(RequestMethods): proxy = ... # type: Any connection_pool_kw = ... # type: Any pools = ... # type: Any def __init__(self, num_pools=..., headers=..., **connection_pool_kw) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_val, exc_tb): ... def clear(self): ... def connection_from_host(self, host, port=..., scheme=...): ... def connection_from_url(self, url): ... # TODO: This was the original signature -- copied another one from base class to fix complaint. # def urlopen(self, method, url, redirect=True, **kw): ... def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ... class ProxyManager(PoolManager): proxy = ... # type: Any proxy_headers = ... # type: Any def __init__(self, proxy_url, num_pools=..., headers=..., proxy_headers=..., **connection_pool_kw) -> None: ... def connection_from_host(self, host, port=..., scheme=...): ... # TODO: This was the original signature -- copied another one from base class to fix complaint. # def urlopen(self, method, url, redirect=True, **kw): ... def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ... def proxy_from_url(url, **kw): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/request.pyi0000644€tŠÔÚ€2›s®0000000102613215007212033171 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class RequestMethods: headers = ... # type: Any def __init__(self, headers=...) -> None: ... def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ... def request(self, method, url, fields=..., headers=..., **urlopen_kw): ... def request_encode_url(self, method, url, fields=..., **urlopen_kw): ... 
def request_encode_body(self, method, url, fields=..., headers=..., encode_multipart=..., multipart_boundary=..., **urlopen_kw): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/response.pyi0000644€tŠÔÚ€2›s®0000000342113215007212033340 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, IO from . import _collections from . import exceptions from .connection import HTTPException as HTTPException, BaseSSLError as BaseSSLError from .util import response HTTPHeaderDict = _collections.HTTPHeaderDict ProtocolError = exceptions.ProtocolError DecodeError = exceptions.DecodeError ReadTimeoutError = exceptions.ReadTimeoutError binary_type = bytes # six.binary_type PY3 = True # six.PY3 is_fp_closed = response.is_fp_closed class DeflateDecoder: def __init__(self) -> None: ... def __getattr__(self, name): ... def decompress(self, data): ... class GzipDecoder: def __init__(self) -> None: ... def __getattr__(self, name): ... def decompress(self, data): ... class HTTPResponse(IO[Any]): CONTENT_DECODERS = ... # type: Any REDIRECT_STATUSES = ... # type: Any headers = ... # type: Any status = ... # type: Any version = ... # type: Any reason = ... # type: Any strict = ... # type: Any decode_content = ... # type: Any def __init__(self, body=..., headers=..., status=..., version=..., reason=..., strict=..., preload_content=..., decode_content=..., original_response=..., pool=..., connection=...) -> None: ... def get_redirect_location(self): ... def release_conn(self): ... @property def data(self): ... def tell(self): ... def read(self, amt=..., decode_content=..., cache_content=...): ... def stream(self, amt=..., decode_content=...): ... @classmethod def from_httplib(ResponseCls, r, **response_kw): ... def getheaders(self): ... def getheader(self, name, default=...): ... def close(self): ... @property def closed(self): ... def fileno(self): ... def flush(self): ... def readable(self): ... def readinto(self, b): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/util/0000755€tŠÔÚ€2›s®0000000000013215007244031741 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/util/__init__.pyi0000644€tŠÔÚ€2›s®0000000126413215007212034221 0ustar jukkaDROPBOX\Domain Users00000000000000from . import connection from . import request from . import response from . import ssl_ from . import timeout from . import retry from . import url import ssl is_connection_dropped = connection.is_connection_dropped make_headers = request.make_headers is_fp_closed = response.is_fp_closed SSLContext = ssl.SSLContext HAS_SNI = ssl_.HAS_SNI assert_fingerprint = ssl_.assert_fingerprint resolve_cert_reqs = ssl_.resolve_cert_reqs resolve_ssl_version = ssl_.resolve_ssl_version ssl_wrap_socket = ssl_.ssl_wrap_socket current_time = timeout.current_time Timeout = timeout.Timeout Retry = retry.Retry get_host = url.get_host parse_url = url.parse_url split_first = url.split_first Url = url.Url mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/util/connection.pyi0000644€tŠÔÚ€2›s®0000000034613215007212034621 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any poll = ... # type: Any select = ... # type: Any HAS_IPV6 = ... # type: bool def is_connection_dropped(conn): ... def create_connection(address, timeout=..., source_address=..., socket_options=...): ... 
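# --- Editor's illustrative sketch (not part of the upstream typeshed stub) ---
# Example use of the create_connection() helper declared above; the host, port
# and socket option shown are hypothetical.
import socket
from requests.packages.urllib3.util.connection import create_connection

sock = create_connection(
    ("example.org", 80),      # hypothetical (host, port) address
    timeout=5.0,              # connect timeout in seconds
    socket_options=[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)],  # applied via setsockopt()
)
sock.close()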
mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/util/request.pyi0000644€tŠÔÚ€2›s®0000000035213215007212034147 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any # from ..packages import six # b = six.b ACCEPT_ENCODING = ... # type: Any def make_headers(keep_alive=..., accept_encoding=..., user_agent=..., basic_auth=..., proxy_basic_auth=..., disable_cache=...): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/util/response.pyi0000644€tŠÔÚ€2›s®0000000003313215007212034311 0ustar jukkaDROPBOX\Domain Users00000000000000def is_fp_closed(obj): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/util/retry.pyi0000644€tŠÔÚ€2›s®0000000234313215007212033626 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .. import exceptions from .. import packages ConnectTimeoutError = exceptions.ConnectTimeoutError MaxRetryError = exceptions.MaxRetryError ProtocolError = exceptions.ProtocolError ReadTimeoutError = exceptions.ReadTimeoutError ResponseError = exceptions.ResponseError log = ... # type: Any class Retry: DEFAULT_METHOD_WHITELIST = ... # type: Any BACKOFF_MAX = ... # type: Any total = ... # type: Any connect = ... # type: Any read = ... # type: Any redirect = ... # type: Any status_forcelist = ... # type: Any method_whitelist = ... # type: Any backoff_factor = ... # type: Any raise_on_redirect = ... # type: Any def __init__(self, total=..., connect=..., read=..., redirect=..., method_whitelist=..., status_forcelist=..., backoff_factor=..., raise_on_redirect=..., _observed_errors=...) -> None: ... def new(self, **kw): ... @classmethod def from_int(cls, retries, redirect=..., default=...): ... def get_backoff_time(self): ... def sleep(self): ... def is_forced_retry(self, method, status_code): ... def is_exhausted(self): ... def increment(self, method=..., url=..., response=..., error=..., _pool=..., _stacktrace=...): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/util/ssl_.pyi0000644€tŠÔÚ€2›s®0000000134513215007212033422 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .. import exceptions import ssl SSLError = exceptions.SSLError InsecurePlatformWarning = exceptions.InsecurePlatformWarning SSLContext = ssl.SSLContext HAS_SNI = ... # type: Any create_default_context = ... # type: Any OP_NO_SSLv2 = ... # type: Any OP_NO_SSLv3 = ... # type: Any OP_NO_COMPRESSION = ... # type: Any def assert_fingerprint(cert, fingerprint): ... def resolve_cert_reqs(candidate): ... def resolve_ssl_version(candidate): ... def create_urllib3_context(ssl_version=..., cert_reqs=..., options=..., ciphers=...): ... def ssl_wrap_socket(sock, keyfile=..., certfile=..., cert_reqs=..., ca_certs=..., server_hostname=..., ssl_version=..., ciphers=..., ssl_context=...): ... mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/util/timeout.pyi0000644€tŠÔÚ€2›s®0000000101613215007212034143 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .. import exceptions TimeoutStateError = exceptions.TimeoutStateError def current_time(): ... class Timeout: DEFAULT_TIMEOUT = ... # type: Any total = ... # type: Any def __init__(self, total=..., connect=..., read=...) -> None: ... @classmethod def from_float(cls, timeout): ... def clone(self): ... def start_connect(self): ... def get_connect_duration(self): ... @property def connect_timeout(self): ... @property def read_timeout(self): ... 
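# --- Editor's illustrative sketch (not part of the upstream typeshed stub) ---
# The Timeout class declared above lets connect and read budgets be set
# separately instead of passing a single float. The URL is hypothetical.
from requests.packages.urllib3 import PoolManager
from requests.packages.urllib3.util.timeout import Timeout

timeout = Timeout(connect=2.0, read=7.0)   # per-phase limits in seconds
http = PoolManager(timeout=timeout)        # becomes the default for every request
resp = http.request("GET", "http://example.org/")  # hypothetical URL
status = resp.status                        # HTTPResponse exposes status, headers, data, ...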
mypy-0.560/typeshed/third_party/2and3/requests/packages/urllib3/util/url.pyi0000644€tŠÔÚ€2›s®0000000100713215007212033257 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .. import exceptions LocationParseError = exceptions.LocationParseError url_attrs = ... # type: Any class Url: slots = ... # type: Any def __new__(cls, scheme=..., auth=..., host=..., port=..., path=..., query=..., fragment=...): ... @property def hostname(self): ... @property def request_uri(self): ... @property def netloc(self): ... @property def url(self): ... def split_first(s, delims): ... def parse_url(url): ... def get_host(url): ... mypy-0.560/typeshed/third_party/2and3/requests/sessions.pyi0000644€tŠÔÚ€2›s®0000001163513215007212030224 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.sessions (Python 3) from typing import Any, Union, MutableMapping, Text, Optional, IO, Tuple, Callable from . import adapters from . import auth from . import compat from . import cookies from . import models from .models import Response from . import hooks from . import utils from . import exceptions from .packages.urllib3 import _collections from . import structures from . import adapters from . import status_codes BaseAdapter = adapters.BaseAdapter OrderedDict = compat.OrderedDict cookiejar_from_dict = cookies.cookiejar_from_dict extract_cookies_to_jar = cookies.extract_cookies_to_jar RequestsCookieJar = cookies.RequestsCookieJar merge_cookies = cookies.merge_cookies Request = models.Request PreparedRequest = models.PreparedRequest DEFAULT_REDIRECT_LIMIT = models.DEFAULT_REDIRECT_LIMIT default_hooks = hooks.default_hooks dispatch_hook = hooks.dispatch_hook to_key_val_list = utils.to_key_val_list default_headers = utils.default_headers to_native_string = utils.to_native_string TooManyRedirects = exceptions.TooManyRedirects InvalidSchema = exceptions.InvalidSchema ChunkedEncodingError = exceptions.ChunkedEncodingError ContentDecodingError = exceptions.ContentDecodingError RecentlyUsedContainer = _collections.RecentlyUsedContainer CaseInsensitiveDict = structures.CaseInsensitiveDict HTTPAdapter = adapters.HTTPAdapter requote_uri = utils.requote_uri get_environ_proxies = utils.get_environ_proxies get_netrc_auth = utils.get_netrc_auth should_bypass_proxies = utils.should_bypass_proxies get_auth_from_url = utils.get_auth_from_url codes = status_codes.codes REDIRECT_STATI = models.REDIRECT_STATI REDIRECT_CACHE_SIZE = ... # type: Any def merge_setting(request_setting, session_setting, dict_class=...): ... def merge_hooks(request_hooks, session_hooks, dict_class=...): ... class SessionRedirectMixin: def resolve_redirects(self, resp, req, stream=..., timeout=..., verify=..., cert=..., proxies=...): ... def rebuild_auth(self, prepared_request, response): ... def rebuild_proxies(self, prepared_request, proxies): ... _Data = Union[None, bytes, MutableMapping[Text, Text], IO] _Hooks = MutableMapping[Text, Callable[[Response], Any]] class Session(SessionRedirectMixin): __attrs__ = ... # type: Any headers = ... # type: MutableMapping[Text, Text] auth = ... # type: Union[None, Tuple[Text, Text], Callable[[Request], Request]] proxies = ... # type: MutableMapping[Text, Text] hooks = ... # type: _Hooks params = ... # type: Union[bytes, MutableMapping[Text, Text]] stream = ... # type: bool verify = ... # type: bool cert = ... # type: Union[None, Text, Tuple[Text, Text]] max_redirects = ... # type: int trust_env = ... # type: bool cookies = ... 
# type: Union[RequestsCookieJar, MutableMapping[Text, Text]] adapters = ... # type: MutableMapping redirect_cache = ... # type: RecentlyUsedContainer def __init__(self) -> None: ... def __enter__(self) -> 'Session': ... def __exit__(self, *args) -> None: ... def prepare_request(self, request): ... def request(self, method: str, url: str, params: Union[None, bytes, MutableMapping[Text, Text]] = ..., data: _Data = ..., headers: Optional[MutableMapping[Text, Text]] = ..., cookies: Union[None, RequestsCookieJar, MutableMapping[Text, Text]] = ..., files: Optional[MutableMapping[Text, IO]] = ..., auth: Union[None, Tuple[Text, Text], Callable[[Request], Request]] = ..., timeout: Union[None, float, Tuple[float, float]] = ..., allow_redirects: Optional[bool] = ..., proxies: Optional[MutableMapping[Text, Text]] = ..., hooks: Optional[_Hooks] = ..., stream: Optional[bool] = ..., verify: Optional[bool] = ..., cert: Union[Text, Tuple[Text, Text], None] = ..., json: Optional[MutableMapping] = ..., ) -> Response: ... def get(self, url: Union[Text, bytes], **kwargs) -> Response: ... def options(self, url: Union[Text, bytes], **kwargs) -> Response: ... def head(self, url: Union[Text, bytes], **kwargs) -> Response: ... def post(self, url: Union[Text, bytes], data: _Data = ..., json: Optional[MutableMapping] = ..., **kwargs) -> Response: ... def put(self, url: Union[Text, bytes], data: _Data = ..., **kwargs) -> Response: ... def patch(self, url: Union[Text, bytes], data: _Data = ..., **kwargs) -> Response: ... def delete(self, url: Union[Text, bytes], **kwargs) -> Response: ... def send(self, request, **kwargs): ... def merge_environment_settings(self, url, proxies, stream, verify, cert): ... def get_adapter(self, url): ... def close(self) -> None: ... def mount(self, prefix: Union[Text, bytes], adapter: BaseAdapter) -> None: ... def session() -> Session: ... mypy-0.560/typeshed/third_party/2and3/requests/status_codes.pyi0000644€tŠÔÚ€2›s®0000000012413215007212031045 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .structures import LookupDict codes = ... # type: Any mypy-0.560/typeshed/third_party/2and3/requests/structures.pyi0000644€tŠÔÚ€2›s®0000000122013215007212030566 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Iterator, MutableMapping, Text, Tuple, Union class CaseInsensitiveDict(MutableMapping[str, Union[Text, bytes]]): def lower_items(self) -> Iterator[Tuple[str, Union[Text, bytes]]]: ... def __setitem__(self, key: str, value: Union[Text, bytes]) -> None: ... def __getitem__(self, key: str) -> Union[Text, bytes]: ... def __delitem__(self, key: str) -> None: ... def __iter__(self) -> Iterator[str]: ... def __len__(self) -> int: ... class LookupDict(dict): name = ... # type: Any def __init__(self, name=...) -> None: ... def __getitem__(self, key): ... def get(self, key, default=...): ... mypy-0.560/typeshed/third_party/2and3/requests/utils.pyi0000644€tŠÔÚ€2›s®0000000315513215007212027514 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for requests.utils (Python 3) from typing import Any from . import compat from . import cookies from . import structures from . import exceptions OrderedDict = compat.OrderedDict RequestsCookieJar = cookies.RequestsCookieJar cookiejar_from_dict = cookies.cookiejar_from_dict CaseInsensitiveDict = structures.CaseInsensitiveDict InvalidURL = exceptions.InvalidURL NETRC_FILES = ... # type: Any DEFAULT_CA_BUNDLE_PATH = ... # type: Any def dict_to_sequence(d): ... def super_len(o): ... def get_netrc_auth(url): ... 
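# --- Editor's illustrative sketch (not part of the upstream typeshed stub) ---
# A small runtime example of two helpers declared in this utils module; the
# exact header values depend on the installed requests version.
from requests.utils import default_headers, to_key_val_list

headers = default_headers()           # CaseInsensitiveDict with User-Agent, Accept, ...
pairs = to_key_val_list({"a": "1"})   # -> [("a", "1")]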
def guess_filename(obj): ... def from_key_val_list(value): ... def to_key_val_list(value): ... def parse_list_header(value): ... def parse_dict_header(value): ... def unquote_header_value(value, is_filename=...): ... def dict_from_cookiejar(cj): ... def add_dict_to_cookiejar(cj, cookie_dict): ... def get_encodings_from_content(content): ... def get_encoding_from_headers(headers): ... def stream_decode_response_unicode(iterator, r): ... def iter_slices(string, slice_length): ... def get_unicode_from_response(r): ... UNRESERVED_SET = ... # type: Any def unquote_unreserved(uri): ... def requote_uri(uri): ... def address_in_network(ip, net): ... def dotted_netmask(mask): ... def is_ipv4_address(string_ip): ... def is_valid_cidr(string_network): ... def should_bypass_proxies(url): ... def get_environ_proxies(url): ... def default_user_agent(name=...): ... def default_headers(): ... def parse_header_links(value): ... def guess_json_utf(data): ... def prepend_scheme_if_needed(url, new_scheme): ... def get_auth_from_url(url): ... def to_native_string(string, encoding=...): ... def urldefragauth(url): ... mypy-0.560/typeshed/third_party/2and3/singledispatch.pyi0000644€tŠÔÚ€2›s®0000000121213215007212027472 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Callable, Generic, Mapping, Optional, TypeVar, overload _T = TypeVar("_T") class _SingleDispatchCallable(Generic[_T]): registry = ... # type: Mapping[Any, Callable[..., _T]] def dispatch(self, cls: Any) -> Callable[..., _T]: ... @overload def register(self, cls: Any) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... @overload def register(self, cls: Any, func: Callable[..., _T]) -> Callable[..., _T]: ... def _clear_cache(self) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> _T: ... def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... mypy-0.560/typeshed/third_party/2and3/thrift/0000755€tŠÔÚ€2›s®0000000000013215007244025257 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/thrift/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212027522 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/thrift/protocol/0000755€tŠÔÚ€2›s®0000000000013215007244027120 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/thrift/protocol/__init__.pyi0000644€tŠÔÚ€2›s®0000000021113215007212031367 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # TBase # TBinaryProtocol # TCompactProtocol # TJSONProtocol # TProtocol # fastbinary mypy-0.560/typeshed/third_party/2and3/thrift/protocol/TBinaryProtocol.pyi0000644€tŠÔÚ€2›s®0000000404413215007212032732 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .TProtocol import TProtocolBase from .TProtocol import * # noqa: F403 class TBinaryProtocol(TProtocolBase): VERSION_MASK = ... # type: Any VERSION_1 = ... # type: Any TYPE_MASK = ... # type: Any strictRead = ... # type: Any strictWrite = ... # type: Any def __init__(self, trans, strictRead=..., strictWrite=...) -> None: ... def writeMessageBegin(self, name, type, seqid): ... def writeMessageEnd(self): ... def writeStructBegin(self, name): ... def writeStructEnd(self): ... def writeFieldBegin(self, name, type, id): ... def writeFieldEnd(self): ... def writeFieldStop(self): ... def writeMapBegin(self, ktype, vtype, size): ... def writeMapEnd(self): ... def writeListBegin(self, etype, size): ... def writeListEnd(self): ... def writeSetBegin(self, etype, size): ... 
def writeSetEnd(self): ... def writeBool(self, bool): ... def writeByte(self, byte): ... def writeI16(self, i16): ... def writeI32(self, i32): ... def writeI64(self, i64): ... def writeDouble(self, dub): ... def writeString(self, str): ... def readMessageBegin(self): ... def readMessageEnd(self): ... def readStructBegin(self): ... def readStructEnd(self): ... def readFieldBegin(self): ... def readFieldEnd(self): ... def readMapBegin(self): ... def readMapEnd(self): ... def readListBegin(self): ... def readListEnd(self): ... def readSetBegin(self): ... def readSetEnd(self): ... def readBool(self): ... def readByte(self): ... def readI16(self): ... def readI32(self): ... def readI64(self): ... def readDouble(self): ... def readString(self): ... class TBinaryProtocolFactory: strictRead = ... # type: Any strictWrite = ... # type: Any def __init__(self, strictRead=..., strictWrite=...) -> None: ... def getProtocol(self, trans): ... class TBinaryProtocolAccelerated(TBinaryProtocol): ... class TBinaryProtocolAcceleratedFactory: def getProtocol(self, trans): ... mypy-0.560/typeshed/third_party/2and3/thrift/protocol/TProtocol.pyi0000644€tŠÔÚ€2›s®0000000511513215007212031565 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from thrift.Thrift import TException from thrift.Thrift import * # noqa: F403 class TProtocolException(TException): UNKNOWN = ... # type: Any INVALID_DATA = ... # type: Any NEGATIVE_SIZE = ... # type: Any SIZE_LIMIT = ... # type: Any BAD_VERSION = ... # type: Any NOT_IMPLEMENTED = ... # type: Any DEPTH_LIMIT = ... # type: Any type = ... # type: Any def __init__(self, type=..., message=...) -> None: ... class TProtocolBase: trans = ... # type: Any def __init__(self, trans) -> None: ... def writeMessageBegin(self, name, ttype, seqid): ... def writeMessageEnd(self): ... def writeStructBegin(self, name): ... def writeStructEnd(self): ... def writeFieldBegin(self, name, ttype, fid): ... def writeFieldEnd(self): ... def writeFieldStop(self): ... def writeMapBegin(self, ktype, vtype, size): ... def writeMapEnd(self): ... def writeListBegin(self, etype, size): ... def writeListEnd(self): ... def writeSetBegin(self, etype, size): ... def writeSetEnd(self): ... def writeBool(self, bool_val): ... def writeByte(self, byte): ... def writeI16(self, i16): ... def writeI32(self, i32): ... def writeI64(self, i64): ... def writeDouble(self, dub): ... def writeString(self, str_val): ... def readMessageBegin(self): ... def readMessageEnd(self): ... def readStructBegin(self): ... def readStructEnd(self): ... def readFieldBegin(self): ... def readFieldEnd(self): ... def readMapBegin(self): ... def readMapEnd(self): ... def readListBegin(self): ... def readListEnd(self): ... def readSetBegin(self): ... def readSetEnd(self): ... def readBool(self): ... def readByte(self): ... def readI16(self): ... def readI32(self): ... def readI64(self): ... def readDouble(self): ... def readString(self): ... def skip(self, ttype): ... def readFieldByTType(self, ttype, spec): ... def readContainerList(self, spec): ... def readContainerSet(self, spec): ... def readContainerStruct(self, spec): ... def readContainerMap(self, spec): ... def readStruct(self, obj, thrift_spec): ... def writeContainerStruct(self, val, spec): ... def writeContainerList(self, val, spec): ... def writeContainerSet(self, val, spec): ... def writeContainerMap(self, val, spec): ... def writeStruct(self, obj, thrift_spec): ... def writeFieldByTType(self, ttype, val, spec): ... def checkIntegerLimits(i, bits): ... 
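# --- Editor's illustrative sketch (not part of the upstream typeshed stub) ---
# How the protocol and transport classes stubbed here fit together at runtime:
# serialize a value into an in-memory transport and read it back. Assumes the
# thrift runtime package is installed.
from thrift.transport.TTransport import TMemoryBuffer
from thrift.protocol.TBinaryProtocol import TBinaryProtocol

out_buf = TMemoryBuffer()
TBinaryProtocol(out_buf).writeI32(42)            # encode an i32 into the buffer

in_buf = TMemoryBuffer(out_buf.getvalue())       # reuse the encoded bytes
assert TBinaryProtocol(in_buf).readI32() == 42   # decode the same value again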
class TProtocolFactory: def getProtocol(self, trans): ... mypy-0.560/typeshed/third_party/2and3/thrift/Thrift.pyi0000644€tŠÔÚ€2›s®0000000270513215007212027241 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class TType: STOP = ... # type: Any VOID = ... # type: Any BOOL = ... # type: Any BYTE = ... # type: Any I08 = ... # type: Any DOUBLE = ... # type: Any I16 = ... # type: Any I32 = ... # type: Any I64 = ... # type: Any STRING = ... # type: Any UTF7 = ... # type: Any STRUCT = ... # type: Any MAP = ... # type: Any SET = ... # type: Any LIST = ... # type: Any UTF8 = ... # type: Any UTF16 = ... # type: Any class TMessageType: CALL = ... # type: Any REPLY = ... # type: Any EXCEPTION = ... # type: Any ONEWAY = ... # type: Any class TProcessor: def process(iprot, oprot): ... class TException(Exception): message = ... # type: Any def __init__(self, message=...) -> None: ... class TApplicationException(TException): UNKNOWN = ... # type: Any UNKNOWN_METHOD = ... # type: Any INVALID_MESSAGE_TYPE = ... # type: Any WRONG_METHOD_NAME = ... # type: Any BAD_SEQUENCE_ID = ... # type: Any MISSING_RESULT = ... # type: Any INTERNAL_ERROR = ... # type: Any PROTOCOL_ERROR = ... # type: Any INVALID_TRANSFORM = ... # type: Any INVALID_PROTOCOL = ... # type: Any UNSUPPORTED_CLIENT_TYPE = ... # type: Any type = ... # type: Any def __init__(self, type=..., message=...) -> None: ... message = ... # type: Any def read(self, iprot): ... def write(self, oprot): ... mypy-0.560/typeshed/third_party/2and3/thrift/transport/0000755€tŠÔÚ€2›s®0000000000013215007244027313 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/thrift/transport/__init__.pyi0000644€tŠÔÚ€2›s®0000000014513215007212031570 0ustar jukkaDROPBOX\Domain Users00000000000000# Names in __all__ with no definition: # THttpClient # TSocket # TTransport # TZlibTransport mypy-0.560/typeshed/third_party/2and3/thrift/transport/TSocket.pyi0000644€tŠÔÚ€2›s®0000000161613215007212031411 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from .TTransport import TTransportBase, TServerTransportBase from .TTransport import * # noqa: F403 class TSocketBase(TTransportBase): handle = ... # type: Any def close(self): ... class TSocket(TSocketBase): host = ... # type: Any port = ... # type: Any handle = ... # type: Any def __init__(self, host=..., port=..., unix_socket=..., socket_family=...) -> None: ... def setHandle(self, h): ... def isOpen(self): ... def setTimeout(self, ms): ... def open(self): ... def read(self, sz): ... def write(self, buff): ... def flush(self): ... class TServerSocket(TSocketBase, TServerTransportBase): host = ... # type: Any port = ... # type: Any handle = ... # type: Any def __init__(self, host=..., port=..., unix_socket=..., socket_family=...) -> None: ... def listen(self): ... def accept(self): ... mypy-0.560/typeshed/third_party/2and3/thrift/transport/TTransport.pyi0000644€tŠÔÚ€2›s®0000000611613215007212032155 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from thrift.Thrift import TException class TTransportException(TException): UNKNOWN = ... # type: Any NOT_OPEN = ... # type: Any ALREADY_OPEN = ... # type: Any TIMED_OUT = ... # type: Any END_OF_FILE = ... # type: Any type = ... # type: Any def __init__(self, type=..., message=...) -> None: ... class TTransportBase: def isOpen(self): ... def open(self): ... def close(self): ... def read(self, sz): ... def readAll(self, sz): ... def write(self, buf): ... def flush(self): ... 
class CReadableTransport: @property def cstringio_buf(self): ... def cstringio_refill(self, partialread, reqlen): ... class TServerTransportBase: def listen(self): ... def accept(self): ... def close(self): ... class TTransportFactoryBase: def getTransport(self, trans): ... class TBufferedTransportFactory: def getTransport(self, trans): ... class TBufferedTransport(TTransportBase, CReadableTransport): DEFAULT_BUFFER = ... # type: Any def __init__(self, trans, rbuf_size=...) -> None: ... def isOpen(self): ... def open(self): ... def close(self): ... def read(self, sz): ... def write(self, buf): ... def flush(self): ... @property def cstringio_buf(self): ... def cstringio_refill(self, partialread, reqlen): ... class TMemoryBuffer(TTransportBase, CReadableTransport): def __init__(self, value=...) -> None: ... def isOpen(self): ... def open(self): ... def close(self): ... def read(self, sz): ... def write(self, buf): ... def flush(self): ... def getvalue(self): ... @property def cstringio_buf(self): ... def cstringio_refill(self, partialread, reqlen): ... class TFramedTransportFactory: def getTransport(self, trans): ... class TFramedTransport(TTransportBase, CReadableTransport): def __init__(self, trans) -> None: ... def isOpen(self): ... def open(self): ... def close(self): ... def read(self, sz): ... def readFrame(self): ... def write(self, buf): ... def flush(self): ... @property def cstringio_buf(self): ... def cstringio_refill(self, prefix, reqlen): ... class TFileObjectTransport(TTransportBase): fileobj = ... # type: Any def __init__(self, fileobj) -> None: ... def isOpen(self): ... def close(self): ... def read(self, sz): ... def write(self, buf): ... def flush(self): ... class TSaslClientTransport(TTransportBase, CReadableTransport): START = ... # type: Any OK = ... # type: Any BAD = ... # type: Any ERROR = ... # type: Any COMPLETE = ... # type: Any transport = ... # type: Any sasl = ... # type: Any def __init__(self, transport, host, service, mechanism=..., **sasl_kwargs) -> None: ... def open(self): ... def send_sasl_msg(self, status, body): ... def recv_sasl_msg(self): ... def write(self, data): ... def flush(self): ... def read(self, sz): ... def close(self): ... @property def cstringio_buf(self): ... def cstringio_refill(self, prefix, reqlen): ... mypy-0.560/typeshed/third_party/2and3/typing_extensions.pyi0000644€tŠÔÚ€2›s®0000000216313215007212030270 0ustar jukkaDROPBOX\Domain Users00000000000000import sys import typing from typing import ClassVar as ClassVar from typing import ContextManager as ContextManager from typing import Counter as Counter from typing import DefaultDict as DefaultDict from typing import Deque as Deque from typing import NewType as NewType from typing import NoReturn as NoReturn from typing import overload as overload from typing import Text as Text from typing import Type as Type from typing import TYPE_CHECKING as TYPE_CHECKING from typing import TypeVar, Any _TC = TypeVar('_TC', bound=Type[object]) class _SpecialForm: def __getitem__(self, typeargs: Any) -> Any: ... def runtime(cls: _TC) -> _TC: ... Protocol: _SpecialForm = ... 
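# --- Editor's illustrative sketch (not part of the upstream typeshed stub) ---
# Protocol (declared just above) enables structural typing: any object with a
# matching close() method satisfies SupportsClose, no inheritance required.
# The class and function names below are hypothetical.
from typing_extensions import Protocol

class SupportsClose(Protocol):
    def close(self) -> None: ...

def shutdown(resource: SupportsClose) -> None:
    resource.close()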
if sys.version_info >= (3, 3): from typing import ChainMap as ChainMap if sys.version_info >= (3, 5): from typing import AsyncIterable as AsyncIterable from typing import AsyncIterator as AsyncIterator from typing import AsyncContextManager as AsyncContextManager from typing import Awaitable as Awaitable from typing import Coroutine as Coroutine if sys.version_info >= (3, 6): from typing import AsyncGenerator as AsyncGenerator mypy-0.560/typeshed/third_party/2and3/ujson.pyi0000644€tŠÔÚ€2›s®0000000204413215007212025633 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for ujson # See: https://pypi.python.org/pypi/ujson from typing import Any, AnyStr, IO, Optional __version__ = ... # type: str def encode( obj: Any, ensure_ascii: bool = ..., double_precision: int = ..., encode_html_chars: bool = ..., escape_forward_slashes: bool = ..., sort_keys: bool = ..., indent: int = ..., ) -> str: ... def dumps( obj: Any, ensure_ascii: bool = ..., double_precision: int = ..., encode_html_chars: bool = ..., escape_forward_slashes: bool = ..., sort_keys: bool = ..., indent: int = ..., ) -> str: ... def dump( obj: Any, fp: IO[str], ensure_ascii: bool = ..., double_precision: int = ..., encode_html_chars: bool = ..., escape_forward_slashes: bool = ..., sort_keys: bool = ..., indent: int = ..., ) -> None: ... def decode( s: AnyStr, precise_float: bool = ..., ) -> Any: ... def loads( s: AnyStr, precise_float: bool = ..., ) -> Any: ... def load( fp: IO[AnyStr], precise_float: bool = ..., ) -> Any: ... mypy-0.560/typeshed/third_party/2and3/yaml/0000755€tŠÔÚ€2›s®0000000000013215007244024721 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/2and3/yaml/__init__.pyi0000644€tŠÔÚ€2›s®0000000461713215007212027206 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Iterator, Union, IO from yaml.error import * # noqa: F403 from yaml.tokens import * # noqa: F403 from yaml.events import * # noqa: F403 from yaml.nodes import * # noqa: F403 from yaml.loader import * # noqa: F403 from yaml.dumper import * # noqa: F403 from . import resolver # Help mypy a bit; this is implied by loader and dumper # TODO: stubs for cyaml? # from cyaml import * __with_libyaml__ = ... # type: Any def scan(stream, Loader=...): ... def parse(stream, Loader=...): ... def compose(stream, Loader=...): ... def compose_all(stream, Loader=...): ... def load(stream: Union[str, IO[str]], Loader=...) -> Any: ... def load_all(stream: Union[str, IO[str]], Loader=...) -> Iterator[Any]: ... def safe_load(stream: Union[str, IO[str]]) -> Any: ... def safe_load_all(stream: Union[str, IO[str]]) -> Iterator[Any]: ... def emit(events, stream=..., Dumper=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=...): ... def serialize_all(nodes, stream=..., Dumper=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...): ... def serialize(node, stream=..., Dumper=..., **kwds): ... def dump_all(documents, stream=..., Dumper=..., default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...): ... def dump(data, stream=..., Dumper=..., **kwds): ... def safe_dump_all(documents, stream=..., **kwds): ... def safe_dump(data, stream=..., **kwds): ... def add_implicit_resolver(tag, regexp, first=..., Loader=..., Dumper=...): ... 
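# --- Editor's illustrative sketch (not part of the upstream typeshed stub) ---
# Typical round trip through the loader/dumper entry points stubbed in this
# module; the document contents are hypothetical and assume PyYAML is installed.
import yaml

data = yaml.safe_load("a: 1\nb: [2, 3]\n")             # -> {'a': 1, 'b': [2, 3]}
text = yaml.safe_dump(data, default_flow_style=False)  # back to a YAML string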
def add_path_resolver(tag, path, kind=..., Loader=..., Dumper=...): ... def add_constructor(tag, constructor, Loader=...): ... def add_multi_constructor(tag_prefix, multi_constructor, Loader=...): ... def add_representer(data_type, representer, Dumper=...): ... def add_multi_representer(data_type, multi_representer, Dumper=...): ... class YAMLObjectMetaclass(type): def __init__(cls, name, bases, kwds) -> None: ... class YAMLObject: __metaclass__ = YAMLObjectMetaclass yaml_loader = ... # type: Any yaml_dumper = ... # type: Any yaml_tag = ... # type: Any yaml_flow_style = ... # type: Any @classmethod def from_yaml(cls, loader, node): ... @classmethod def to_yaml(cls, dumper, data): ... mypy-0.560/typeshed/third_party/2and3/yaml/composer.pyi0000644€tŠÔÚ€2›s®0000000114113215007212027263 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import Mark, YAMLError, MarkedYAMLError from yaml.nodes import Node, ScalarNode, CollectionNode, SequenceNode, MappingNode class ComposerError(MarkedYAMLError): ... class Composer: anchors = ... # type: Any def __init__(self) -> None: ... def check_node(self): ... def get_node(self): ... def get_single_node(self): ... def compose_document(self): ... def compose_node(self, parent, index): ... def compose_scalar_node(self, anchor): ... def compose_sequence_node(self, anchor): ... def compose_mapping_node(self, anchor): ... mypy-0.560/typeshed/third_party/2and3/yaml/constructor.pyi0000644€tŠÔÚ€2›s®0000000547513215007212030037 0ustar jukkaDROPBOX\Domain Users00000000000000from yaml.error import Mark, YAMLError, MarkedYAMLError from yaml.nodes import Node, ScalarNode, CollectionNode, SequenceNode, MappingNode from typing import Any class ConstructorError(MarkedYAMLError): ... class BaseConstructor: yaml_constructors = ... # type: Any yaml_multi_constructors = ... # type: Any constructed_objects = ... # type: Any recursive_objects = ... # type: Any state_generators = ... # type: Any deep_construct = ... # type: Any def __init__(self) -> None: ... def check_data(self): ... def get_data(self): ... def get_single_data(self): ... def construct_document(self, node): ... def construct_object(self, node, deep=...): ... def construct_scalar(self, node): ... def construct_sequence(self, node, deep=...): ... def construct_mapping(self, node, deep=...): ... def construct_pairs(self, node, deep=...): ... @classmethod def add_constructor(cls, tag, constructor): ... @classmethod def add_multi_constructor(cls, tag_prefix, multi_constructor): ... class SafeConstructor(BaseConstructor): def construct_scalar(self, node): ... def flatten_mapping(self, node): ... def construct_mapping(self, node, deep=...): ... def construct_yaml_null(self, node): ... bool_values = ... # type: Any def construct_yaml_bool(self, node): ... def construct_yaml_int(self, node): ... inf_value = ... # type: Any nan_value = ... # type: Any def construct_yaml_float(self, node): ... def construct_yaml_binary(self, node): ... timestamp_regexp = ... # type: Any def construct_yaml_timestamp(self, node): ... def construct_yaml_omap(self, node): ... def construct_yaml_pairs(self, node): ... def construct_yaml_set(self, node): ... def construct_yaml_str(self, node): ... def construct_yaml_seq(self, node): ... def construct_yaml_map(self, node): ... def construct_yaml_object(self, node, cls): ... def construct_undefined(self, node): ... class Constructor(SafeConstructor): def construct_python_str(self, node): ... def construct_python_unicode(self, node): ... 
def construct_python_long(self, node): ... def construct_python_complex(self, node): ... def construct_python_tuple(self, node): ... def find_python_module(self, name, mark): ... def find_python_name(self, name, mark): ... def construct_python_name(self, suffix, node): ... def construct_python_module(self, suffix, node): ... class classobj: ... def make_python_instance(self, suffix, node, args=..., kwds=..., newobj=...): ... def set_python_instance_state(self, instance, state): ... def construct_python_object(self, suffix, node): ... def construct_python_object_apply(self, suffix, node, newobj=...): ... def construct_python_object_new(self, suffix, node): ... mypy-0.560/typeshed/third_party/2and3/yaml/dumper.pyi0000644€tŠÔÚ€2›s®0000000211213215007212026727 0ustar jukkaDROPBOX\Domain Users00000000000000from yaml.emitter import Emitter from yaml.serializer import Serializer from yaml.representer import BaseRepresenter, Representer, SafeRepresenter from yaml.resolver import BaseResolver, Resolver class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver): def __init__(self, stream, default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...) -> None: ... class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): def __init__(self, stream, default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...) -> None: ... class Dumper(Emitter, Serializer, Representer, Resolver): def __init__(self, stream, default_style=..., default_flow_style=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...) -> None: ... mypy-0.560/typeshed/third_party/2and3/yaml/emitter.pyi0000644€tŠÔÚ€2›s®0000001033713215007212027114 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import YAMLError class EmitterError(YAMLError): ... class ScalarAnalysis: scalar = ... # type: Any empty = ... # type: Any multiline = ... # type: Any allow_flow_plain = ... # type: Any allow_block_plain = ... # type: Any allow_single_quoted = ... # type: Any allow_double_quoted = ... # type: Any allow_block = ... # type: Any def __init__(self, scalar, empty, multiline, allow_flow_plain, allow_block_plain, allow_single_quoted, allow_double_quoted, allow_block) -> None: ... class Emitter: DEFAULT_TAG_PREFIXES = ... # type: Any stream = ... # type: Any encoding = ... # type: Any states = ... # type: Any state = ... # type: Any events = ... # type: Any event = ... # type: Any indents = ... # type: Any indent = ... # type: Any flow_level = ... # type: Any root_context = ... # type: Any sequence_context = ... # type: Any mapping_context = ... # type: Any simple_key_context = ... # type: Any line = ... # type: Any column = ... # type: Any whitespace = ... # type: Any indention = ... # type: Any open_ended = ... # type: Any canonical = ... # type: Any allow_unicode = ... # type: Any best_indent = ... # type: Any best_width = ... # type: Any best_line_break = ... # type: Any tag_prefixes = ... # type: Any prepared_anchor = ... # type: Any prepared_tag = ... # type: Any analysis = ... # type: Any style = ... # type: Any def __init__(self, stream, canonical=..., indent=..., width=..., allow_unicode=..., line_break=...) -> None: ... def dispose(self): ... 
def emit(self, event): ... def need_more_events(self): ... def need_events(self, count): ... def increase_indent(self, flow=..., indentless=...): ... def expect_stream_start(self): ... def expect_nothing(self): ... def expect_first_document_start(self): ... def expect_document_start(self, first=...): ... def expect_document_end(self): ... def expect_document_root(self): ... def expect_node(self, root=..., sequence=..., mapping=..., simple_key=...): ... def expect_alias(self): ... def expect_scalar(self): ... def expect_flow_sequence(self): ... def expect_first_flow_sequence_item(self): ... def expect_flow_sequence_item(self): ... def expect_flow_mapping(self): ... def expect_first_flow_mapping_key(self): ... def expect_flow_mapping_key(self): ... def expect_flow_mapping_simple_value(self): ... def expect_flow_mapping_value(self): ... def expect_block_sequence(self): ... def expect_first_block_sequence_item(self): ... def expect_block_sequence_item(self, first=...): ... def expect_block_mapping(self): ... def expect_first_block_mapping_key(self): ... def expect_block_mapping_key(self, first=...): ... def expect_block_mapping_simple_value(self): ... def expect_block_mapping_value(self): ... def check_empty_sequence(self): ... def check_empty_mapping(self): ... def check_empty_document(self): ... def check_simple_key(self): ... def process_anchor(self, indicator): ... def process_tag(self): ... def choose_scalar_style(self): ... def process_scalar(self): ... def prepare_version(self, version): ... def prepare_tag_handle(self, handle): ... def prepare_tag_prefix(self, prefix): ... def prepare_tag(self, tag): ... def prepare_anchor(self, anchor): ... def analyze_scalar(self, scalar): ... def flush_stream(self): ... def write_stream_start(self): ... def write_stream_end(self): ... def write_indicator(self, indicator, need_whitespace, whitespace=..., indention=...): ... def write_indent(self): ... def write_line_break(self, data=...): ... def write_version_directive(self, version_text): ... def write_tag_directive(self, handle_text, prefix_text): ... def write_single_quoted(self, text, split=...): ... ESCAPE_REPLACEMENTS = ... # type: Any def write_double_quoted(self, text, split=...): ... def determine_block_hints(self, text): ... def write_folded(self, text): ... def write_literal(self, text): ... def write_plain(self, text, split=...): ... mypy-0.560/typeshed/third_party/2and3/yaml/error.pyi0000644€tŠÔÚ€2›s®0000000126113215007212026570 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class Mark: name = ... # type: Any index = ... # type: Any line = ... # type: Any column = ... # type: Any buffer = ... # type: Any pointer = ... # type: Any def __init__(self, name, index, line, column, buffer, pointer) -> None: ... def get_snippet(self, indent=..., max_length=...): ... class YAMLError(Exception): ... class MarkedYAMLError(YAMLError): context = ... # type: Any context_mark = ... # type: Any problem = ... # type: Any problem_mark = ... # type: Any note = ... # type: Any def __init__(self, context=..., context_mark=..., problem=..., problem_mark=..., note=...) -> None: ... mypy-0.560/typeshed/third_party/2and3/yaml/events.pyi0000644€tŠÔÚ€2›s®0000000402413215007212026743 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class Event: start_mark = ... # type: Any end_mark = ... # type: Any def __init__(self, start_mark=..., end_mark=...) -> None: ... class NodeEvent(Event): anchor = ... # type: Any start_mark = ... # type: Any end_mark = ... 
# type: Any def __init__(self, anchor, start_mark=..., end_mark=...) -> None: ... class CollectionStartEvent(NodeEvent): anchor = ... # type: Any tag = ... # type: Any implicit = ... # type: Any start_mark = ... # type: Any end_mark = ... # type: Any flow_style = ... # type: Any def __init__(self, anchor, tag, implicit, start_mark=..., end_mark=..., flow_style=...) -> None: ... class CollectionEndEvent(Event): ... class StreamStartEvent(Event): start_mark = ... # type: Any end_mark = ... # type: Any encoding = ... # type: Any def __init__(self, start_mark=..., end_mark=..., encoding=...) -> None: ... class StreamEndEvent(Event): ... class DocumentStartEvent(Event): start_mark = ... # type: Any end_mark = ... # type: Any explicit = ... # type: Any version = ... # type: Any tags = ... # type: Any def __init__(self, start_mark=..., end_mark=..., explicit=..., version=..., tags=...) -> None: ... class DocumentEndEvent(Event): start_mark = ... # type: Any end_mark = ... # type: Any explicit = ... # type: Any def __init__(self, start_mark=..., end_mark=..., explicit=...) -> None: ... class AliasEvent(NodeEvent): ... class ScalarEvent(NodeEvent): anchor = ... # type: Any tag = ... # type: Any implicit = ... # type: Any value = ... # type: Any start_mark = ... # type: Any end_mark = ... # type: Any style = ... # type: Any def __init__(self, anchor, tag, implicit, value, start_mark=..., end_mark=..., style=...) -> None: ... class SequenceStartEvent(CollectionStartEvent): ... class SequenceEndEvent(CollectionEndEvent): ... class MappingStartEvent(CollectionStartEvent): ... class MappingEndEvent(CollectionEndEvent): ... mypy-0.560/typeshed/third_party/2and3/yaml/loader.pyi0000644€tŠÔÚ€2›s®0000000116113215007212026704 0ustar jukkaDROPBOX\Domain Users00000000000000from yaml.reader import Reader from yaml.scanner import Scanner from yaml.parser import Parser from yaml.composer import Composer from yaml.constructor import BaseConstructor, SafeConstructor, Constructor from yaml.resolver import BaseResolver, Resolver class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): def __init__(self, stream) -> None: ... class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): def __init__(self, stream) -> None: ... class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): def __init__(self, stream) -> None: ... mypy-0.560/typeshed/third_party/2and3/yaml/nodes.pyi0000644€tŠÔÚ€2›s®0000000163313215007212026552 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class Node: tag = ... # type: Any value = ... # type: Any start_mark = ... # type: Any end_mark = ... # type: Any def __init__(self, tag, value, start_mark, end_mark) -> None: ... class ScalarNode(Node): id = ... # type: Any tag = ... # type: Any value = ... # type: Any start_mark = ... # type: Any end_mark = ... # type: Any style = ... # type: Any def __init__(self, tag, value, start_mark=..., end_mark=..., style=...) -> None: ... class CollectionNode(Node): tag = ... # type: Any value = ... # type: Any start_mark = ... # type: Any end_mark = ... # type: Any flow_style = ... # type: Any def __init__(self, tag, value, start_mark=..., end_mark=..., flow_style=...) -> None: ... class SequenceNode(CollectionNode): id = ... # type: Any class MappingNode(CollectionNode): id = ... 
# type: Any mypy-0.560/typeshed/third_party/2and3/yaml/parser.pyi0000644€tŠÔÚ€2›s®0000000334113215007212026734 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import MarkedYAMLError class ParserError(MarkedYAMLError): ... class Parser: DEFAULT_TAGS = ... # type: Any current_event = ... # type: Any yaml_version = ... # type: Any tag_handles = ... # type: Any states = ... # type: Any marks = ... # type: Any state = ... # type: Any def __init__(self) -> None: ... def dispose(self): ... def check_event(self, *choices): ... def peek_event(self): ... def get_event(self): ... def parse_stream_start(self): ... def parse_implicit_document_start(self): ... def parse_document_start(self): ... def parse_document_end(self): ... def parse_document_content(self): ... def process_directives(self): ... def parse_block_node(self): ... def parse_flow_node(self): ... def parse_block_node_or_indentless_sequence(self): ... def parse_node(self, block=..., indentless_sequence=...): ... def parse_block_sequence_first_entry(self): ... def parse_block_sequence_entry(self): ... def parse_indentless_sequence_entry(self): ... def parse_block_mapping_first_key(self): ... def parse_block_mapping_key(self): ... def parse_block_mapping_value(self): ... def parse_flow_sequence_first_entry(self): ... def parse_flow_sequence_entry(self, first=...): ... def parse_flow_sequence_entry_mapping_key(self): ... def parse_flow_sequence_entry_mapping_value(self): ... def parse_flow_sequence_entry_mapping_end(self): ... def parse_flow_mapping_first_key(self): ... def parse_flow_mapping_key(self, first=...): ... def parse_flow_mapping_value(self): ... def parse_flow_mapping_empty_value(self): ... def process_empty_scalar(self, mark): ... mypy-0.560/typeshed/third_party/2and3/yaml/reader.pyi0000644€tŠÔÚ€2›s®0000000207313215007212026703 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import YAMLError class ReaderError(YAMLError): name = ... # type: Any character = ... # type: Any position = ... # type: Any encoding = ... # type: Any reason = ... # type: Any def __init__(self, name, position, character, encoding, reason) -> None: ... class Reader: name = ... # type: Any stream = ... # type: Any stream_pointer = ... # type: Any eof = ... # type: Any buffer = ... # type: Any pointer = ... # type: Any raw_buffer = ... # type: Any raw_decode = ... # type: Any encoding = ... # type: Any index = ... # type: Any line = ... # type: Any column = ... # type: Any def __init__(self, stream) -> None: ... def peek(self, index=...): ... def prefix(self, length=...): ... def forward(self, length=...): ... def get_mark(self): ... def determine_encoding(self): ... NON_PRINTABLE = ... # type: Any def check_printable(self, data): ... def update(self, length): ... def update_raw(self, size=...): ... mypy-0.560/typeshed/third_party/2and3/yaml/representer.pyi0000644€tŠÔÚ€2›s®0000000410413215007212027774 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import YAMLError class RepresenterError(YAMLError): ... class BaseRepresenter: yaml_representers = ... # type: Any yaml_multi_representers = ... # type: Any default_style = ... # type: Any default_flow_style = ... # type: Any represented_objects = ... # type: Any object_keeper = ... # type: Any alias_key = ... # type: Any def __init__(self, default_style=..., default_flow_style=...) -> None: ... def represent(self, data): ... def get_classobj_bases(self, cls): ... def represent_data(self, data): ... 
@classmethod def add_representer(cls, data_type, representer): ... @classmethod def add_multi_representer(cls, data_type, representer): ... def represent_scalar(self, tag, value, style=...): ... def represent_sequence(self, tag, sequence, flow_style=...): ... def represent_mapping(self, tag, mapping, flow_style=...): ... def ignore_aliases(self, data): ... class SafeRepresenter(BaseRepresenter): def ignore_aliases(self, data): ... def represent_none(self, data): ... def represent_str(self, data): ... def represent_unicode(self, data): ... def represent_bool(self, data): ... def represent_int(self, data): ... def represent_long(self, data): ... inf_value = ... # type: Any def represent_float(self, data): ... def represent_list(self, data): ... def represent_dict(self, data): ... def represent_set(self, data): ... def represent_date(self, data): ... def represent_datetime(self, data): ... def represent_yaml_object(self, tag, data, cls, flow_style=...): ... def represent_undefined(self, data): ... class Representer(SafeRepresenter): def represent_str(self, data): ... def represent_unicode(self, data): ... def represent_long(self, data): ... def represent_complex(self, data): ... def represent_tuple(self, data): ... def represent_name(self, data): ... def represent_module(self, data): ... def represent_instance(self, data): ... def represent_object(self, data): ... mypy-0.560/typeshed/third_party/2and3/yaml/resolver.pyi0000644€tŠÔÚ€2›s®0000000156313215007212027305 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import YAMLError class ResolverError(YAMLError): ... class BaseResolver: DEFAULT_SCALAR_TAG = ... # type: Any DEFAULT_SEQUENCE_TAG = ... # type: Any DEFAULT_MAPPING_TAG = ... # type: Any yaml_implicit_resolvers = ... # type: Any yaml_path_resolvers = ... # type: Any resolver_exact_paths = ... # type: Any resolver_prefix_paths = ... # type: Any def __init__(self) -> None: ... @classmethod def add_implicit_resolver(cls, tag, regexp, first): ... @classmethod def add_path_resolver(cls, tag, path, kind=...): ... def descend_resolver(self, current_node, current_index): ... def ascend_resolver(self): ... def check_resolver_prefix(self, depth, path, kind, current_node, current_index): ... def resolve(self, kind, value, implicit): ... class Resolver(BaseResolver): ... mypy-0.560/typeshed/third_party/2and3/yaml/scanner.pyi0000644€tŠÔÚ€2›s®0000000732413215007212027076 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import MarkedYAMLError class ScannerError(MarkedYAMLError): ... class SimpleKey: token_number = ... # type: Any required = ... # type: Any index = ... # type: Any line = ... # type: Any column = ... # type: Any mark = ... # type: Any def __init__(self, token_number, required, index, line, column, mark) -> None: ... class Scanner: done = ... # type: Any flow_level = ... # type: Any tokens = ... # type: Any tokens_taken = ... # type: Any indent = ... # type: Any indents = ... # type: Any allow_simple_key = ... # type: Any possible_simple_keys = ... # type: Any def __init__(self) -> None: ... def check_token(self, *choices): ... def peek_token(self): ... def get_token(self): ... def need_more_tokens(self): ... def fetch_more_tokens(self): ... def next_possible_simple_key(self): ... def stale_possible_simple_keys(self): ... def save_possible_simple_key(self): ... def remove_possible_simple_key(self): ... def unwind_indent(self, column): ... def add_indent(self, column): ... def fetch_stream_start(self): ... 
def fetch_stream_end(self): ... def fetch_directive(self): ... def fetch_document_start(self): ... def fetch_document_end(self): ... def fetch_document_indicator(self, TokenClass): ... def fetch_flow_sequence_start(self): ... def fetch_flow_mapping_start(self): ... def fetch_flow_collection_start(self, TokenClass): ... def fetch_flow_sequence_end(self): ... def fetch_flow_mapping_end(self): ... def fetch_flow_collection_end(self, TokenClass): ... def fetch_flow_entry(self): ... def fetch_block_entry(self): ... def fetch_key(self): ... def fetch_value(self): ... def fetch_alias(self): ... def fetch_anchor(self): ... def fetch_tag(self): ... def fetch_literal(self): ... def fetch_folded(self): ... def fetch_block_scalar(self, style): ... def fetch_single(self): ... def fetch_double(self): ... def fetch_flow_scalar(self, style): ... def fetch_plain(self): ... def check_directive(self): ... def check_document_start(self): ... def check_document_end(self): ... def check_block_entry(self): ... def check_key(self): ... def check_value(self): ... def check_plain(self): ... def scan_to_next_token(self): ... def scan_directive(self): ... def scan_directive_name(self, start_mark): ... def scan_yaml_directive_value(self, start_mark): ... def scan_yaml_directive_number(self, start_mark): ... def scan_tag_directive_value(self, start_mark): ... def scan_tag_directive_handle(self, start_mark): ... def scan_tag_directive_prefix(self, start_mark): ... def scan_directive_ignored_line(self, start_mark): ... def scan_anchor(self, TokenClass): ... def scan_tag(self): ... def scan_block_scalar(self, style): ... def scan_block_scalar_indicators(self, start_mark): ... def scan_block_scalar_ignored_line(self, start_mark): ... def scan_block_scalar_indentation(self): ... def scan_block_scalar_breaks(self, indent): ... def scan_flow_scalar(self, style): ... ESCAPE_REPLACEMENTS = ... # type: Any ESCAPE_CODES = ... # type: Any def scan_flow_scalar_non_spaces(self, double, start_mark): ... def scan_flow_scalar_spaces(self, double, start_mark): ... def scan_flow_scalar_breaks(self, double, start_mark): ... def scan_plain(self): ... def scan_plain_spaces(self, indent, start_mark): ... def scan_tag_handle(self, name, start_mark): ... def scan_tag_uri(self, name, start_mark): ... def scan_uri_escapes(self, name, start_mark): ... def scan_line_break(self): ... mypy-0.560/typeshed/third_party/2and3/yaml/serializer.pyi0000644€tŠÔÚ€2›s®0000000144513215007212027614 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from yaml.error import YAMLError class SerializerError(YAMLError): ... class Serializer: ANCHOR_TEMPLATE = ... # type: Any use_encoding = ... # type: Any use_explicit_start = ... # type: Any use_explicit_end = ... # type: Any use_version = ... # type: Any use_tags = ... # type: Any serialized_nodes = ... # type: Any anchors = ... # type: Any last_anchor_id = ... # type: Any closed = ... # type: Any def __init__(self, encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...) -> None: ... def open(self): ... def close(self): ... def serialize(self, node): ... def anchor_node(self, node): ... def generate_anchor(self, node): ... def serialize_node(self, node, parent, index): ... mypy-0.560/typeshed/third_party/2and3/yaml/tokens.pyi0000644€tŠÔÚ€2›s®0000000453213215007212026746 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class Token: start_mark = ... # type: Any end_mark = ... # type: Any def __init__(self, start_mark, end_mark) -> None: ... 
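# Usage sketch (not part of the stubs): the yaml files above describe PyYAML's
# pipeline -- Reader/Scanner/Parser/Composer/Constructor on input,
# Representer/Serializer/Emitter on output -- with SafeLoader/SafeDumper tying the
# stages together. The public helpers used below live in yaml/__init__.pyi rather
# than in the files shown here, and the sample document is invented for illustration.
import yaml

document = """
name: example
ports:
  - 8080
  - 8443
"""

data = yaml.safe_load(document)                         # SafeLoader under the hood
text = yaml.safe_dump(data, default_flow_style=False)   # SafeDumper under the hood

# The same round trip with the stubbed classes passed explicitly:
data2 = yaml.load(text, Loader=yaml.SafeLoader)
assert data2 == data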
class DirectiveToken(Token): id = ... # type: Any name = ... # type: Any value = ... # type: Any start_mark = ... # type: Any end_mark = ... # type: Any def __init__(self, name, value, start_mark, end_mark) -> None: ... class DocumentStartToken(Token): id = ... # type: Any class DocumentEndToken(Token): id = ... # type: Any class StreamStartToken(Token): id = ... # type: Any start_mark = ... # type: Any end_mark = ... # type: Any encoding = ... # type: Any def __init__(self, start_mark=..., end_mark=..., encoding=...) -> None: ... class StreamEndToken(Token): id = ... # type: Any class BlockSequenceStartToken(Token): id = ... # type: Any class BlockMappingStartToken(Token): id = ... # type: Any class BlockEndToken(Token): id = ... # type: Any class FlowSequenceStartToken(Token): id = ... # type: Any class FlowMappingStartToken(Token): id = ... # type: Any class FlowSequenceEndToken(Token): id = ... # type: Any class FlowMappingEndToken(Token): id = ... # type: Any class KeyToken(Token): id = ... # type: Any class ValueToken(Token): id = ... # type: Any class BlockEntryToken(Token): id = ... # type: Any class FlowEntryToken(Token): id = ... # type: Any class AliasToken(Token): id = ... # type: Any value = ... # type: Any start_mark = ... # type: Any end_mark = ... # type: Any def __init__(self, value, start_mark, end_mark) -> None: ... class AnchorToken(Token): id = ... # type: Any value = ... # type: Any start_mark = ... # type: Any end_mark = ... # type: Any def __init__(self, value, start_mark, end_mark) -> None: ... class TagToken(Token): id = ... # type: Any value = ... # type: Any start_mark = ... # type: Any end_mark = ... # type: Any def __init__(self, value, start_mark, end_mark) -> None: ... class ScalarToken(Token): id = ... # type: Any value = ... # type: Any plain = ... # type: Any start_mark = ... # type: Any end_mark = ... # type: Any style = ... # type: Any def __init__(self, value, plain, start_mark, end_mark, style=...) -> None: ... mypy-0.560/typeshed/third_party/3/0000755€tŠÔÚ€2›s®0000000000013215007244023212 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/dateutil/0000755€tŠÔÚ€2›s®0000000000013215007244025025 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/dateutil/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212027270 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/dateutil/parser.pyi0000644€tŠÔÚ€2›s®0000000345013215007212027041 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import List, Tuple, Optional, Callable, Union, IO, Any, Dict, Mapping, Text from datetime import datetime, tzinfo _FileOrStr = Union[bytes, Text, IO[str], IO[Any]] __all__ = ... # type: List[str] class parserinfo(object): JUMP = ... # type: List[str] WEEKDAYS = ... # type: List[Tuple[str, str]] MONTHS = ... # type: List[Tuple[str, str]] HMS = ... # type: List[Tuple[str, str, str]] AMPM = ... # type: List[Tuple[str, str]] UTCZONE = ... # type: List[str] PERTAIN = ... # type: List[str] TZOFFSET = ... # type: Dict[str, int] def __init__(self, dayfirst: bool=..., yearfirst: bool=...) -> None: ... def jump(self, name: Text) -> bool: ... def weekday(self, name: Text) -> Optional[int]: ... def month(self, name: Text) -> Optional[int]: ... def hms(self, name: Text) -> Optional[int]: ... def ampm(self, name: Text) -> Optional[int]: ... def pertain(self, name: Text) -> bool: ... def utczone(self, name: Text) -> bool: ... def tzoffset(self, name: Text) -> Optional[int]: ... 
def convertyear(self, year: int) -> int: ... def validate(self, res: datetime) -> bool: ... class parser(object): def __init__(self, info: Optional[parserinfo] = ...) -> None: ... def parse(self, timestr: _FileOrStr, default: Optional[datetime] = ..., ignoretz: bool = ..., tzinfos: Optional[Mapping[Text, tzinfo]] = ..., **kwargs: Any) -> datetime: ... DEFAULTPARSER = ... # type: parser def parse(timestr: _FileOrStr, parserinfo: Optional[parserinfo] = ..., **kwargs: Any) -> datetime: ... class _tzparser: ... DEFAULTTZPARSER = ... # type: _tzparser class InvalidDatetimeError(ValueError): ... class InvalidDateError(InvalidDatetimeError): ... class InvalidTimeError(InvalidDatetimeError): ... mypy-0.560/typeshed/third_party/3/dateutil/relativedelta.pyi0000644€tŠÔÚ€2›s®0000000614113215007212030372 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import overload, Any, List, Optional, SupportsFloat, TypeVar, Union from datetime import date, datetime, timedelta __all__ = ... # type: List[str] _SelfT = TypeVar('_SelfT', bound=relativedelta) _DateT = TypeVar('_DateT', date, datetime) class weekday(object): def __init__(self, weekday: int, n: Optional[int]=...) -> None: ... def __call__(self, n: int) -> 'weekday': ... def __eq__(self, other) -> bool: ... def __repr__(self) -> str: ... weekday = ... # type: int n = ... # type: int MO = ... # type: weekday TU = ... # type: weekday WE = ... # type: weekday TH = ... # type: weekday FR = ... # type: weekday SA = ... # type: weekday SU = ... # type: weekday class relativedelta(object): def __init__(self, dt1: Optional[date]=..., dt2: Optional[date]=..., years: Optional[int]=..., months: Optional[int]=..., days: Optional[int]=..., leapdays: Optional[int]=..., weeks: Optional[int]=..., hours: Optional[int]=..., minutes: Optional[int]=..., seconds: Optional[int]=..., microseconds: Optional[int]=..., year: Optional[int]=..., month: Optional[int]=..., day: Optional[int]=..., weekday: Optional[Union[int, weekday]]=..., yearday: Optional[int]=..., nlyearday: Optional[int]=..., hour: Optional[int]=..., minute: Optional[int]=..., second: Optional[int]=..., microsecond: Optional[int]=...) -> None: ... @property def weeks(self) -> int: ... @weeks.setter def weeks(self, value: int) -> None: ... def normalized(self: _SelfT) -> _SelfT: ... # TODO: use Union when mypy will handle it properly in overloaded operator # methods (#2129, #1442, #1264 in mypy) @overload def __add__(self: _SelfT, other: relativedelta) -> _SelfT: ... @overload def __add__(self: _SelfT, other: timedelta) -> _SelfT: ... @overload def __add__(self, other: _DateT) -> _DateT: ... @overload def __radd__(self: _SelfT, other: relativedelta) -> _SelfT: ... @overload def __radd__(self: _SelfT, other: timedelta) -> _SelfT: ... @overload def __radd__(self, other: _DateT) -> _DateT: ... @overload def __rsub__(self: _SelfT, other: relativedelta) -> _SelfT: ... @overload def __rsub__(self: _SelfT, other: timedelta) -> _SelfT: ... @overload def __rsub__(self, other: _DateT) -> _DateT: ... def __sub__(self: _SelfT, other: relativedelta) -> _SelfT: ... def __neg__(self: _SelfT) -> _SelfT: ... def __bool__(self) -> bool: ... def __nonzero__(self) -> bool: ... def __mul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... def __rmul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... def __eq__(self, other) -> bool: ... def __ne__(self, other: object) -> bool: ... def __div__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... def __truediv__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... 
def __repr__(self) -> str: ... mypy-0.560/typeshed/third_party/3/dateutil/tz/0000755€tŠÔÚ€2›s®0000000000013215007244025462 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/dateutil/tz/__init__.pyi0000644€tŠÔÚ€2›s®0000000043413215007212027740 0ustar jukkaDROPBOX\Domain Users00000000000000from .tz import ( tzutc as tzutc, tzoffset as tzoffset, tzlocal as tzlocal, tzfile as tzfile, tzrange as tzrange, tzstr as tzstr, tzical as tzical, gettz as gettz, datetime_exists as datetime_exists, datetime_ambiguous as datetime_ambiguous, ) mypy-0.560/typeshed/third_party/3/dateutil/tz/_common.pyi0000644€tŠÔÚ€2›s®0000000151613215007212027632 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional from datetime import datetime, tzinfo, timedelta def tzname_in_python2(namefunc): ... def enfold(dt: datetime, fold: int = ...): ... class _DatetimeWithFold(datetime): @property def fold(self): ... class _tzinfo(tzinfo): def is_ambiguous(self, dt: datetime) -> bool: ... def fromutc(self, dt: datetime) -> datetime: ... class tzrangebase(_tzinfo): def __init__(self) -> None: ... def utcoffset(self, dt: Optional[datetime]) -> Optional[timedelta]: ... def dst(self, dt: Optional[datetime]) -> Optional[timedelta]: ... def tzname(self, dt: Optional[datetime]) -> str: ... def fromutc(self, dt: datetime) -> datetime: ... def is_ambiguous(self, dt: datetime) -> bool: ... __hash__ = ... # type: Any def __ne__(self, other): ... __reduce__ = ... # type: Any mypy-0.560/typeshed/third_party/3/dateutil/tz/tz.pyi0000644€tŠÔÚ€2›s®0000000733413215007212026644 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, Optional, Union, IO, Text, Tuple, List import datetime from ._common import tzname_in_python2 as tzname_in_python2, _tzinfo as _tzinfo from ._common import tzrangebase as tzrangebase, enfold as enfold from ..relativedelta import relativedelta _FileObj = Union[str, Text, IO[str], IO[Text]] ZERO = ... # type: datetime.timedelta EPOCH = ... # type: datetime.datetime EPOCHORDINAL = ... # type: int class tzutc(datetime.tzinfo): def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def tzname(self, dt: Optional[datetime.datetime]) -> str: ... def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ... def __eq__(self, other): ... __hash__ = ... # type: Any def __ne__(self, other): ... __reduce__ = ... # type: Any class tzoffset(datetime.tzinfo): def __init__(self, name, offset) -> None: ... def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ... def tzname(self, dt: Optional[datetime.datetime]) -> str: ... def __eq__(self, other): ... __hash__ = ... # type: Any def __ne__(self, other): ... __reduce__ = ... # type: Any class tzlocal(_tzinfo): def __init__(self) -> None: ... def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def tzname(self, dt: Optional[datetime.datetime]) -> str: ... def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ... def __eq__(self, other): ... __hash__ = ... # type: Any def __ne__(self, other): ... __reduce__ = ... # type: Any class _ttinfo: def __init__(self) -> None: ... 
def __eq__(self, other): ... __hash__ = ... # type: Any def __ne__(self, other): ... class tzfile(_tzinfo): def __init__(self, fileobj: _FileObj, filename: Optional[Text] = ...) -> None: ... def is_ambiguous(self, dt: Optional[datetime.datetime], idx: Optional[int] = ...) -> bool: ... def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ... def tzname(self, dt: Optional[datetime.datetime]) -> str: ... def __eq__(self, other): ... __hash__ = ... # type: Any def __ne__(self, other): ... def __reduce__(self): ... def __reduce_ex__(self, protocol): ... class tzrange(tzrangebase): hasdst = ... # type: bool def __init__(self, stdabbr: Text, stdoffset: Union[int, datetime.timedelta, None] = ..., dstabbr: Optional[Text] = ..., dstoffset: Union[int, datetime.timedelta, None] = ..., start: Optional[relativedelta] = ..., end: Optional[relativedelta] = ...) -> None: ... def transitions(self, year: int) -> Tuple[datetime.datetime, datetime.datetime]: ... def __eq__(self, other): ... class tzstr(tzrange): hasdst = ... # type: bool def __init__(self, s: Union[bytes, _FileObj], posix_offset: bool = ...) -> None: ... class tzical: def __init__(self, fileobj: _FileObj) -> None: ... def keys(self): ... def get(self, tzid: Optional[Any] = ...): ... TZFILES = ... # type: List[str] TZPATHS = ... # type: List[str] def gettz(name: Optional[Text] = ...) -> Optional[datetime.tzinfo]: ... def datetime_exists(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ... def datetime_ambiguous(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ... mypy-0.560/typeshed/third_party/3/docutils/0000755€tŠÔÚ€2›s®0000000000013215007244025040 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/docutils/__init__.pyi0000644€tŠÔÚ€2›s®0000000000413215007212027307 0ustar jukkaDROPBOX\Domain Users00000000000000... mypy-0.560/typeshed/third_party/3/docutils/examples.pyi0000644€tŠÔÚ€2›s®0000000006613215007212027376 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any html_parts = ... # type: Any mypy-0.560/typeshed/third_party/3/docutils/nodes.pyi0000644€tŠÔÚ€2›s®0000000034213215007212026665 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any, List class reference: def __init__(self, rawsource: str = ..., text: str = ..., *children: List[Any], **attributes) -> None: ... mypy-0.560/typeshed/third_party/3/docutils/parsers/0000755€tŠÔÚ€2›s®0000000000013215007244026517 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/docutils/parsers/__init__.pyi0000644€tŠÔÚ€2›s®0000000000413215007212030766 0ustar jukkaDROPBOX\Domain Users00000000000000... mypy-0.560/typeshed/third_party/3/docutils/parsers/rst/0000755€tŠÔÚ€2›s®0000000000013215007244027327 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/docutils/parsers/rst/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212031572 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/docutils/parsers/rst/nodes.pyi0000644€tŠÔÚ€2›s®0000000000413215007212031147 0ustar jukkaDROPBOX\Domain Users00000000000000... 
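# Usage sketch (not part of the stubs): the dateutil stubs above -- parser,
# relativedelta and tz -- cover the package's main entry points. The zone name and
# timestamp below are invented for illustration, and gettz() may return None when
# the zone database is unavailable, which the Optional return type reflects.
from dateutil import parser, tz
from dateutil.relativedelta import relativedelta

dt = parser.parse("2017-12-13 09:30:00")        # naive datetime
zone = tz.gettz("Europe/Helsinki")              # Optional[datetime.tzinfo]
if zone is not None:
    aware = dt.replace(tzinfo=zone)
    # relativedelta does calendar-aware arithmetic, unlike datetime.timedelta.
    next_month = aware + relativedelta(months=+1)
    # Helpers stubbed in tz.pyi above:
    tz.datetime_exists(aware, tz=zone)
    tz.datetime_ambiguous(aware, tz=zone)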
mypy-0.560/typeshed/third_party/3/docutils/parsers/rst/roles.pyi0000644€tŠÔÚ€2›s®0000000063713215007212031177 0ustar jukkaDROPBOX\Domain Users00000000000000import docutils.nodes import docutils.parsers.rst.states from typing import Callable, Any, List, Dict, Tuple def register_local_role(name: str, role_fn: Callable[[str, str, str, int, docutils.parsers.rst.states.Inliner, Dict, List], Tuple[List[docutils.nodes.reference], List[docutils.nodes.reference]]] ) -> None: ... mypy-0.560/typeshed/third_party/3/docutils/parsers/rst/states.pyi0000644€tŠÔÚ€2›s®0000000011213215007212031342 0ustar jukkaDROPBOX\Domain Users00000000000000import typing class Inliner: def __init__(self) -> None: ... mypy-0.560/typeshed/third_party/3/enum.pyi0000644€tŠÔÚ€2›s®0000000466713215007212024711 0ustar jukkaDROPBOX\Domain Users00000000000000import sys from typing import List, Any, TypeVar, Union, Iterable, Iterator, TypeVar, Generic, Type, Sized, Reversible, Container, Mapping from abc import ABCMeta _T = TypeVar('_T') _S = TypeVar('_S', bound=Type[Enum]) # Note: EnumMeta actually subclasses type directly, not ABCMeta. # This is a temporary workaround to allow multiple creation of enums with builtins # such as str as mixins, which due to the handling of ABCs of builtin types, cause # spurious inconsistent metaclass structure. See #1595. class EnumMeta(ABCMeta, Iterable[Enum], Sized, Reversible[Enum], Container[Enum]): def __iter__(self: Type[_T]) -> Iterator[_T]: ... def __reversed__(self: Type[_T]) -> Iterator[_T]: ... def __contains__(self, member: Any) -> bool: ... def __getitem__(self: Type[_T], name: str) -> _T: ... @property def __members__(self: Type[_T]) -> Mapping[str, _T]: ... class Enum(metaclass=EnumMeta): def __new__(cls: Type[_T], value: Any) -> _T: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... def __dir__(self) -> List[str]: ... def __format__(self, format_spec: str) -> str: ... def __hash__(self) -> Any: ... def __reduce_ex__(self, proto: Any) -> Any: ... name = ... # type: str value = ... # type: Any class IntEnum(int, Enum): value = ... # type: int def unique(enumeration: _S) -> _S: ... if sys.version_info >= (3, 6): _auto_null = ... # type: Any # subclassing IntFlag so it picks up all implemented base functions, best modeling behavior of enum.auto() class auto(IntFlag): value = ... # type: Any class Flag(Enum): def __contains__(self: _T, other: _T) -> bool: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... def __bool__(self) -> bool: ... def __or__(self: _T, other: _T) -> _T: ... def __and__(self: _T, other: _T) -> _T: ... def __xor__(self: _T, other: _T) -> _T: ... def __invert__(self: _T) -> _T: ... # The `type: ignore` comment is needed because mypy considers the type # signatures of several methods defined in int and Flag to be incompatible. class IntFlag(int, Flag): # type: ignore def __or__(self: _T, other: Union[int, _T]) -> _T: ... def __and__(self: _T, other: Union[int, _T]) -> _T: ... def __xor__(self: _T, other: Union[int, _T]) -> _T: ... __ror__ = __or__ __rand__ = __and__ __rxor__ = __xor__ mypy-0.560/typeshed/third_party/3/itsdangerous.pyi0000644€tŠÔÚ€2›s®0000002003313215007212026435 0ustar jukkaDROPBOX\Domain Users00000000000000from datetime import datetime from typing import Any, Callable, IO, MutableMapping, Optional, Text, Tuple, TypeVar, Union PY2 = ... 
# type: bool text_type = str int_to_byte = Callable[[int], bytes] number_types = (int, float) izip = zip _bytes_like = Union[bytearray, bytes] _str_like = Union[str, bytes] _can_become_bytes = Union[str, bytes, bytearray] _comparable_bytes = TypeVar('_comparable_bytes', str, _bytes_like) _serializer = Any # must be an object that has "dumps" and "loads" attributes (e.g. the json module) class _CompactJSON: def loads(self, payload: Text) -> Any: ... def dumps(self, obj: Any) -> Text: ... compact_json = _CompactJSON EPOCH = ... # type: int def want_bytes(s: _can_become_bytes, encoding: str='', errors: str='') -> bytes: ... def is_text_serializer(serializer: _serializer) -> bool: ... def constant_time_compare(val1: _comparable_bytes, val2: _comparable_bytes) -> bool: ... class BadData(Exception): message = ... # type: str def __init__(self, message: str) -> None: ... class BadPayload(BadData): original_error = ... # type: Optional[Exception] def __init__(self, message: str, original_error: Optional[Exception] = ...) -> None: ... class BadSignature(BadData): payload = ... # type: Optional[Any] def __init__(self, message: str, payload: Optional[Any] = ...) -> None: ... class BadTimeSignature(BadSignature): date_signed = ... # type: Optional[int] def __init__(self, message: str, payload: Optional[Any] = ..., date_signed: Optional[int] = ...) -> None: ... class BadHeader(BadSignature): header = ... # type: Any original_error = ... # type: Any def __init__(self, message, payload=None, header=None, original_error=None) -> None: ... class SignatureExpired(BadTimeSignature): ... def base64_encode(string: _can_become_bytes) -> bytes: ... def base64_decode(string: _can_become_bytes) -> bytes: ... def int_to_bytes(num: int) -> bytes: ... def bytes_to_int(bytestr: _can_become_bytes) -> bytes: ... class SigningAlgorithm: def get_signature(self, key: _bytes_like, value: _bytes_like) -> bytes: ... def verify_signature(self, key: _bytes_like, value: _bytes_like, sig: _can_become_bytes) -> bool: ... class NoneAlgorithm(SigningAlgorithm): def get_signature(self, key: _bytes_like, value: _bytes_like) -> bytes: ... class HMACAlgorithm(SigningAlgorithm): default_digest_method = ... # type: Callable digest_method = ... # type: Callable def __init__(self, digest_method: Optional[Callable] = ...) -> None: ... def get_signature(self, key: _bytes_like, value: _bytes_like) -> bytes: ... class Signer: default_digest_method = ... # type: Callable default_key_derivation = ... # type: str secret_key = ... # type: _can_become_bytes sep = ... # type: _can_become_bytes salt = ... # type: _can_become_bytes key_derivation = ... # type: str digest_method = ... # type: Callable algorithm = ... # type: SigningAlgorithm def __init__(self, secret_key: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., sep: Optional[_can_become_bytes]='', key_derivation: Optional[str] = ..., digest_method: Optional[Callable] = ..., algorithm: Optional[SigningAlgorithm] = ...) -> None: ... def derive_key(self) -> bytes: ... def get_signature(self, value: _bytes_like) -> bytes: ... def sign(self, value: _bytes_like) -> bytes: ... def verify_signature(self, value: _bytes_like, sig: _can_become_bytes) -> bool: ... def unsign(self, signed_value: _can_become_bytes) -> str: ... def validate(self, signed_value: _can_become_bytes) -> bool: ... class TimestampSigner(Signer): def get_timestamp(self) -> int: ... def timestamp_to_datetime(self, ts: int) -> datetime: ... def sign(self, value: _bytes_like) -> bytes: ... 
def unsign(self, value: _can_become_bytes, max_age: Optional[int] = ..., return_timestamp: bool = ...) -> Any: ... def validate(self, signed_value: _can_become_bytes, max_age: Optional[int] = ...) -> bool: ... class Serializer: default_serializer = ... # type: _serializer default_signer = ... # type: Callable[..., Signer] secret_key = ... # type: Any salt = ... # type: _can_become_bytes serializer = ... # type: _serializer is_text_serializer = ... # type: bool signer = ... # type: Signer signer_kwargs = ... # type: MutableMapping def __init__(self, secret_key: _can_become_bytes, salt: Optional[_can_become_bytes]=b'', serializer: _serializer=None, signer: Optional[Callable[..., Signer]] = ..., signer_kwargs: Optional[MutableMapping] = ...) -> None: ... def load_payload(self, payload: Any, serializer: _serializer=None) -> Any: ... def dump_payload(self, *args, **kwargs) -> bytes: ... def make_signer(self, salt: Optional[_can_become_bytes] = ...) -> Signer: ... def dumps(self, obj: Any, salt: Optional[_can_become_bytes] = ...) -> _str_like: ... def dump(self, obj: Any, f: IO, salt: Optional[_can_become_bytes] = ...) -> None: ... def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ...) -> Any: ... def load(self, f: IO, salt: Optional[_can_become_bytes] = ...): ... def loads_unsafe(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ...) -> Tuple[bool, Any]: ... def load_unsafe(self, f: IO, *args, **kwargs) -> Tuple[bool, Any]: ... class TimedSerializer(Serializer): default_signer = ... # type: Callable[..., TimestampSigner] def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., max_age: Optional[int] = ..., return_timestamp: bool = ...) -> Any: ... def loads_unsafe(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., max_age: Optional[int] = ...) -> Tuple[bool, Any]: ... class JSONWebSignatureSerializer(Serializer): jws_algorithms = ... # type: MutableMapping[str, SigningAlgorithm] default_algorithm = ... # type: str default_serializer = ... # type: Any algorithm_name = ... # type: str algorithm = ... # type: Any def __init__(self, secret_key: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., serializer: _serializer=None, signer: Optional[Callable[..., Signer]] = ..., signer_kwargs: Optional[MutableMapping] = ..., algorithm_name: Optional[str] = ...) -> None: ... def load_payload(self, payload: Any, serializer: _serializer = None, return_header: bool = ...) -> Any: ... def dump_payload(self, *args, **kwargs) -> bytes: ... def make_algorithm(self, algorithm_name: str) -> SigningAlgorithm: ... def make_signer(self, salt: Optional[_can_become_bytes] = ..., algorithm_name: Optional[str] = ...) -> Signer: ... def make_header(self, header_fields=Optional[MutableMapping]) -> MutableMapping: ... def dumps(self, obj: Any, salt: Optional[_can_become_bytes] = ..., header_fields: Optional[MutableMapping]=...) -> str: ... def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., return_header: bool = ...) -> Any: ... def loads_unsafe(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., return_header: bool = ...) -> Tuple[bool, Any]: ... class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer): DEFAULT_EXPIRES_IN = ... # type: int expires_in = ... # type: int def __init__(self, secret_key: _can_become_bytes, expires_in: Optional[int] = ..., **kwargs) -> None: ... def make_header(self, header_fields=Optional[MutableMapping]) -> MutableMapping: ... 
def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., return_header: bool = ...) -> Any: ... def get_issue_date(self, header: MutableMapping) -> Optional[datetime]: ... def now(self) -> int: ... class URLSafeSerializerMixin: def load_payload(self, payload: Any, serializer: Any = ..., **kwargs) -> Any: ... def dump_payload(self, *args, **kwargs) -> bytes: ... class URLSafeSerializer(URLSafeSerializerMixin, Serializer): default_serializer = ... # type: Any class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer): default_serializer = ... # type: Any mypy-0.560/typeshed/third_party/3/jwt/0000755€tŠÔÚ€2›s®0000000000013215007244024016 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/jwt/__init__.pyi0000644€tŠÔÚ€2›s®0000000306013215007212026272 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Mapping, Any, Optional, Union from . import algorithms def decode(jwt: Union[str, bytes], key: Union[str, bytes] = ..., verify: bool = ..., algorithms: Optional[Any] = ..., options: Optional[Mapping[Any, Any]] = ..., **kwargs: Any) -> Mapping[str, Any]: ... def encode(payload: Mapping[str, Any], key: Union[str, bytes], algorithm: str = ..., headers: Optional[Mapping[str, Any]] = ..., json_encoder: Optional[Any] = ...) -> bytes: ... def register_algorithm(alg_id: str, alg_obj: algorithms.Algorithm) -> None: ... def unregister_algorithm(alg_id: str) -> None: ... class InvalidTokenError(Exception): pass class DecodeError(InvalidTokenError): pass class ExpiredSignatureError(InvalidTokenError): pass class InvalidAudienceError(InvalidTokenError): pass class InvalidIssuerError(InvalidTokenError): pass class InvalidIssuedAtError(InvalidTokenError): pass class ImmatureSignatureError(InvalidTokenError): pass class InvalidKeyError(Exception): pass class InvalidAlgorithmError(InvalidTokenError): pass class MissingRequiredClaimError(InvalidTokenError): ... # Compatibility aliases (deprecated) ExpiredSignature = ExpiredSignatureError InvalidAudience = InvalidAudienceError InvalidIssuer = InvalidIssuerError # These aren't actually documented, but the package # exports them in __init__.py, so we should at least # make sure that mypy doesn't raise spurious errors # if they're used. get_unverified_header = ... # type: Any PyJWT = ... # type: Any PyJWS = ... # type: Any mypy-0.560/typeshed/third_party/3/jwt/algorithms.pyi0000644€tŠÔÚ€2›s®0000000010213215007212026676 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class Algorithm(Any): ... # type: ignore mypy-0.560/typeshed/third_party/3/jwt/contrib/0000755€tŠÔÚ€2›s®0000000000013215007244025456 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/jwt/contrib/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212027721 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/jwt/contrib/algorithms/0000755€tŠÔÚ€2›s®0000000000013215007244027627 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/jwt/contrib/algorithms/__init__.pyi0000644€tŠÔÚ€2›s®0000000004613215007212032104 0ustar jukkaDROPBOX\Domain Users00000000000000from hashlib import _Hash as _HashAlg mypy-0.560/typeshed/third_party/3/jwt/contrib/algorithms/py_ecdsa.pyi0000644€tŠÔÚ€2›s®0000000042413215007212032134 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from jwt.algorithms import Algorithm from . import _HashAlg class ECAlgorithm(Algorithm): SHA256 = ... # type: _HashAlg SHA384 = ... # type: _HashAlg SHA512 = ... 
# type: _HashAlg def __init__(self, hash_alg: _HashAlg) -> None: ... mypy-0.560/typeshed/third_party/3/jwt/contrib/algorithms/pycrypto.pyi0000644€tŠÔÚ€2›s®0000000042513215007212032237 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from jwt.algorithms import Algorithm from . import _HashAlg class RSAAlgorithm(Algorithm): SHA256 = ... # type: _HashAlg SHA384 = ... # type: _HashAlg SHA512 = ... # type: _HashAlg def __init__(self, hash_alg: _HashAlg) -> None: ... mypy-0.560/typeshed/third_party/3/pkg_resources.pyi0000644€tŠÔÚ€2›s®0000003117213215007212026607 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for pkg_resources (Python 3.4) from typing import ( Any, Callable, Dict, IO, Iterable, Generator, Optional, Sequence, Tuple, List, Union, TypeVar, overload, ) import importlib.abc import sys import types import zipimport _T = TypeVar('_T') _NestedStr = Union[str, Iterable[Union[str, Iterable[Any]]]] _InstallerType = Callable[[Requirement], Optional[Distribution]] _EPDistType = Union[Distribution, Requirement, str] _MetadataType = Optional[IResourceProvider] _PkgReqType = Union[str, Requirement] _DistFinderType = Callable[[str, _Importer, bool], Generator[Distribution, None, None]] _NSHandlerType = Callable[[_Importer, str, str, types.ModuleType], str] def declare_namespace(name: str) -> None: ... def fixup_namespace_packages(path_item: str) -> None: ... class WorkingSet: entries = ... # type: List[str] def __init__(self, entries: Optional[Iterable[str]] = ...) -> None: ... def require(self, *requirements: _NestedStr) -> Sequence[Distribution]: ... def run_script(self, requires: str, script_name: str) -> None: ... def iter_entry_points(self, group: str, name: Optional[str] = ...) \ -> Generator[EntryPoint, None, None]: ... def add_entry(self, entry: str) -> None: ... def __contains__(self, dist: Distribution) -> bool: ... def __iter__(self) -> Generator[Distribution, None, None]: ... def find(self, req: Requirement) -> Optional[Distribution]: ... def resolve(self, requirements: Sequence[Requirement], env: Optional[Environment] = ..., installer: Optional[_InstallerType] = ...) \ -> List[Distribution]: ... def add(self, dist: Distribution, entry: Optional[str] = ..., insert: bool = ..., replace: bool = ...) -> None: ... def subscribe(self, callback: Callable[[Distribution], None]) -> None: ... def find_plugins(self, plugin_env: Environment, full_env: Optional[Environment] = ..., fallback: bool = ...) \ -> Tuple[List[Distribution], Dict[Distribution, Exception]]: ... working_set = ... # type: WorkingSet def require( *requirements: Union[str, Sequence[str]] ) -> Sequence[Distribution]: ... def run_script(requires: str, script_name: str) -> None: ... def iter_entry_points( group: str, name: Optional[str] = ... ) -> Generator[EntryPoint, None, None]: ... def add_activation_listener( callback: Callable[[Distribution], None] ) -> None: ... class Environment: def __init__(self, search_path: Optional[Sequence[str]] = ..., platform: Optional[str] = ..., python: Optional[str] = ...) -> None: ... def __getitem__(self, project_name: str) -> List[Distribution]: ... def __iter__(self) -> Generator[str, None, None]: ... def add(self, dist: Distribution) -> None: ... def remove(self, dist: Distribution) -> None: ... def can_add(self, dist: Distribution) -> bool: ... def __add__(self, other: Union[Distribution, Environment]) -> Environment: ... def __iadd__(self, other: Union[Distribution, Environment]) -> Environment: ... 
@overload def best_match(self, req: Requirement, working_set: WorkingSet) -> Distribution: ... @overload def best_match(self, req: Requirement, working_set: WorkingSet, installer: Callable[[Requirement], _T] = ...) -> _T: ... @overload def obtain(self, requirement: Requirement) -> None: ... @overload def obtain(self, requirement: Requirement, installer: Callable[[Requirement], _T] = ...) -> _T: ... def scan(self, search_path: Optional[Sequence[str]] = ...) -> None: ... def parse_requirements(strs: Union[str, Iterable[str]]) -> Generator[Requirement, None, None]: ... class Requirement: project_name = ... # type: str key = ... # type: str extras = ... # type: Tuple[str, ...] specs = ... # type: List[Tuple[str, str]] @staticmethod def parse(s: Union[str, Iterable[str]]) -> Requirement: ... def __contains__(self, item: Union[Distribution, str, Tuple[str, ...]]) \ -> bool: ... def __eq__(self, other_requirement: Any) -> bool: ... def load_entry_point(dist: _EPDistType, group: str, name: str) -> None: ... def get_entry_info(dist: _EPDistType, group: str, name: str) -> Optional[EntryPoint]: ... @overload def get_entry_map(dist: _EPDistType) -> Dict[str, Dict[str, EntryPoint]]: ... @overload def get_entry_map(dist: _EPDistType, group: str = ...) -> Dict[str, EntryPoint]: ... class EntryPoint: name = ... # type: str module_name = ... # type: str attrs = ... # type: Tuple[str, ...] extras = ... # type: Tuple[str, ...] dist = ... # type: Optional[Distribution] def __init__(self, name: str, module_name: str, attrs: Tuple[str, ...] = ..., extras: Tuple[str, ...] = ..., dist: Optional[Distribution] = ...) -> None: ... @classmethod def parse(cls, src: str, dist: Optional[Distribution] = ...) -> EntryPoint: ... @classmethod def parse_group(cls, group: str, lines: Union[str, Sequence[str]], dist: Optional[Distribution] = ...) -> Dict[str, EntryPoint]: ... @classmethod def parse_map(cls, data: Union[Dict[str, Union[str, Sequence[str]]], str, Sequence[str]], dist: Optional[Distribution] = ...) -> Dict[str, EntryPoint]: ... def load(self, require: bool = ..., env: Optional[Environment] = ..., installer: Optional[_InstallerType] = ...) -> Any: ... def require(self, env: Optional[Environment] = ..., installer: Optional[_InstallerType] = ...) -> None: ... def find_distributions( path_item: str, only: bool = ... ) -> Generator[Distribution, None, None]: ... def get_distribution(dist: Union[Requirement, str, Distribution]) -> Distribution: ... class Distribution(IResourceProvider, IMetadataProvider): location = ... # type: str project_name = ... # type: str key = ... # type: str extras = ... # type: List[str] version = ... # type: str parsed_version = ... # type: Tuple[str, ...] py_version = ... # type: str platform = ... # type: Optional[str] precedence = ... # type: int def __init__(self, location: Optional[str] = ..., metadata: Optional[str] = ..., project_name: Optional[str] = ..., version: Optional[str] = ..., py_version: str = ..., platform: Optional[str] = ..., precedence: int = ...) -> None: ... @classmethod def from_location(cls, location: str, basename: str, metadata: Optional[str] = ..., **kw: Union[str, None, int]) -> Distribution: ... @classmethod def from_filename(cls, filename: str, metadata: Optional[str] = ..., **kw: Union[str, None, int]) -> Distribution: ... def activate(self, path: Optional[List[str]] = ...) -> None: ... def as_requirement(self) -> Requirement: ... def requires(self, extras: Tuple[str, ...] = ...) -> List[Requirement]: ... 
def clone(self, **kw: Union[str, int, None]) -> Requirement: ... def egg_name(self) -> str: ... def __cmp__(self, other: Any) -> bool: ... def get_entry_info(dist: _EPDistType, group: str, name: str) -> Optional[EntryPoint]: ... @overload def get_entry_map(dist: _EPDistType) \ -> Dict[str, Dict[str, EntryPoint]]: ... @overload def get_entry_map(dist: _EPDistType, group: str = ...) \ -> Dict[str, EntryPoint]: ... def load_entry_point(dist: _EPDistType, group: str, name: str) -> None: ... EGG_DIST = ... # type: int BINARY_DIST = ... # type: int SOURCE_DIST = ... # type: int CHECKOUT_DIST = ... # type: int DEVELOP_DIST = ... # type: int def resource_exists(package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... def resource_stream(package_or_requirement: _PkgReqType, resource_name: str) -> IO[bytes]: ... def resource_string(package_or_requirement: _PkgReqType, resource_name: str) -> bytes: ... def resource_isdir(package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... def resource_listdir(package_or_requirement: _PkgReqType, resource_name: str) -> List[str]: ... def resource_filename(package_or_requirement: _PkgReqType, resource_name: str) -> str: ... def set_extraction_path(path: str) -> None: ... def cleanup_resources(force: bool = ...) -> List[str]: ... class IResourceManager: def resource_exists(self, package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... def resource_stream(self, package_or_requirement: _PkgReqType, resource_name: str) -> IO[bytes]: ... def resource_string(self, package_or_requirement: _PkgReqType, resource_name: str) -> bytes: ... def resource_isdir(self, package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... def resource_listdir(self, package_or_requirement: _PkgReqType, resource_name: str) -> List[str]: ... def resource_filename(self, package_or_requirement: _PkgReqType, resource_name: str) -> str: ... def set_extraction_path(self, path: str) -> None: ... def cleanup_resources(self, force: bool = ...) -> List[str]: ... def get_cache_path(self, archive_name: str, names: Tuple[str, ...] = ...) -> str: ... def extraction_error(self) -> None: ... def postprocess(self, tempname: str, filename: str) -> None: ... @overload def get_provider(package_or_requirement: str) -> IResourceProvider: ... @overload def get_provider(package_or_requirement: Requirement) -> Distribution: ... class IMetadataProvider: def has_metadata(self, name: str) -> bool: ... def metadata_isdir(self, name: str) -> bool: ... def metadata_listdir(self, name: str) -> List[str]: ... def get_metadata(self, name: str) -> str: ... def get_metadata_lines(self, name: str) -> Generator[List[str], None, None]: ... def run_script(self, script_name: str, namespace: Dict[str, Any]) -> None: ... class ResolutionError(Exception): ... class DistributionNotFound(ResolutionError): ... class VersionConflict(ResolutionError): ... class UnknownExtra(ResolutionError): ... class ExtractionError(Exception): manager = ... # type: IResourceManager cache_path = ... # type: str original_error = ... # type: Exception if sys.version_info >= (3, 3): class _Importer(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): ... else: class _Importer(importlib.abc.InspectLoader): ... def register_finder(importer_type: type, distribution_finder: _DistFinderType) -> None: ... def register_loader_type( loader_type: type, provider_factory: Callable[[types.ModuleType], IResourceProvider] ) -> None: ... 
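# Usage sketch (not part of the stub): a few of the pkg_resources entry points
# declared in this file. The distribution name and requirement string are
# illustrative; get_distribution() raises DistributionNotFound if the project is
# not installed.
import pkg_resources

dist = pkg_resources.get_distribution("setuptools")
print(dist.project_name, dist.version)

# Entry points are how installed packages advertise plugins and console scripts.
for ep in pkg_resources.iter_entry_points("console_scripts"):
    print(ep.name, "->", ep.module_name)

# Requirement objects support containment checks against distributions.
req = pkg_resources.Requirement.parse("setuptools>=30.0")
print(dist in req)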
def register_namespace_handler(importer_type: type, namespace_handler: _NSHandlerType) -> None: ... class IResourceProvider(IMetadataProvider): ... class NullProvider: ... class EggProvider(NullProvider): ... class DefaultProvider(EggProvider): ... class PathMetadata(DefaultProvider, IResourceProvider): def __init__(self, path: str, egg_info: str) -> None: ... class ZipProvider(EggProvider): ... class EggMetadata(ZipProvider, IResourceProvider): def __init__(self, zipimporter: zipimport.zipimporter) -> None: ... class EmptyProvider(NullProvider): ... empty_provider = ... # type: EmptyProvider class FileMetadata(EmptyProvider, IResourceProvider): def __init__(self, path_to_pkg_info: str) -> None: ... def parse_version(v: str) -> Tuple[str, ...]: ... def yield_lines(strs: _NestedStr) -> Generator[str, None, None]: ... def split_sections( strs: _NestedStr ) -> Generator[Tuple[Optional[str], str], None, None]: ... def safe_name(name: str) -> str: ... def safe_version(version: str) -> str: ... def safe_extra(extra: str) -> str: ... def to_filename(name_or_version: str) -> str: ... def get_build_platform() -> str: ... def get_platform() -> str: ... def get_supported_platform() -> str: ... def compatible_platforms(provided: Optional[str], required: Optional[str]) -> bool: ... def get_default_cache() -> str: ... def get_importer(path_item: str) -> _Importer: ... def ensure_directory(path: str) -> None: ... def normalize_path(filename: str) -> str: ... mypy-0.560/typeshed/third_party/3/six/0000755€tŠÔÚ€2›s®0000000000013215007244024015 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/six/__init__.pyi0000644€tŠÔÚ€2›s®0000000630713215007212026300 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six (Python 3.5) from __future__ import print_function from typing import ( Any, AnyStr, Callable, Dict, ItemsView, Iterable, KeysView, Mapping, Optional, Pattern, Tuple, Type, TypeVar, Union, ValuesView, overload, ) import types import typing import unittest from mypy_extensions import NoReturn # Exports from io import StringIO as StringIO, BytesIO as BytesIO from builtins import next as next from functools import wraps as wraps from . import moves _T = TypeVar('_T') _K = TypeVar('_K') _V = TypeVar('_V') # TODO make constant, then move this stub to 2and3 # https://github.com/python/typeshed/issues/17 PY2 = False PY3 = True PY34 = ... # type: bool string_types = str, integer_types = int, class_types = type, text_type = str binary_type = bytes MAXSIZE = ... # type: int # def add_move # def remove_move def callable(obj: object) -> bool: ... def get_unbound_function(unbound: types.FunctionType) -> types.FunctionType: ... def create_bound_method(func: types.FunctionType, obj: object) -> types.MethodType: ... def create_unbound_method(func: types.FunctionType, cls: type) -> types.FunctionType: ... Iterator = object def get_method_function(meth: types.MethodType) -> types.FunctionType: ... def get_method_self(meth: types.MethodType) -> Optional[object]: ... def get_function_closure(fun: types.FunctionType) -> Optional[Tuple[types._Cell, ...]]: ... def get_function_code(fun: types.FunctionType) -> types.CodeType: ... def get_function_defaults(fun: types.FunctionType) -> Optional[Tuple[Any, ...]]: ... def get_function_globals(fun: types.FunctionType) -> Dict[str, Any]: ... def iterkeys(d: Mapping[_K, _V]) -> typing.Iterator[_K]: ... def itervalues(d: Mapping[_K, _V]) -> typing.Iterator[_V]: ... def iteritems(d: Mapping[_K, _V]) -> typing.Iterator[Tuple[_K, _V]]: ... 
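# Usage sketch (not part of the stub): typical six helpers declared above; the
# dictionary and classes below are invented for illustration.
import six

config = {"host": "localhost", "port": 8080}
for key, value in six.iteritems(config):        # dict.items() on Python 3
    print(key, value)

assert isinstance("text", six.string_types)     # (str,) on Python 3

class Meta(type):
    pass

class Base(six.with_metaclass(Meta, object)):
    """A class whose metaclass works on both Python 2 and 3."""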
# def iterlists def viewkeys(d: Mapping[_K, _V]) -> KeysView[_K]: ... def viewvalues(d: Mapping[_K, _V]) -> ValuesView[_V]: ... def viewitems(d: Mapping[_K, _V]) -> ItemsView[_K, _V]: ... def b(s: str) -> binary_type: ... def u(s: str) -> text_type: ... unichr = chr def int2byte(i: int) -> bytes: ... def byte2int(bs: binary_type) -> int: ... def indexbytes(buf: binary_type, i: int) -> int: ... def iterbytes(buf: binary_type) -> typing.Iterator[int]: ... def assertCountEqual(self: unittest.TestCase, first: Iterable[_T], second: Iterable[_T], msg: Optional[str] = ...) -> None: ... @overload def assertRaisesRegex(self: unittest.TestCase, msg: Optional[str] = ...) -> Any: ... @overload def assertRaisesRegex(self: unittest.TestCase, callable_obj: Callable[..., Any], *args: Any, **kwargs: Any) -> Any: ... def assertRegex(self: unittest.TestCase, text: AnyStr, expected_regex: Union[AnyStr, Pattern[AnyStr]], msg: Optional[str] = ...) -> None: ... exec_ = exec def reraise(tp: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[types.TracebackType] = ...) -> NoReturn: ... def raise_from(value: BaseException, from_value: Optional[BaseException]) -> NoReturn: ... print_ = print def with_metaclass(meta: type, *bases: type) -> type: ... def add_metaclass(metaclass: type) -> Callable[[_T], _T]: ... def python_2_unicode_compatible(klass: _T) -> _T: ... mypy-0.560/typeshed/third_party/3/six/moves/0000755€tŠÔÚ€2›s®0000000000013215007244025146 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/six/moves/__init__.pyi0000644€tŠÔÚ€2›s®0000000522013215007212027422 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves # # Note: Commented out items means they weren't implemented at the time. # Uncomment them when the modules have been added to the typeshed. 
import sys from io import StringIO as cStringIO from builtins import filter as filter from itertools import filterfalse as filterfalse from builtins import input as input from sys import intern as intern from builtins import map as map from os import getcwd as getcwd from os import getcwdb as getcwdb from builtins import range as range from functools import reduce as reduce from shlex import quote as shlex_quote from io import StringIO as StringIO from collections import UserDict as UserDict from collections import UserList as UserList from collections import UserString as UserString from builtins import range as xrange from builtins import zip as zip from itertools import zip_longest as zip_longest import builtins as builtins import configparser as configparser # import copyreg as copyreg # import dbm.gnu as dbm_gnu import _dummy_thread as _dummy_thread import http.cookiejar as http_cookiejar import http.cookies as http_cookies import html.entities as html_entities import html.parser as html_parser import http.client as http_client import email.mime.multipart as email_mime_multipart import email.mime.nonmultipart as email_mime_nonmultipart import email.mime.text as email_mime_text import email.mime.base as email_mime_base import http.server as BaseHTTPServer import http.server as CGIHTTPServer import http.server as SimpleHTTPServer import pickle as cPickle import queue as queue # import reprlib as reprlib import socketserver as socketserver import _thread as _thread import tkinter as tkinter # import tkinter.dialog as tkinter_dialog # import tkinter.filedialog as tkinter_filedialog # import tkinter.scrolledtext as tkinter_scrolledtext # import tkinter.simpledialog as tkinter_simpledialog # import tkinter.tix as tkinter_tix import tkinter.ttk as tkinter_ttk import tkinter.constants as tkinter_constants # import tkinter.dnd as tkinter_dnd # import tkinter.colorchooser as tkinter_colorchooser # import tkinter.commondialog as tkinter_commondialog # import tkinter.filedialog as tkinter_tkfiledialog # import tkinter.font as tkinter_font # import tkinter.messagebox as tkinter_messagebox # import tkinter.simpledialog as tkinter_tksimpledialog import urllib.parse as urllib_parse import urllib.error as urllib_error import six.moves.urllib as urllib import urllib.robotparser as urllib_robotparser # import xmlrpc.client as xmlrpc_client # import xmlrpc.server as xmlrpc_server if sys.version_info >= (3, 4): from importlib import reload as reload_module else: from imp import reload as reload_module mypy-0.560/typeshed/third_party/3/six/moves/urllib/0000755€tŠÔÚ€2›s®0000000000013215007244026437 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/six/moves/urllib/__init__.pyi0000644€tŠÔÚ€2›s®0000000033113215007212030711 0ustar jukkaDROPBOX\Domain Users00000000000000import six.moves.urllib.error as error import six.moves.urllib.parse as parse import six.moves.urllib.request as request import six.moves.urllib.response as response import six.moves.urllib.robotparser as robotparser mypy-0.560/typeshed/third_party/3/six/moves/urllib/error.pyi0000644€tŠÔÚ€2›s®0000000024413215007212030306 0ustar jukkaDROPBOX\Domain Users00000000000000from urllib.error import URLError as URLError from urllib.error import HTTPError as HTTPError from urllib.error import ContentTooShortError as ContentTooShortError mypy-0.560/typeshed/third_party/3/six/moves/urllib/parse.pyi0000644€tŠÔÚ€2›s®0000000243613215007212030274 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves.urllib.parse # # 
Note: Commented out items means they weren't implemented at the time. # Uncomment them when the modules have been added to the typeshed. from urllib.parse import ParseResult as ParseResult from urllib.parse import SplitResult as SplitResult from urllib.parse import parse_qs as parse_qs from urllib.parse import parse_qsl as parse_qsl from urllib.parse import urldefrag as urldefrag from urllib.parse import urljoin as urljoin from urllib.parse import urlparse as urlparse from urllib.parse import urlsplit as urlsplit from urllib.parse import urlunparse as urlunparse from urllib.parse import urlunsplit as urlunsplit from urllib.parse import quote as quote from urllib.parse import quote_plus as quote_plus from urllib.parse import unquote as unquote from urllib.parse import unquote_plus as unquote_plus from urllib.parse import urlencode as urlencode # from urllib.parse import splitquery as splitquery # from urllib.parse import splittag as splittag # from urllib.parse import splituser as splituser from urllib.parse import uses_fragment as uses_fragment from urllib.parse import uses_netloc as uses_netloc from urllib.parse import uses_params as uses_params from urllib.parse import uses_query as uses_query from urllib.parse import uses_relative as uses_relative mypy-0.560/typeshed/third_party/3/six/moves/urllib/request.pyi0000644€tŠÔÚ€2›s®0000000427013215007212030650 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves.urllib.request # # Note: Commented out items means they weren't implemented at the time. # Uncomment them when the modules have been added to the typeshed. from urllib.request import urlopen as urlopen from urllib.request import install_opener as install_opener from urllib.request import build_opener as build_opener from urllib.request import pathname2url as pathname2url from urllib.request import url2pathname as url2pathname from urllib.request import getproxies as getproxies from urllib.request import Request as Request from urllib.request import OpenerDirector as OpenerDirector from urllib.request import HTTPDefaultErrorHandler as HTTPDefaultErrorHandler from urllib.request import HTTPRedirectHandler as HTTPRedirectHandler from urllib.request import HTTPCookieProcessor as HTTPCookieProcessor from urllib.request import ProxyHandler as ProxyHandler from urllib.request import BaseHandler as BaseHandler from urllib.request import HTTPPasswordMgr as HTTPPasswordMgr from urllib.request import HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm from urllib.request import AbstractBasicAuthHandler as AbstractBasicAuthHandler from urllib.request import HTTPBasicAuthHandler as HTTPBasicAuthHandler from urllib.request import ProxyBasicAuthHandler as ProxyBasicAuthHandler from urllib.request import AbstractDigestAuthHandler as AbstractDigestAuthHandler from urllib.request import HTTPDigestAuthHandler as HTTPDigestAuthHandler from urllib.request import ProxyDigestAuthHandler as ProxyDigestAuthHandler from urllib.request import HTTPHandler as HTTPHandler from urllib.request import HTTPSHandler as HTTPSHandler from urllib.request import FileHandler as FileHandler from urllib.request import FTPHandler as FTPHandler from urllib.request import CacheFTPHandler as CacheFTPHandler from urllib.request import UnknownHandler as UnknownHandler from urllib.request import HTTPErrorProcessor as HTTPErrorProcessor from urllib.request import urlretrieve as urlretrieve from urllib.request import urlcleanup as urlcleanup from urllib.request import URLopener as URLopener from 
urllib.request import FancyURLopener as FancyURLopener # from urllib.request import proxy_bypass as proxy_bypass mypy-0.560/typeshed/third_party/3/six/moves/urllib/response.pyi0000644€tŠÔÚ€2›s®0000000060513215007212031014 0ustar jukkaDROPBOX\Domain Users00000000000000# Stubs for six.moves.urllib.response # # Note: Commented out items means they weren't implemented at the time. # Uncomment them when the modules have been added to the typeshed. # from urllib.response import addbase as addbase # from urllib.response import addclosehook as addclosehook # from urllib.response import addinfo as addinfo from urllib.response import addinfourl as addinfourl mypy-0.560/typeshed/third_party/3/six/moves/urllib/robotparser.pyi0000644€tŠÔÚ€2›s®0000000010213215007212031510 0ustar jukkaDROPBOX\Domain Users00000000000000from urllib.robotparser import RobotFileParser as RobotFileParser mypy-0.560/typeshed/third_party/3/typed_ast/0000755€tŠÔÚ€2›s®0000000000013215007244025206 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/typed_ast/__init__.pyi0000644€tŠÔÚ€2›s®0000000017413215007212027465 0ustar jukkaDROPBOX\Domain Users00000000000000# This module is a fork of the CPython 2 and 3 ast modules with PEP 484 support. # See: https://github.com/python/typed_ast mypy-0.560/typeshed/third_party/3/typed_ast/ast27.pyi0000644€tŠÔÚ€2›s®0000002131313215007212026664 0ustar jukkaDROPBOX\Domain Users00000000000000import typing from typing import Any, Optional, Union, Generic, Iterator class NodeVisitor(): def visit(self, node: AST) -> Any: ... def generic_visit(self, node: AST) -> None: ... class NodeTransformer(NodeVisitor): def generic_visit(self, node: AST) -> None: ... def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> AST: ... def copy_location(new_node: AST, old_node: AST) -> AST: ... def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ... def fix_missing_locations(node: AST) -> AST: ... def get_docstring(node: AST, clean: bool = ...) -> Optional[bytes]: ... def increment_lineno(node: AST, n: int = ...) -> AST: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... def iter_fields(node: AST) -> Iterator[typing.Tuple[str, Any]]: ... def literal_eval(node_or_string: Union[str, AST]) -> Any: ... def walk(node: AST) -> Iterator[AST]: ... PyCF_ONLY_AST = ... # type: int # ast classes identifier = str class AST: _attributes = ... # type: typing.Tuple[str, ...] _fields = ... # type: typing.Tuple[str, ...] def __init__(self, *args, **kwargs) -> None: ... class mod(AST): ... class Module(mod): body = ... # type: typing.List[stmt] type_ignores = ... # type: typing.List[TypeIgnore] class Interactive(mod): body = ... # type: typing.List[stmt] class Expression(mod): body = ... # type: expr class FunctionType(mod): argtypes = ... # type: typing.List[expr] returns = ... # type: expr class Suite(mod): body = ... # type: typing.List[stmt] class stmt(AST): lineno = ... # type: int col_offset = ... # type: int class FunctionDef(stmt): name = ... # type: identifier args = ... # type: arguments body = ... # type: typing.List[stmt] decorator_list = ... # type: typing.List[expr] type_comment = ... # type: Optional[str] class ClassDef(stmt): name = ... # type: identifier bases = ... # type: typing.List[expr] body = ... # type: typing.List[stmt] decorator_list = ... # type: typing.List[expr] class Return(stmt): value = ... # type: Optional[expr] class Delete(stmt): targets = ... 
# type: typing.List[expr] class Assign(stmt): targets = ... # type: typing.List[expr] value = ... # type: expr type_comment = ... # type: Optional[str] class AugAssign(stmt): target = ... # type: expr op = ... # type: operator value = ... # type: expr class Print(stmt): dest = ... # type: Optional[expr] values = ... # type: typing.List[expr] nl = ... # type: bool class For(stmt): target = ... # type: expr iter = ... # type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] type_comment = ... # type: Optional[str] class While(stmt): test = ... # type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] class If(stmt): test = ... # type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] class With(stmt): context_expr = ... # type: expr optional_vars = ... # type: Optional[expr] body = ... # type: typing.List[stmt] type_comment = ... # type: Optional[str] class Raise(stmt): type = ... # type: Optional[expr] inst = ... # type: Optional[expr] tback = ... # type: Optional[expr] class TryExcept(stmt): body = ... # type: typing.List[stmt] handlers = ... # type: typing.List[ExceptHandler] orelse = ... # type: typing.List[stmt] class TryFinally(stmt): body = ... # type: typing.List[stmt] finalbody = ... # type: typing.List[stmt] class Assert(stmt): test = ... # type: expr msg = ... # type: Optional[expr] class Import(stmt): names = ... # type: typing.List[alias] class ImportFrom(stmt): module = ... # type: Optional[identifier] names = ... # type: typing.List[alias] level = ... # type: Optional[int] class Exec(stmt): body = ... # type: expr globals = ... # type: Optional[expr] locals = ... # type: Optional[expr] class Global(stmt): names = ... # type: typing.List[identifier] class Expr(stmt): value = ... # type: expr class Pass(stmt): ... class Break(stmt): ... class Continue(stmt): ... class slice(AST): ... _slice = slice # this lets us type the variable named 'slice' below class Slice(slice): lower = ... # type: Optional[expr] upper = ... # type: Optional[expr] step = ... # type: Optional[expr] class ExtSlice(slice): dims = ... # type: typing.List[slice] class Index(slice): value = ... # type: expr class Ellipsis(slice): ... class expr(AST): lineno = ... # type: int col_offset = ... # type: int class BoolOp(expr): op = ... # type: boolop values = ... # type: typing.List[expr] class BinOp(expr): left = ... # type: expr op = ... # type: operator right = ... # type: expr class UnaryOp(expr): op = ... # type: unaryop operand = ... # type: expr class Lambda(expr): args = ... # type: arguments body = ... # type: expr class IfExp(expr): test = ... # type: expr body = ... # type: expr orelse = ... # type: expr class Dict(expr): keys = ... # type: typing.List[expr] values = ... # type: typing.List[expr] class Set(expr): elts = ... # type: typing.List[expr] class ListComp(expr): elt = ... # type: expr generators = ... # type: typing.List[comprehension] class SetComp(expr): elt = ... # type: expr generators = ... # type: typing.List[comprehension] class DictComp(expr): key = ... # type: expr value = ... # type: expr generators = ... # type: typing.List[comprehension] class GeneratorExp(expr): elt = ... # type: expr generators = ... # type: typing.List[comprehension] class Yield(expr): value = ... # type: Optional[expr] class Compare(expr): left = ... # type: expr ops = ... # type: typing.List[cmpop] comparators = ... # type: typing.List[expr] class Call(expr): func = ... # type: expr args = ... 
# type: typing.List[expr] keywords = ... # type: typing.List[keyword] starargs = ... # type: Optional[expr] kwargs = ... # type: Optional[expr] class Repr(expr): value = ... # type: expr class Num(expr): n = ... # type: Union[int, float, complex] class Str(expr): s = ... # type: bytes class Attribute(expr): value = ... # type: expr attr = ... # type: identifier ctx = ... # type: expr_context class Subscript(expr): value = ... # type: expr slice = ... # type: _slice ctx = ... # type: expr_context class Name(expr): id = ... # type: identifier ctx = ... # type: expr_context class List(expr): elts = ... # type: typing.List[expr] ctx = ... # type: expr_context class Tuple(expr): elts = ... # type: typing.List[expr] ctx = ... # type: expr_context class expr_context(AST): ... class AugLoad(expr_context): ... class AugStore(expr_context): ... class Del(expr_context): ... class Load(expr_context): ... class Param(expr_context): ... class Store(expr_context): ... class boolop(AST): ... class And(boolop): ... class Or(boolop): ... class operator(AST): ... class Add(operator): ... class BitAnd(operator): ... class BitOr(operator): ... class BitXor(operator): ... class Div(operator): ... class FloorDiv(operator): ... class LShift(operator): ... class Mod(operator): ... class Mult(operator): ... class Pow(operator): ... class RShift(operator): ... class Sub(operator): ... class unaryop(AST): ... class Invert(unaryop): ... class Not(unaryop): ... class UAdd(unaryop): ... class USub(unaryop): ... class cmpop(AST): ... class Eq(cmpop): ... class Gt(cmpop): ... class GtE(cmpop): ... class In(cmpop): ... class Is(cmpop): ... class IsNot(cmpop): ... class Lt(cmpop): ... class LtE(cmpop): ... class NotEq(cmpop): ... class NotIn(cmpop): ... class comprehension(AST): target = ... # type: expr iter = ... # type: expr ifs = ... # type: typing.List[expr] class ExceptHandler(AST): type = ... # type: Optional[expr] name = ... # type: Optional[expr] body = ... # type: typing.List[stmt] lineno = ... # type: int col_offset = ... # type: int class arguments(AST): args = ... # type: typing.List[expr] vararg = ... # type: Optional[identifier] kwarg = ... # type: Optional[identifier] defaults = ... # type: typing.List[expr] type_comments = ... # type: typing.List[str] class keyword(AST): arg = ... # type: identifier value = ... # type: expr class alias(AST): name = ... # type: identifier asname = ... # type: Optional[identifier] class TypeIgnore(AST): lineno = ... # type: int mypy-0.560/typeshed/third_party/3/typed_ast/ast3.pyi0000644€tŠÔÚ€2›s®0000002413113215007212026577 0ustar jukkaDROPBOX\Domain Users00000000000000import typing from typing import Any, Optional, Union, Generic, Iterator class NodeVisitor(): def visit(self, node: AST) -> Any: ... def generic_visit(self, node: AST) -> None: ... class NodeTransformer(NodeVisitor): def generic_visit(self, node: AST) -> None: ... def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ..., feature_version: int = ...) -> AST: ... def copy_location(new_node: AST, old_node: AST) -> AST: ... def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ... def fix_missing_locations(node: AST) -> AST: ... def get_docstring(node: AST, clean: bool = ...) -> str: ... def increment_lineno(node: AST, n: int = ...) -> AST: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... def iter_fields(node: AST) -> Iterator[typing.Tuple[str, Any]]: ... def literal_eval(node_or_string: Union[str, AST]) -> Any: ... 
def walk(node: AST) -> Iterator[AST]: ... PyCF_ONLY_AST = ... # type: int # ast classes identifier = str class AST: _attributes = ... # type: typing.Tuple[str, ...] _fields = ... # type: typing.Tuple[str, ...] def __init__(self, *args, **kwargs) -> None: ... class mod(AST): ... class Module(mod): body = ... # type: typing.List[stmt] type_ignores = ... # type: typing.List[TypeIgnore] class Interactive(mod): body = ... # type: typing.List[stmt] class Expression(mod): body = ... # type: expr class FunctionType(mod): argtypes = ... # type: typing.List[expr] returns = ... # type: expr class Suite(mod): body = ... # type: typing.List[stmt] class stmt(AST): lineno = ... # type: int col_offset = ... # type: int class FunctionDef(stmt): name = ... # type: identifier args = ... # type: arguments body = ... # type: typing.List[stmt] decorator_list = ... # type: typing.List[expr] returns = ... # type: Optional[expr] type_comment = ... # type: Optional[str] class AsyncFunctionDef(stmt): name = ... # type: identifier args = ... # type: arguments body = ... # type: typing.List[stmt] decorator_list = ... # type: typing.List[expr] returns = ... # type: Optional[expr] type_comment = ... # type: Optional[str] class ClassDef(stmt): name = ... # type: identifier bases = ... # type: typing.List[expr] keywords = ... # type: typing.List[keyword] body = ... # type: typing.List[stmt] decorator_list = ... # type: typing.List[expr] class Return(stmt): value = ... # type: Optional[expr] class Delete(stmt): targets = ... # type: typing.List[expr] class Assign(stmt): targets = ... # type: typing.List[expr] value = ... # type: expr type_comment = ... # type: Optional[str] class AugAssign(stmt): target = ... # type: expr op = ... # type: operator value = ... # type: expr class AnnAssign(stmt): target = ... # type: expr annotation = ... # type: expr value = ... # type: Optional[expr] simple = ... # type: int class For(stmt): target = ... # type: expr iter = ... # type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] type_comment = ... # type: Optional[str] class AsyncFor(stmt): target = ... # type: expr iter = ... # type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] type_comment = ... # type: Optional[str] class While(stmt): test = ... # type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] class If(stmt): test = ... # type: expr body = ... # type: typing.List[stmt] orelse = ... # type: typing.List[stmt] class With(stmt): items = ... # type: typing.List[withitem] body = ... # type: typing.List[stmt] type_comment = ... # type: Optional[str] class AsyncWith(stmt): items = ... # type: typing.List[withitem] body = ... # type: typing.List[stmt] type_comment = ... # type: Optional[str] class Raise(stmt): exc = ... # type: Optional[expr] cause = ... # type: Optional[expr] class Try(stmt): body = ... # type: typing.List[stmt] handlers = ... # type: typing.List[ExceptHandler] orelse = ... # type: typing.List[stmt] finalbody = ... # type: typing.List[stmt] class Assert(stmt): test = ... # type: expr msg = ... # type: Optional[expr] class Import(stmt): names = ... # type: typing.List[alias] class ImportFrom(stmt): module = ... # type: Optional[identifier] names = ... # type: typing.List[alias] level = ... # type: Optional[int] class Global(stmt): names = ... # type: typing.List[identifier] class Nonlocal(stmt): names = ... # type: typing.List[identifier] class Expr(stmt): value = ... # type: expr class Pass(stmt): ... 
class Break(stmt): ... class Continue(stmt): ... class slice(AST): ... _slice = slice # this lets us type the variable named 'slice' below class Slice(slice): lower = ... # type: Optional[expr] upper = ... # type: Optional[expr] step = ... # type: Optional[expr] class ExtSlice(slice): dims = ... # type: typing.List[slice] class Index(slice): value = ... # type: expr class expr(AST): lineno = ... # type: int col_offset = ... # type: int class BoolOp(expr): op = ... # type: boolop values = ... # type: typing.List[expr] class BinOp(expr): left = ... # type: expr op = ... # type: operator right = ... # type: expr class UnaryOp(expr): op = ... # type: unaryop operand = ... # type: expr class Lambda(expr): args = ... # type: arguments body = ... # type: expr class IfExp(expr): test = ... # type: expr body = ... # type: expr orelse = ... # type: expr class Dict(expr): keys = ... # type: typing.List[expr] values = ... # type: typing.List[expr] class Set(expr): elts = ... # type: typing.List[expr] class ListComp(expr): elt = ... # type: expr generators = ... # type: typing.List[comprehension] class SetComp(expr): elt = ... # type: expr generators = ... # type: typing.List[comprehension] class DictComp(expr): key = ... # type: expr value = ... # type: expr generators = ... # type: typing.List[comprehension] class GeneratorExp(expr): elt = ... # type: expr generators = ... # type: typing.List[comprehension] class Await(expr): value = ... # type: expr class Yield(expr): value = ... # type: Optional[expr] class YieldFrom(expr): value = ... # type: expr class Compare(expr): left = ... # type: expr ops = ... # type: typing.List[cmpop] comparators = ... # type: typing.List[expr] class Call(expr): func = ... # type: expr args = ... # type: typing.List[expr] keywords = ... # type: typing.List[keyword] class Num(expr): n = ... # type: Union[int, float] class Str(expr): s = ... # type: str class FormattedValue(expr): value = ... # type: expr conversion = ... # type: typing.Optional[int] format_spec = ... # type: typing.Optional[expr] class JoinedStr(expr): values = ... # type: typing.List[expr] class Bytes(expr): s = ... # type: bytes class NameConstant(expr): value = ... # type: Any class Ellipsis(expr): ... class Attribute(expr): value = ... # type: expr attr = ... # type: identifier ctx = ... # type: expr_context class Subscript(expr): value = ... # type: expr slice = ... # type: _slice ctx = ... # type: expr_context class Starred(expr): value = ... # type: expr ctx = ... # type: expr_context class Name(expr): id = ... # type: identifier ctx = ... # type: expr_context class List(expr): elts = ... # type: typing.List[expr] ctx = ... # type: expr_context class Tuple(expr): elts = ... # type: typing.List[expr] ctx = ... # type: expr_context class expr_context(AST): ... class AugLoad(expr_context): ... class AugStore(expr_context): ... class Del(expr_context): ... class Load(expr_context): ... class Param(expr_context): ... class Store(expr_context): ... class boolop(AST): ... class And(boolop): ... class Or(boolop): ... class operator(AST): ... class Add(operator): ... class BitAnd(operator): ... class BitOr(operator): ... class BitXor(operator): ... class Div(operator): ... class FloorDiv(operator): ... class LShift(operator): ... class Mod(operator): ... class Mult(operator): ... class MatMult(operator): ... class Pow(operator): ... class RShift(operator): ... class Sub(operator): ... class unaryop(AST): ... class Invert(unaryop): ... class Not(unaryop): ... class UAdd(unaryop): ... class USub(unaryop): ... 
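# --- Added illustration (not part of the typeshed stubs) ---
# A small, hedged sketch of the typed_ast.ast3 API described by these stubs,
# assuming the typed_ast package is installed: parse Python 3 source and read
# the PEP 484 signature type comment that typed_ast attaches to FunctionDef.
from typed_ast import ast3

source = (
    "def add(x, y):\n"
    "    # type: (int, int) -> int\n"
    "    return x + y\n"
)
tree = ast3.parse(source)
for node in ast3.walk(tree):
    if isinstance(node, ast3.FunctionDef):
        # FunctionDef.type_comment is typed as Optional[str] in the stub above.
        print(node.name, node.type_comment)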
class cmpop(AST): ... class Eq(cmpop): ... class Gt(cmpop): ... class GtE(cmpop): ... class In(cmpop): ... class Is(cmpop): ... class IsNot(cmpop): ... class Lt(cmpop): ... class LtE(cmpop): ... class NotEq(cmpop): ... class NotIn(cmpop): ... class comprehension(AST): target = ... # type: expr iter = ... # type: expr ifs = ... # type: typing.List[expr] is_async = ... # type: int class ExceptHandler(AST): type = ... # type: Optional[expr] name = ... # type: Optional[identifier] body = ... # type: typing.List[stmt] lineno = ... # type: int col_offset = ... # type: int class arguments(AST): args = ... # type: typing.List[arg] vararg = ... # type: Optional[arg] kwonlyargs = ... # type: typing.List[arg] kw_defaults = ... # type: typing.List[expr] kwarg = ... # type: Optional[arg] defaults = ... # type: typing.List[expr] class arg(AST): arg = ... # type: identifier annotation = ... # type: Optional[expr] lineno = ... # type: int col_offset = ... # type: int type_comment = ... # type: typing.Optional[str] class keyword(AST): arg = ... # type: Optional[identifier] value = ... # type: expr class alias(AST): name = ... # type: identifier asname = ... # type: Optional[identifier] class withitem(AST): context_expr = ... # type: expr optional_vars = ... # type: Optional[expr] class TypeIgnore(AST): lineno = ... # type: int mypy-0.560/typeshed/third_party/3/typed_ast/conversions.pyi0000644€tŠÔÚ€2›s®0000000012413215007212030271 0ustar jukkaDROPBOX\Domain Users00000000000000from . import ast27 from . import ast3 def py2to3(ast: ast27.AST) -> ast3.AST: ... mypy-0.560/typeshed/third_party/3/werkzeug/0000755€tŠÔÚ€2›s®0000000000013215007244025055 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/werkzeug/__init__.pyi0000644€tŠÔÚ€2›s®0000001266113215007212027340 0ustar jukkaDROPBOX\Domain Users00000000000000from types import ModuleType from typing import Any from werkzeug import _internal from werkzeug import datastructures from werkzeug import debug from werkzeug import exceptions from werkzeug import formparser from werkzeug import http from werkzeug import local from werkzeug import security from werkzeug import serving from werkzeug import test from werkzeug import testapp from werkzeug import urls from werkzeug import useragents from werkzeug import utils from werkzeug import wrappers from werkzeug import wsgi class module(ModuleType): def __getattr__(self, name): ... def __dir__(self): ... __version__ = ... 
# type: Any run_simple = serving.run_simple test_app = testapp.test_app UserAgent = useragents.UserAgent _easteregg = _internal._easteregg DebuggedApplication = debug.DebuggedApplication MultiDict = datastructures.MultiDict CombinedMultiDict = datastructures.CombinedMultiDict Headers = datastructures.Headers EnvironHeaders = datastructures.EnvironHeaders ImmutableList = datastructures.ImmutableList ImmutableDict = datastructures.ImmutableDict ImmutableMultiDict = datastructures.ImmutableMultiDict TypeConversionDict = datastructures.TypeConversionDict ImmutableTypeConversionDict = datastructures.ImmutableTypeConversionDict Accept = datastructures.Accept MIMEAccept = datastructures.MIMEAccept CharsetAccept = datastructures.CharsetAccept LanguageAccept = datastructures.LanguageAccept RequestCacheControl = datastructures.RequestCacheControl ResponseCacheControl = datastructures.ResponseCacheControl ETags = datastructures.ETags HeaderSet = datastructures.HeaderSet WWWAuthenticate = datastructures.WWWAuthenticate Authorization = datastructures.Authorization FileMultiDict = datastructures.FileMultiDict CallbackDict = datastructures.CallbackDict FileStorage = datastructures.FileStorage OrderedMultiDict = datastructures.OrderedMultiDict ImmutableOrderedMultiDict = datastructures.ImmutableOrderedMultiDict escape = utils.escape environ_property = utils.environ_property append_slash_redirect = utils.append_slash_redirect redirect = utils.redirect cached_property = utils.cached_property import_string = utils.import_string dump_cookie = http.dump_cookie parse_cookie = http.parse_cookie unescape = utils.unescape format_string = utils.format_string find_modules = utils.find_modules header_property = utils.header_property html = utils.html xhtml = utils.xhtml HTMLBuilder = utils.HTMLBuilder validate_arguments = utils.validate_arguments ArgumentValidationError = utils.ArgumentValidationError bind_arguments = utils.bind_arguments secure_filename = utils.secure_filename BaseResponse = wrappers.BaseResponse BaseRequest = wrappers.BaseRequest Request = wrappers.Request Response = wrappers.Response AcceptMixin = wrappers.AcceptMixin ETagRequestMixin = wrappers.ETagRequestMixin ETagResponseMixin = wrappers.ETagResponseMixin ResponseStreamMixin = wrappers.ResponseStreamMixin CommonResponseDescriptorsMixin = wrappers.CommonResponseDescriptorsMixin UserAgentMixin = wrappers.UserAgentMixin AuthorizationMixin = wrappers.AuthorizationMixin WWWAuthenticateMixin = wrappers.WWWAuthenticateMixin CommonRequestDescriptorsMixin = wrappers.CommonRequestDescriptorsMixin Local = local.Local LocalManager = local.LocalManager LocalProxy = local.LocalProxy LocalStack = local.LocalStack release_local = local.release_local generate_password_hash = security.generate_password_hash check_password_hash = security.check_password_hash Client = test.Client EnvironBuilder = test.EnvironBuilder create_environ = test.create_environ run_wsgi_app = test.run_wsgi_app get_current_url = wsgi.get_current_url get_host = wsgi.get_host pop_path_info = wsgi.pop_path_info peek_path_info = wsgi.peek_path_info SharedDataMiddleware = wsgi.SharedDataMiddleware DispatcherMiddleware = wsgi.DispatcherMiddleware ClosingIterator = wsgi.ClosingIterator FileWrapper = wsgi.FileWrapper make_line_iter = wsgi.make_line_iter LimitedStream = wsgi.LimitedStream responder = wsgi.responder wrap_file = wsgi.wrap_file extract_path_info = wsgi.extract_path_info parse_etags = http.parse_etags parse_date = http.parse_date http_date = http.http_date cookie_date = http.cookie_date 
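# --- Added illustration (not part of the typeshed stubs) ---
# A minimal sketch of the core werkzeug names re-exported in this module
# (Request, Response, run_simple), assuming werkzeug is installed. This is
# the usual tiny WSGI application, not code taken from the stubs themselves.
from werkzeug.wrappers import Request, Response
from werkzeug.serving import run_simple

@Request.application
def application(request):
    # Echo a query-string parameter back to the client.
    name = request.args.get('name', 'world')
    return Response('Hello, %s!' % name, mimetype='text/plain')

if __name__ == '__main__':
    # run_simple is the development server exposed as werkzeug.run_simple.
    run_simple('127.0.0.1', 5000, application)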
parse_cache_control_header = http.parse_cache_control_header is_resource_modified = http.is_resource_modified parse_accept_header = http.parse_accept_header parse_set_header = http.parse_set_header quote_etag = http.quote_etag unquote_etag = http.unquote_etag generate_etag = http.generate_etag dump_header = http.dump_header parse_list_header = http.parse_list_header parse_dict_header = http.parse_dict_header parse_authorization_header = http.parse_authorization_header parse_www_authenticate_header = http.parse_www_authenticate_header remove_entity_headers = http.remove_entity_headers is_entity_header = http.is_entity_header remove_hop_by_hop_headers = http.remove_hop_by_hop_headers parse_options_header = http.parse_options_header dump_options_header = http.dump_options_header is_hop_by_hop_header = http.is_hop_by_hop_header unquote_header_value = http.unquote_header_value quote_header_value = http.quote_header_value HTTP_STATUS_CODES = http.HTTP_STATUS_CODES url_decode = urls.url_decode url_encode = urls.url_encode url_quote = urls.url_quote url_quote_plus = urls.url_quote_plus url_unquote = urls.url_unquote url_unquote_plus = urls.url_unquote_plus url_fix = urls.url_fix Href = urls.Href iri_to_uri = urls.iri_to_uri uri_to_iri = urls.uri_to_iri parse_form_data = formparser.parse_form_data abort = exceptions.Aborter Aborter = exceptions.Aborter mypy-0.560/typeshed/third_party/3/werkzeug/_compat.pyi0000644€tŠÔÚ€2›s®0000000225713215007212027223 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from io import StringIO as BytesIO PY2 = ... # type: Any WIN = ... # type: Any unichr = ... # type: Any text_type = ... # type: Any string_types = ... # type: Any integer_types = ... # type: Any iterkeys = ... # type: Any itervalues = ... # type: Any iteritems = ... # type: Any iterlists = ... # type: Any iterlistvalues = ... # type: Any int_to_byte = ... # type: Any iter_bytes = ... # type: Any def fix_tuple_repr(obj): ... def implements_iterator(cls): ... def implements_to_string(cls): ... def native_string_result(func): ... def implements_bool(cls): ... range_type = ... # type: Any NativeStringIO = ... # type: Any def make_literal_wrapper(reference): ... def normalize_string_tuple(tup): ... def try_coerce_native(s): ... wsgi_get_bytes = ... # type: Any def wsgi_decoding_dance(s, charset='', errors=''): ... def wsgi_encoding_dance(s, charset='', errors=''): ... def to_bytes(x, charset=..., errors=''): ... def to_native(x, charset=..., errors=''): ... def reraise(tp, value, tb=None): ... imap = ... # type: Any izip = ... # type: Any ifilter = ... # type: Any def to_unicode(x, charset=..., errors='', allow_none_charset=False): ... mypy-0.560/typeshed/third_party/3/werkzeug/_internal.pyi0000644€tŠÔÚ€2›s®0000000103713215007212027547 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class _Missing: def __reduce__(self): ... class _DictAccessorProperty: read_only = ... # type: Any name = ... # type: Any default = ... # type: Any load_func = ... # type: Any dump_func = ... # type: Any __doc__ = ... # type: Any def __init__(self, name, default=None, load_func=None, dump_func=None, read_only=None, doc=None): ... def __get__(self, obj, type=None): ... def __set__(self, obj, value): ... def __delete__(self, obj): ... def _easteregg(app=None): ... mypy-0.560/typeshed/third_party/3/werkzeug/_reloader.pyi0000644€tŠÔÚ€2›s®0000000157413215007212027536 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class ReloaderLoop: name = ... # type: Any extra_files = ... 
# type: Any interval = ... # type: Any def __init__(self, extra_files=None, interval=1): ... def run(self): ... def restart_with_reloader(self): ... def trigger_reload(self, filename): ... def log_reload(self, filename): ... class StatReloaderLoop(ReloaderLoop): name = ... # type: Any def run(self): ... class WatchdogReloaderLoop(ReloaderLoop): observable_paths = ... # type: Any name = ... # type: Any observer_class = ... # type: Any event_handler = ... # type: Any should_reload = ... # type: Any def __init__(self, *args, **kwargs): ... def trigger_reload(self, filename): ... def run(self): ... reloader_loops = ... # type: Any def run_with_reloader(main_func, extra_files=None, interval=1, reloader_type=''): ... mypy-0.560/typeshed/third_party/3/werkzeug/contrib/0000755€tŠÔÚ€2›s®0000000000013215007244026515 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/werkzeug/contrib/__init__.pyi0000644€tŠÔÚ€2›s®0000000000013215007212030760 0ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/werkzeug/contrib/atom.pyi0000644€tŠÔÚ€2›s®0000000274413215007212030202 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any XHTML_NAMESPACE = ... # type: Any def format_iso8601(obj): ... class AtomFeed: default_generator = ... # type: Any title = ... # type: Any title_type = ... # type: Any url = ... # type: Any feed_url = ... # type: Any id = ... # type: Any updated = ... # type: Any author = ... # type: Any icon = ... # type: Any logo = ... # type: Any rights = ... # type: Any rights_type = ... # type: Any subtitle = ... # type: Any subtitle_type = ... # type: Any generator = ... # type: Any links = ... # type: Any entries = ... # type: Any def __init__(self, title=None, entries=None, **kwargs): ... def add(self, *args, **kwargs): ... def generate(self): ... def to_string(self): ... def get_response(self): ... def __call__(self, environ, start_response): ... class FeedEntry: title = ... # type: Any title_type = ... # type: Any content = ... # type: Any content_type = ... # type: Any url = ... # type: Any id = ... # type: Any updated = ... # type: Any summary = ... # type: Any summary_type = ... # type: Any author = ... # type: Any published = ... # type: Any rights = ... # type: Any links = ... # type: Any categories = ... # type: Any xml_base = ... # type: Any def __init__(self, title=None, content=None, feed_url=None, **kwargs): ... def generate(self): ... def to_string(self): ... mypy-0.560/typeshed/third_party/3/werkzeug/contrib/cache.pyi0000644€tŠÔÚ€2›s®0000000555413215007212030307 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class BaseCache: default_timeout = ... # type: Any def __init__(self, default_timeout=300): ... def get(self, key): ... def delete(self, key): ... def get_many(self, *keys): ... def get_dict(self, *keys): ... def set(self, key, value, timeout=None): ... def add(self, key, value, timeout=None): ... def set_many(self, mapping, timeout=None): ... def delete_many(self, *keys): ... def has(self, key): ... def clear(self): ... def inc(self, key, delta=1): ... def dec(self, key, delta=1): ... class NullCache(BaseCache): ... class SimpleCache(BaseCache): clear = ... # type: Any def __init__(self, threshold=500, default_timeout=300): ... def get(self, key): ... def set(self, key, value, timeout=None): ... def add(self, key, value, timeout=None): ... def delete(self, key): ... def has(self, key): ... class MemcachedCache(BaseCache): key_prefix = ... 
# type: Any def __init__(self, servers=None, default_timeout=300, key_prefix=None): ... def get(self, key): ... def get_dict(self, *keys): ... def add(self, key, value, timeout=None): ... def set(self, key, value, timeout=None): ... def get_many(self, *keys): ... def set_many(self, mapping, timeout=None): ... def delete(self, key): ... def delete_many(self, *keys): ... def has(self, key): ... def clear(self): ... def inc(self, key, delta=1): ... def dec(self, key, delta=1): ... def import_preferred_memcache_lib(self, servers): ... GAEMemcachedCache = ... # type: Any class RedisCache(BaseCache): key_prefix = ... # type: Any def __init__(self, host='', port=6379, password=None, db=0, default_timeout=300, key_prefix=None, **kwargs): ... def dump_object(self, value): ... def load_object(self, value): ... def get(self, key): ... def get_many(self, *keys): ... def set(self, key, value, timeout=None): ... def add(self, key, value, timeout=None): ... def set_many(self, mapping, timeout=None): ... def delete(self, key): ... def delete_many(self, *keys): ... def has(self, key): ... def clear(self): ... def inc(self, key, delta=1): ... def dec(self, key, delta=1): ... class FileSystemCache(BaseCache): def __init__(self, cache_dir, threshold=500, default_timeout=300, mode=384): ... def clear(self): ... def get(self, key): ... def add(self, key, value, timeout=None): ... def set(self, key, value, timeout=None): ... def delete(self, key): ... def has(self, key): ... class UWSGICache(BaseCache): cache = ... # type: Any def __init__(self, default_timeout=300, cache=''): ... def get(self, key): ... def delete(self, key): ... def set(self, key, value, timeout=None): ... def add(self, key, value, timeout=None): ... def clear(self): ... def has(self, key): ... mypy-0.560/typeshed/third_party/3/werkzeug/contrib/fixers.pyi0000644€tŠÔÚ€2›s®0000000230613215007212030534 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class CGIRootFix: app = ... # type: Any app_root = ... # type: Any def __init__(self, app, app_root=''): ... def __call__(self, environ, start_response): ... LighttpdCGIRootFix = ... # type: Any class PathInfoFromRequestUriFix: app = ... # type: Any def __init__(self, app): ... def __call__(self, environ, start_response): ... class ProxyFix: app = ... # type: Any num_proxies = ... # type: Any def __init__(self, app, num_proxies=1): ... def get_remote_addr(self, forwarded_for): ... def __call__(self, environ, start_response): ... class HeaderRewriterFix: app = ... # type: Any remove_headers = ... # type: Any add_headers = ... # type: Any def __init__(self, app, remove_headers=None, add_headers=None): ... def __call__(self, environ, start_response): ... class InternetExplorerFix: app = ... # type: Any fix_vary = ... # type: Any fix_attach = ... # type: Any def __init__(self, app, fix_vary=True, fix_attach=True): ... def fix_headers(self, environ, headers, status=None): ... def run_fixed(self, environ, start_response): ... def __call__(self, environ, start_response): ... mypy-0.560/typeshed/third_party/3/werkzeug/contrib/iterio.pyi0000644€tŠÔÚ€2›s®0000000204613215007212030530 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any greenlet = ... # type: Any class IterIO: def __new__(cls, obj, sentinel=''): ... def __iter__(self): ... def tell(self): ... def isatty(self): ... def seek(self, pos, mode=0): ... def truncate(self, size=None): ... def write(self, s): ... def writelines(self, list): ... def read(self, n=-1): ... def readlines(self, sizehint=0): ... 
def readline(self, length=None): ... def flush(self): ... def __next__(self): ... class IterI(IterIO): def __new__(cls, func, sentinel=''): ... closed = ... # type: Any def close(self): ... def write(self, s): ... def writelines(self, list): ... def flush(self): ... class IterO(IterIO): sentinel = ... # type: Any closed = ... # type: Any pos = ... # type: Any def __new__(cls, gen, sentinel=''): ... def __iter__(self): ... def close(self): ... def seek(self, pos, mode=0): ... def read(self, n=-1): ... def readline(self, length=None): ... def readlines(self, sizehint=0): ... mypy-0.560/typeshed/third_party/3/werkzeug/contrib/jsrouting.pyi0000644€tŠÔÚ€2›s®0000000047313215007212031263 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def dumps(*args): ... def render_template(name_parts, rules, converters): ... def generate_map(map, name=''): ... def generate_adapter(adapter, name='', map_name=''): ... def js_to_url_function(converter): ... def NumberConverter_js_to_url(conv): ... js_to_url_functions = ... # type: Any mypy-0.560/typeshed/third_party/3/werkzeug/contrib/limiter.pyi0000644€tŠÔÚ€2›s®0000000033413215007212030700 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class StreamLimitMiddleware: app = ... # type: Any maximum_size = ... # type: Any def __init__(self, app, maximum_size=...): ... def __call__(self, environ, start_response): ... mypy-0.560/typeshed/third_party/3/werkzeug/contrib/lint.pyi0000644€tŠÔÚ€2›s®0000000223713215007212030205 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class WSGIWarning(Warning): ... class HTTPWarning(Warning): ... def check_string(context, obj, stacklevel=3): ... class InputStream: def __init__(self, stream): ... def read(self, *args): ... def readline(self, *args): ... def __iter__(self): ... def close(self): ... class ErrorStream: def __init__(self, stream): ... def write(self, s): ... def flush(self): ... def writelines(self, seq): ... def close(self): ... class GuardedWrite: def __init__(self, write, chunks): ... def __call__(self, s): ... class GuardedIterator: closed = ... # type: Any headers_set = ... # type: Any chunks = ... # type: Any def __init__(self, iterator, headers_set, chunks): ... def __iter__(self): ... def next(self): ... def close(self): ... def __del__(self): ... class LintMiddleware: app = ... # type: Any def __init__(self, app): ... def check_environ(self, environ): ... def check_start_response(self, status, headers, exc_info): ... def check_headers(self, headers): ... def check_iterator(self, app_iter): ... def __call__(self, *args, **kwargs): ... mypy-0.560/typeshed/third_party/3/werkzeug/contrib/profiler.pyi0000644€tŠÔÚ€2›s®0000000074013215007212031056 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any available = ... # type: Any class MergeStream: streams = ... # type: Any def __init__(self, *streams): ... def write(self, data): ... class ProfilerMiddleware: def __init__(self, app, stream=None, sort_by=..., restrictions=..., profile_dir=None): ... def __call__(self, environ, start_response): ... def make_action(app_factory, hostname='', port=5000, threaded=False, processes=1, stream=None, sort_by=..., restrictions=...): ... 
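# --- Added illustration (not part of the typeshed stubs) ---
# A hedged usage sketch of the werkzeug.contrib.cache API stubbed above,
# assuming a werkzeug version that still ships werkzeug.contrib: SimpleCache
# is the in-memory BaseCache implementation, and get/set/delete are the
# methods declared in the stub.
from werkzeug.contrib.cache import SimpleCache

cache = SimpleCache(threshold=500, default_timeout=300)
cache.set('answer', 42, timeout=60)    # keep the value for 60 seconds
assert cache.get('answer') == 42
cache.delete('answer')
assert cache.get('answer') is None     # a miss returns None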
mypy-0.560/typeshed/third_party/3/werkzeug/contrib/securecookie.pyi0000644€tŠÔÚ€2›s®0000000172313215007212031716 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from hmac import new as hmac from hashlib import sha1 as _default_hash from werkzeug.contrib.sessions import ModificationTrackingDict class UnquoteError(Exception): ... class SecureCookie(ModificationTrackingDict): hash_method = ... # type: Any serialization_method = ... # type: Any quote_base64 = ... # type: Any secret_key = ... # type: Any new = ... # type: Any def __init__(self, data=None, secret_key=None, new=True): ... @property def should_save(self): ... @classmethod def quote(cls, value): ... @classmethod def unquote(cls, value): ... def serialize(self, expires=None): ... @classmethod def unserialize(cls, string, secret_key): ... @classmethod def load_cookie(cls, request, key='', secret_key=None): ... def save_cookie(self, response, key='', expires=None, session_expires=None, max_age=None, path='', domain=None, secure=None, httponly=False, force=False): ... mypy-0.560/typeshed/third_party/3/werkzeug/contrib/sessions.pyi0000644€tŠÔÚ€2›s®0000000354513215007212031110 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug.datastructures import CallbackDict def generate_key(salt=None): ... class ModificationTrackingDict(CallbackDict): modified = ... # type: Any def __init__(self, *args, **kwargs): ... def copy(self): ... def __copy__(self): ... class Session(ModificationTrackingDict): sid = ... # type: Any new = ... # type: Any def __init__(self, data, sid, new=False): ... @property def should_save(self): ... class SessionStore: session_class = ... # type: Any def __init__(self, session_class=None): ... def is_valid_key(self, key): ... def generate_key(self, salt=None): ... def new(self): ... def save(self, session): ... def save_if_modified(self, session): ... def delete(self, session): ... def get(self, sid): ... class FilesystemSessionStore(SessionStore): path = ... # type: Any filename_template = ... # type: Any renew_missing = ... # type: Any mode = ... # type: Any def __init__(self, path=None, filename_template='', session_class=None, renew_missing=False, mode=420): ... def get_session_filename(self, sid): ... def save(self, session): ... def delete(self, session): ... def get(self, sid): ... def list(self): ... class SessionMiddleware: app = ... # type: Any store = ... # type: Any cookie_name = ... # type: Any cookie_age = ... # type: Any cookie_expires = ... # type: Any cookie_path = ... # type: Any cookie_domain = ... # type: Any cookie_secure = ... # type: Any cookie_httponly = ... # type: Any environ_key = ... # type: Any def __init__(self, app, store, cookie_name='', cookie_age=None, cookie_expires=None, cookie_path='', cookie_domain=None, cookie_secure=None, cookie_httponly=False, environ_key=''): ... def __call__(self, environ, start_response): ... mypy-0.560/typeshed/third_party/3/werkzeug/contrib/testtools.pyi0000644€tŠÔÚ€2›s®0000000032313215007212031271 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug.wrappers import Response class ContentAccessors: def xml(self): ... def lxml(self): ... def json(self): ... class TestResponse(Response, ContentAccessors): ... mypy-0.560/typeshed/third_party/3/werkzeug/contrib/wrappers.pyi0000644€tŠÔÚ€2›s®0000000125713215007212031103 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def is_known_charset(charset): ... class JSONRequestMixin: def json(self): ... 
class ProtobufRequestMixin: protobuf_check_initialization = ... # type: Any def parse_protobuf(self, proto_type): ... class RoutingArgsRequestMixin: routing_args = ... # type: Any routing_vars = ... # type: Any class ReverseSlashBehaviorRequestMixin: def path(self): ... def script_root(self): ... class DynamicCharsetRequestMixin: default_charset = ... # type: Any def unknown_charset(self, charset): ... def charset(self): ... class DynamicCharsetResponseMixin: default_charset = ... # type: Any charset = ... # type: Any mypy-0.560/typeshed/third_party/3/werkzeug/datastructures.pyi0000644€tŠÔÚ€2›s®0000003021013215007212030644 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from collections import Container, Iterable, Mapping, MutableSet def is_immutable(self): ... def iter_multi_items(mapping): ... def native_itermethods(names): ... class ImmutableListMixin: def __hash__(self): ... def __reduce_ex__(self, protocol): ... def __delitem__(self, key): ... def __delslice__(self, i, j): ... def __iadd__(self, other): ... __imul__ = ... # type: Any def __setitem__(self, key, value): ... def __setslice__(self, i, j, value): ... def append(self, item): ... remove = ... # type: Any def extend(self, iterable): ... def insert(self, pos, value): ... def pop(self, index=-1): ... def reverse(self): ... def sort(self, cmp=None, key=None, reverse=None): ... class ImmutableList(ImmutableListMixin, list): ... class ImmutableDictMixin: @classmethod def fromkeys(cls, *args, **kwargs): ... def __reduce_ex__(self, protocol): ... def __hash__(self): ... def setdefault(self, key, default=None): ... def update(self, *args, **kwargs): ... def pop(self, key, default=None): ... def popitem(self): ... def __setitem__(self, key, value): ... def __delitem__(self, key): ... def clear(self): ... class ImmutableMultiDictMixin(ImmutableDictMixin): def __reduce_ex__(self, protocol): ... def add(self, key, value): ... def popitemlist(self): ... def poplist(self, key): ... def setlist(self, key, new_list): ... def setlistdefault(self, key, default_list=None): ... class UpdateDictMixin: on_update = ... # type: Any def calls_update(name): ... def setdefault(self, key, default=None): ... def pop(self, key, default=...): ... __setitem__ = ... # type: Any __delitem__ = ... # type: Any clear = ... # type: Any popitem = ... # type: Any update = ... # type: Any class TypeConversionDict(dict): def get(self, key, default=None, type=None): ... class ImmutableTypeConversionDict(ImmutableDictMixin, TypeConversionDict): def copy(self): ... def __copy__(self): ... class ViewItems: def __init__(self, multi_dict, method, repr_name, *a, **kw): ... def __iter__(self): ... class MultiDict(TypeConversionDict): def __init__(self, mapping=None): ... def __getitem__(self, key): ... def __setitem__(self, key, value): ... def add(self, key, value): ... def getlist(self, key, type=None): ... def setlist(self, key, new_list): ... def setdefault(self, key, default=None): ... def setlistdefault(self, key, default_list=None): ... def items(self, multi=False): ... def lists(self): ... def keys(self): ... __iter__ = ... # type: Any def values(self): ... def listvalues(self): ... def copy(self): ... def deepcopy(self, memo=None): ... def to_dict(self, flat=True): ... def update(self, other_dict): ... def pop(self, key, default=...): ... def popitem(self): ... def poplist(self, key): ... def popitemlist(self): ... def __copy__(self): ... def __deepcopy__(self, memo): ... class _omd_bucket: prev = ... # type: Any key = ... 
# type: Any value = ... # type: Any next = ... # type: Any def __init__(self, omd, key, value): ... def unlink(self, omd): ... class OrderedMultiDict(MultiDict): def __init__(self, mapping=None): ... def __eq__(self, other): ... def __ne__(self, other): ... def __reduce_ex__(self, protocol): ... def __getitem__(self, key): ... def __setitem__(self, key, value): ... def __delitem__(self, key): ... def keys(self): ... __iter__ = ... # type: Any def values(self): ... def items(self, multi=False): ... def lists(self): ... def listvalues(self): ... def add(self, key, value): ... def getlist(self, key, type=None): ... def setlist(self, key, new_list): ... def setlistdefault(self, key, default_list=None): ... def update(self, mapping): ... def poplist(self, key): ... def pop(self, key, default=...): ... def popitem(self): ... def popitemlist(self): ... class Headers(Mapping): def __init__(self, defaults=None): ... def __getitem__(self, key, _get_mode=False): ... def __eq__(self, other): ... def __ne__(self, other): ... def get(self, key, default=None, type=None, as_bytes=False): ... def getlist(self, key, type=None, as_bytes=False): ... def get_all(self, name): ... def items(self, lower=False): ... def keys(self, lower=False): ... def values(self): ... def extend(self, iterable): ... def __delitem__(self, key): ... def remove(self, key): ... def pop(self, **kwargs): ... def popitem(self): ... def __contains__(self, key): ... has_key = ... # type: Any def __iter__(self): ... def __len__(self): ... def add(self, _key, _value, **kw): ... def add_header(self, _key, _value, **_kw): ... def clear(self): ... def set(self, _key, _value, **kw): ... def setdefault(self, key, value): ... def __setitem__(self, key, value): ... def to_list(self, charset=''): ... def to_wsgi_list(self): ... def copy(self): ... def __copy__(self): ... class ImmutableHeadersMixin: def __delitem__(self, key): ... def __setitem__(self, key, value): ... set = ... # type: Any def add(self, *args, **kwargs): ... remove = ... # type: Any add_header = ... # type: Any def extend(self, iterable): ... def insert(self, pos, value): ... def pop(self, **kwargs): ... def popitem(self): ... def setdefault(self, key, default): ... class EnvironHeaders(ImmutableHeadersMixin, Headers): environ = ... # type: Any def __init__(self, environ): ... def __eq__(self, other): ... def __getitem__(self, key, _get_mode=False): ... def __len__(self): ... def __iter__(self): ... def copy(self): ... class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict): def __reduce_ex__(self, protocol): ... dicts = ... # type: Any def __init__(self, dicts=None): ... @classmethod def fromkeys(cls): ... def __getitem__(self, key): ... def get(self, key, default=None, type=None): ... def getlist(self, key, type=None): ... def keys(self): ... __iter__ = ... # type: Any def items(self, multi=False): ... def values(self): ... def lists(self): ... def listvalues(self): ... def copy(self): ... def to_dict(self, flat=True): ... def __len__(self): ... def __contains__(self, key): ... has_key = ... # type: Any class FileMultiDict(MultiDict): def add_file(self, name, file, filename=None, content_type=None): ... class ImmutableDict(ImmutableDictMixin, dict): def copy(self): ... def __copy__(self): ... class ImmutableMultiDict(ImmutableMultiDictMixin, MultiDict): def copy(self): ... def __copy__(self): ... class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict): def copy(self): ... def __copy__(self): ... class Accept(ImmutableList): provided = ... 
# type: Any def __init__(self, values=...): ... def __getitem__(self, key): ... def quality(self, key): ... def __contains__(self, value): ... def index(self, key): ... def find(self, key): ... def values(self): ... def to_header(self): ... def best_match(self, matches, default=None): ... @property def best(self): ... class MIMEAccept(Accept): @property def accept_html(self): ... @property def accept_xhtml(self): ... @property def accept_json(self): ... class LanguageAccept(Accept): ... class CharsetAccept(Accept): ... def cache_property(key, empty, type): ... class _CacheControl(UpdateDictMixin, dict): no_cache = ... # type: Any no_store = ... # type: Any max_age = ... # type: Any no_transform = ... # type: Any on_update = ... # type: Any provided = ... # type: Any def __init__(self, values=..., on_update=None): ... def to_header(self): ... class RequestCacheControl(ImmutableDictMixin, _CacheControl): max_stale = ... # type: Any min_fresh = ... # type: Any no_transform = ... # type: Any only_if_cached = ... # type: Any class ResponseCacheControl(_CacheControl): public = ... # type: Any private = ... # type: Any must_revalidate = ... # type: Any proxy_revalidate = ... # type: Any s_maxage = ... # type: Any class CallbackDict(UpdateDictMixin, dict): on_update = ... # type: Any def __init__(self, initial=None, on_update=None): ... class HeaderSet(MutableSet): on_update = ... # type: Any def __init__(self, headers=None, on_update=None): ... def add(self, header): ... def remove(self, header): ... def update(self, iterable): ... def discard(self, header): ... def find(self, header): ... def index(self, header): ... def clear(self): ... def as_set(self, preserve_casing=False): ... def to_header(self): ... def __getitem__(self, idx): ... def __delitem__(self, idx): ... def __setitem__(self, idx, value): ... def __contains__(self, header): ... def __len__(self): ... def __iter__(self): ... def __nonzero__(self): ... class ETags(Container, Iterable): star_tag = ... # type: Any def __init__(self, strong_etags=None, weak_etags=None, star_tag=False): ... def as_set(self, include_weak=False): ... def is_weak(self, etag): ... def contains_weak(self, etag): ... def contains(self, etag): ... def contains_raw(self, etag): ... def to_header(self): ... def __call__(self, etag=None, data=None, include_weak=False): ... def __bool__(self): ... __nonzero__ = ... # type: Any def __iter__(self): ... def __contains__(self, etag): ... class IfRange: etag = ... # type: Any date = ... # type: Any def __init__(self, etag=None, date=None): ... def to_header(self): ... class Range: units = ... # type: Any ranges = ... # type: Any def __init__(self, units, ranges): ... def range_for_length(self, length): ... def make_content_range(self, length): ... def to_header(self): ... def to_content_range_header(self, length): ... class ContentRange: on_update = ... # type: Any def __init__(self, units, start, stop, length=None, on_update=None): ... units = ... # type: Any start = ... # type: Any stop = ... # type: Any length = ... # type: Any def set(self, start, stop, length=None, units=''): ... def unset(self): ... def to_header(self): ... def __nonzero__(self): ... __bool__ = ... # type: Any class Authorization(ImmutableDictMixin, dict): type = ... # type: Any def __init__(self, auth_type, data=None): ... username = ... # type: Any password = ... # type: Any realm = ... # type: Any nonce = ... # type: Any uri = ... # type: Any nc = ... # type: Any cnonce = ... # type: Any response = ... # type: Any opaque = ... 
# type: Any @property def qop(self): ... class WWWAuthenticate(UpdateDictMixin, dict): on_update = ... # type: Any def __init__(self, auth_type=None, values=None, on_update=None): ... def set_basic(self, realm=''): ... def set_digest(self, realm, nonce, qop=..., opaque=None, algorithm=None, stale=False): ... def to_header(self): ... @staticmethod def auth_property(name, doc=None): ... type = ... # type: Any realm = ... # type: Any domain = ... # type: Any nonce = ... # type: Any opaque = ... # type: Any algorithm = ... # type: Any qop = ... # type: Any stale = ... # type: Any class FileStorage: name = ... # type: Any stream = ... # type: Any filename = ... # type: Any headers = ... # type: Any def __init__(self, stream=None, filename=None, name=None, content_type=None, content_length=None, headers=None): ... @property def content_type(self): ... @property def content_length(self): ... @property def mimetype(self): ... @property def mimetype_params(self): ... def save(self, dst, buffer_size=16384): ... def close(self): ... def __nonzero__(self): ... __bool__ = ... # type: Any def __getattr__(self, name): ... def __iter__(self): ... mypy-0.560/typeshed/third_party/3/werkzeug/debug/0000755€tŠÔÚ€2›s®0000000000013215007244026143 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/typeshed/third_party/3/werkzeug/debug/__init__.pyi0000644€tŠÔÚ€2›s®0000000260013215007212030416 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response PIN_TIME = ... # type: Any def hash_pin(pin): ... def get_machine_id(): ... class _ConsoleFrame: console = ... # type: Any id = ... # type: Any def __init__(self, namespace): ... def get_pin_and_cookie_name(app): ... class DebuggedApplication: app = ... # type: Any evalex = ... # type: Any frames = ... # type: Any tracebacks = ... # type: Any request_key = ... # type: Any console_path = ... # type: Any console_init_func = ... # type: Any show_hidden_frames = ... # type: Any secret = ... # type: Any pin_logging = ... # type: Any pin = ... # type: Any def __init__(self, app, evalex=False, request_key='', console_path='', console_init_func=None, show_hidden_frames=False, lodgeit_url=None, pin_security=True, pin_logging=True): ... @property def pin_cookie_name(self): ... def debug_application(self, environ, start_response): ... def execute_command(self, request, command, frame): ... def display_console(self, request): ... def paste_traceback(self, request, traceback): ... def get_resource(self, request, filename): ... def check_pin_trust(self, environ): ... def pin_auth(self, request): ... def log_pin_request(self): ... def __call__(self, environ, start_response): ... mypy-0.560/typeshed/third_party/3/werkzeug/debug/console.pyi0000644€tŠÔÚ€2›s®0000000223613215007212030326 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any import code class HTMLStringO: def __init__(self): ... def isatty(self): ... def close(self): ... def flush(self): ... def seek(self, n, mode=0): ... def readline(self): ... def reset(self): ... def write(self, x): ... def writelines(self, x): ... class ThreadedStream: @staticmethod def push(): ... @staticmethod def fetch(): ... @staticmethod def displayhook(obj): ... def __setattr__(self, name, value): ... def __dir__(self): ... def __getattribute__(self, name): ... class _ConsoleLoader: def __init__(self): ... def register(self, code, source): ... def get_source_by_code(self, code): ... 
class _InteractiveConsole(code.InteractiveInterpreter): globals = ... # type: Any more = ... # type: Any buffer = ... # type: Any def __init__(self, globals, locals): ... def runsource(self, source): ... def runcode(self, code): ... def showtraceback(self): ... def showsyntaxerror(self, filename=None): ... def write(self, data): ... class Console: def __init__(self, globals=None, locals=None): ... def eval(self, code): ... mypy-0.560/typeshed/third_party/3/werkzeug/debug/repr.pyi0000644€tŠÔÚ€2›s®0000000165513215007212027640 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any deque = ... # type: Any missing = ... # type: Any RegexType = ... # type: Any HELP_HTML = ... # type: Any OBJECT_DUMP_HTML = ... # type: Any def debug_repr(obj): ... def dump(obj=...): ... class _Helper: def __call__(self, topic=None): ... helper = ... # type: Any class DebugReprGenerator: def __init__(self): ... list_repr = ... # type: Any tuple_repr = ... # type: Any set_repr = ... # type: Any frozenset_repr = ... # type: Any deque_repr = ... # type: Any def regex_repr(self, obj): ... def string_repr(self, obj, limit=70): ... def dict_repr(self, d, recursive, limit=5): ... def object_repr(self, obj): ... def dispatch_repr(self, obj, recursive): ... def fallback_repr(self): ... def repr(self, obj): ... def dump_object(self, obj): ... def dump_locals(self, d): ... def render_object_dump(self, items, title, repr=None): ... mypy-0.560/typeshed/third_party/3/werkzeug/debug/tbtools.pyi0000644€tŠÔÚ€2›s®0000000370213215007212030351 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any UTF8_COOKIE = ... # type: Any system_exceptions = ... # type: Any HEADER = ... # type: Any FOOTER = ... # type: Any PAGE_HTML = ... # type: Any CONSOLE_HTML = ... # type: Any SUMMARY_HTML = ... # type: Any FRAME_HTML = ... # type: Any SOURCE_LINE_HTML = ... # type: Any def render_console_html(secret, evalex_trusted=True): ... def get_current_traceback(ignore_system_exceptions=False, show_hidden_frames=False, skip=0): ... class Line: lineno = ... # type: Any code = ... # type: Any in_frame = ... # type: Any current = ... # type: Any def __init__(self, lineno, code): ... def classes(self): ... def render(self): ... class Traceback: exc_type = ... # type: Any exc_value = ... # type: Any exception_type = ... # type: Any frames = ... # type: Any def __init__(self, exc_type, exc_value, tb): ... def filter_hidden_frames(self): ... def is_syntax_error(self): ... def exception(self): ... def log(self, logfile=None): ... def paste(self): ... def render_summary(self, include_title=True): ... def render_full(self, evalex=False, secret=None, evalex_trusted=True): ... def generate_plaintext_traceback(self): ... def plaintext(self): ... id = ... # type: Any class Frame: lineno = ... # type: Any function_name = ... # type: Any locals = ... # type: Any globals = ... # type: Any filename = ... # type: Any module = ... # type: Any loader = ... # type: Any code = ... # type: Any hide = ... # type: Any info = ... # type: Any def __init__(self, exc_type, exc_value, tb): ... def render(self): ... def render_line_context(self): ... def get_annotated_lines(self): ... def eval(self, code, mode=''): ... def sourcelines(self): ... def get_context_lines(self, context=5): ... @property def current_line(self): ... def console(self): ... id = ... 
# type: Any mypy-0.560/typeshed/third_party/3/werkzeug/exceptions.pyi0000644€tŠÔÚ€2›s®0000000777613215007212027775 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class HTTPException(Exception): code = ... # type: Any description = ... # type: Any response = ... # type: Any def __init__(self, description=None, response=None): ... @classmethod def wrap(cls, exception, name=None): ... @property def name(self): ... def get_description(self, environ=None): ... def get_body(self, environ=None): ... def get_headers(self, environ=None): ... def get_response(self, environ=None): ... def __call__(self, environ, start_response): ... class BadRequest(HTTPException): code = ... # type: Any description = ... # type: Any class ClientDisconnected(BadRequest): ... class SecurityError(BadRequest): ... class BadHost(BadRequest): ... class Unauthorized(HTTPException): code = ... # type: Any description = ... # type: Any class Forbidden(HTTPException): code = ... # type: Any description = ... # type: Any class NotFound(HTTPException): code = ... # type: Any description = ... # type: Any class MethodNotAllowed(HTTPException): code = ... # type: Any description = ... # type: Any valid_methods = ... # type: Any def __init__(self, valid_methods=None, description=None): ... def get_headers(self, environ): ... class NotAcceptable(HTTPException): code = ... # type: Any description = ... # type: Any class RequestTimeout(HTTPException): code = ... # type: Any description = ... # type: Any class Conflict(HTTPException): code = ... # type: Any description = ... # type: Any class Gone(HTTPException): code = ... # type: Any description = ... # type: Any class LengthRequired(HTTPException): code = ... # type: Any description = ... # type: Any class PreconditionFailed(HTTPException): code = ... # type: Any description = ... # type: Any class RequestEntityTooLarge(HTTPException): code = ... # type: Any description = ... # type: Any class RequestURITooLarge(HTTPException): code = ... # type: Any description = ... # type: Any class UnsupportedMediaType(HTTPException): code = ... # type: Any description = ... # type: Any class RequestedRangeNotSatisfiable(HTTPException): code = ... # type: Any description = ... # type: Any length = ... # type: Any units = ... # type: Any def __init__(self, length=None, units='', description=None): ... def get_headers(self, environ): ... class ExpectationFailed(HTTPException): code = ... # type: Any description = ... # type: Any class ImATeapot(HTTPException): code = ... # type: Any description = ... # type: Any class UnprocessableEntity(HTTPException): code = ... # type: Any description = ... # type: Any class Locked(HTTPException): code = ... # type: Any description = ... # type: Any class PreconditionRequired(HTTPException): code = ... # type: Any description = ... # type: Any class TooManyRequests(HTTPException): code = ... # type: Any description = ... # type: Any class RequestHeaderFieldsTooLarge(HTTPException): code = ... # type: Any description = ... # type: Any class UnavailableForLegalReasons(HTTPException): code = ... # type: Any description = ... # type: Any class InternalServerError(HTTPException): code = ... # type: Any description = ... # type: Any class NotImplemented(HTTPException): code = ... # type: Any description = ... # type: Any class BadGateway(HTTPException): code = ... # type: Any description = ... # type: Any class ServiceUnavailable(HTTPException): code = ... # type: Any description = ... # type: Any class GatewayTimeout(HTTPException): code = ... 
# type: Any description = ... # type: Any class HTTPVersionNotSupported(HTTPException): code = ... # type: Any description = ... # type: Any class Aborter: mapping = ... # type: Any def __init__(self, mapping=None, extra=None): ... def __call__(self, code, *args, **kwargs): ... mypy-0.560/typeshed/third_party/3/werkzeug/filesystem.pyi0000644€tŠÔÚ€2›s®0000000026713215007212027764 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any has_likely_buggy_unicode_filesystem = ... # type: Any class BrokenFilesystemWarning(RuntimeWarning, UnicodeWarning): ... def get_filesystem_encoding(): ... mypy-0.560/typeshed/third_party/3/werkzeug/formparser.pyi0000644€tŠÔÚ€2›s®0000000353213215007212027756 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def default_stream_factory(total_content_length, filename, content_type, content_length=None): ... def parse_form_data(environ, stream_factory=None, charset='', errors='', max_form_memory_size=None, max_content_length=None, cls=None, silent=True): ... def exhaust_stream(f): ... class FormDataParser: stream_factory = ... # type: Any charset = ... # type: Any errors = ... # type: Any max_form_memory_size = ... # type: Any max_content_length = ... # type: Any cls = ... # type: Any silent = ... # type: Any def __init__(self, stream_factory=None, charset='', errors='', max_form_memory_size=None, max_content_length=None, cls=None, silent=True): ... def get_parse_func(self, mimetype, options): ... def parse_from_environ(self, environ): ... def parse(self, stream, mimetype, content_length, options=None): ... parse_functions = ... # type: Any def is_valid_multipart_boundary(boundary): ... def parse_multipart_headers(iterable): ... class MultiPartParser: charset = ... # type: Any errors = ... # type: Any max_form_memory_size = ... # type: Any stream_factory = ... # type: Any cls = ... # type: Any buffer_size = ... # type: Any def __init__(self, stream_factory=None, charset='', errors='', max_form_memory_size=None, cls=None, buffer_size=...): ... def fail(self, message): ... def get_part_encoding(self, headers): ... def get_part_charset(self, headers): ... def start_file_streaming(self, filename, headers, total_content_length): ... def in_memory_threshold_reached(self, bytes): ... def validate_boundary(self, boundary): ... def parse_lines(self, file, boundary, content_length, cap_at_buffer=True): ... def parse_parts(self, file, boundary, content_length): ... def parse(self, file, boundary, content_length): ... mypy-0.560/typeshed/third_party/3/werkzeug/http.pyi0000644€tŠÔÚ€2›s®0000000334313215007212026555 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from urllib.request import parse_http_list as _parse_list_header from urllib.parse import unquote_to_bytes as _unquote HTTP_STATUS_CODES = ... # type: Any def wsgi_to_bytes(data): ... def bytes_to_wsgi(data): ... def quote_header_value(value, extra_chars='', allow_token=True): ... def unquote_header_value(value, is_filename=False): ... def dump_options_header(header, options): ... def dump_header(iterable, allow_token=True): ... def parse_list_header(value): ... def parse_dict_header(value, cls=...): ... def parse_options_header(value, multiple=False): ... def parse_accept_header(value, cls=None): ... def parse_cache_control_header(value, on_update=None, cls=None): ... def parse_set_header(value, on_update=None): ... def parse_authorization_header(value): ... def parse_www_authenticate_header(value, on_update=None): ... def parse_if_range_header(value): ... 
def parse_range_header(value, make_inclusive=True): ... def parse_content_range_header(value, on_update=None): ... def quote_etag(etag, weak=False): ... def unquote_etag(etag): ... def parse_etags(value): ... def generate_etag(data): ... def parse_date(value): ... def cookie_date(expires=None): ... def http_date(timestamp=None): ... def is_resource_modified(environ, etag=None, data=None, last_modified=None, ignore_if_range=True): ... def remove_entity_headers(headers, allowed=...): ... def remove_hop_by_hop_headers(headers): ... def is_entity_header(header): ... def is_hop_by_hop_header(header): ... def parse_cookie(header, charset='', errors='', cls=None): ... def dump_cookie(key, value='', max_age=None, expires=None, path='', domain=None, secure=False, httponly=False, charset='', sync_expires=True): ... def is_byte_range_valid(start, stop, length): ... mypy-0.560/typeshed/third_party/3/werkzeug/local.pyi0000644€tŠÔÚ€2›s®0000000575713215007212026703 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def release_local(local): ... class Local: def __init__(self): ... def __iter__(self): ... def __call__(self, proxy): ... def __release_local__(self): ... def __getattr__(self, name): ... def __setattr__(self, name, value): ... def __delattr__(self, name): ... class LocalStack: def __init__(self): ... def __release_local__(self): ... def _get__ident_func__(self): ... def _set__ident_func__(self, value): ... __ident_func__ = ... # type: Any def __call__(self): ... def push(self, obj): ... def pop(self): ... @property def top(self): ... class LocalManager: locals = ... # type: Any ident_func = ... # type: Any def __init__(self, locals=None, ident_func=None): ... def get_ident(self): ... def cleanup(self): ... def make_middleware(self, app): ... def middleware(self, func): ... class LocalProxy: def __init__(self, local, name=None): ... @property def __dict__(self): ... def __bool__(self): ... def __unicode__(self): ... def __dir__(self): ... def __getattr__(self, name): ... def __setitem__(self, key, value): ... def __delitem__(self, key): ... __getslice__ = ... # type: Any def __setslice__(self, i, j, seq): ... def __delslice__(self, i, j): ... __setattr__ = ... # type: Any __delattr__ = ... # type: Any __lt__ = ... # type: Any __le__ = ... # type: Any __eq__ = ... # type: Any __ne__ = ... # type: Any __gt__ = ... # type: Any __ge__ = ... # type: Any __cmp__ = ... # type: Any __hash__ = ... # type: Any __call__ = ... # type: Any __len__ = ... # type: Any __getitem__ = ... # type: Any __iter__ = ... # type: Any __contains__ = ... # type: Any __add__ = ... # type: Any __sub__ = ... # type: Any __mul__ = ... # type: Any __floordiv__ = ... # type: Any __mod__ = ... # type: Any __divmod__ = ... # type: Any __pow__ = ... # type: Any __lshift__ = ... # type: Any __rshift__ = ... # type: Any __and__ = ... # type: Any __xor__ = ... # type: Any __or__ = ... # type: Any __div__ = ... # type: Any __truediv__ = ... # type: Any __neg__ = ... # type: Any __pos__ = ... # type: Any __abs__ = ... # type: Any __invert__ = ... # type: Any __complex__ = ... # type: Any __int__ = ... # type: Any __long__ = ... # type: Any __float__ = ... # type: Any __oct__ = ... # type: Any __hex__ = ... # type: Any __index__ = ... # type: Any __coerce__ = ... # type: Any __enter__ = ... # type: Any __exit__ = ... # type: Any __radd__ = ... # type: Any __rsub__ = ... # type: Any __rmul__ = ... # type: Any __rdiv__ = ... # type: Any __rtruediv__ = ... # type: Any __rfloordiv__ = ... # type: Any __rmod__ = ... 
# type: Any __rdivmod__ = ... # type: Any __copy__ = ... # type: Any __deepcopy__ = ... # type: Any mypy-0.560/typeshed/third_party/3/werkzeug/posixemulation.pyi0000644€tŠÔÚ€2›s®0000000032413215007212030652 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from ._compat import to_unicode as to_unicode from .filesystem import get_filesystem_encoding as get_filesystem_encoding can_rename_open_file = ... # type: Any def rename(src, dst): ... mypy-0.560/typeshed/third_party/3/werkzeug/routing.pyi0000644€tŠÔÚ€2›s®0000001440313215007212027264 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug.exceptions import HTTPException def parse_converter_args(argstr): ... def parse_rule(rule): ... class RoutingException(Exception): ... class RequestRedirect(HTTPException, RoutingException): code = ... # type: Any new_url = ... # type: Any def __init__(self, new_url): ... def get_response(self, environ): ... class RequestSlash(RoutingException): ... class RequestAliasRedirect(RoutingException): matched_values = ... # type: Any def __init__(self, matched_values): ... class BuildError(RoutingException, LookupError): endpoint = ... # type: Any values = ... # type: Any method = ... # type: Any adapter = ... # type: Any def __init__(self, endpoint, values, method, adapter=None): ... def suggested(self): ... def closest_rule(self, adapter): ... class ValidationError(ValueError): ... class RuleFactory: def get_rules(self, map): ... class Subdomain(RuleFactory): subdomain = ... # type: Any rules = ... # type: Any def __init__(self, subdomain, rules): ... def get_rules(self, map): ... class Submount(RuleFactory): path = ... # type: Any rules = ... # type: Any def __init__(self, path, rules): ... def get_rules(self, map): ... class EndpointPrefix(RuleFactory): prefix = ... # type: Any rules = ... # type: Any def __init__(self, prefix, rules): ... def get_rules(self, map): ... class RuleTemplate: rules = ... # type: Any def __init__(self, rules): ... def __call__(self, *args, **kwargs): ... class RuleTemplateFactory(RuleFactory): rules = ... # type: Any context = ... # type: Any def __init__(self, rules, context): ... def get_rules(self, map): ... class Rule(RuleFactory): rule = ... # type: Any is_leaf = ... # type: Any map = ... # type: Any strict_slashes = ... # type: Any subdomain = ... # type: Any host = ... # type: Any defaults = ... # type: Any build_only = ... # type: Any alias = ... # type: Any methods = ... # type: Any endpoint = ... # type: Any redirect_to = ... # type: Any arguments = ... # type: Any def __init__(self, string, defaults=None, subdomain=None, methods=None, build_only=False, endpoint=None, strict_slashes=None, redirect_to=None, alias=False, host=None): ... def empty(self): ... def get_empty_kwargs(self): ... def get_rules(self, map): ... def refresh(self): ... def bind(self, map, rebind=False): ... def get_converter(self, variable_name, converter_name, args, kwargs): ... def compile(self): ... def match(self, path, method=None): ... def build(self, values, append_unknown=True): ... def provides_defaults_for(self, rule): ... def suitable_for(self, values, method=None): ... def match_compare_key(self): ... def build_compare_key(self): ... def __eq__(self, other): ... def __ne__(self, other): ... class BaseConverter: regex = ... # type: Any weight = ... # type: Any map = ... # type: Any def __init__(self, map): ... def to_python(self, value): ... def to_url(self, value): ... class UnicodeConverter(BaseConverter): regex = ... 
# type: Any def __init__(self, map, minlength=1, maxlength=None, length=None): ... class AnyConverter(BaseConverter): regex = ... # type: Any def __init__(self, map, *items): ... class PathConverter(BaseConverter): regex = ... # type: Any weight = ... # type: Any class NumberConverter(BaseConverter): weight = ... # type: Any fixed_digits = ... # type: Any min = ... # type: Any max = ... # type: Any def __init__(self, map, fixed_digits=0, min=None, max=None): ... def to_python(self, value): ... def to_url(self, value): ... class IntegerConverter(NumberConverter): regex = ... # type: Any num_convert = ... # type: Any class FloatConverter(NumberConverter): regex = ... # type: Any num_convert = ... # type: Any def __init__(self, map, min=None, max=None): ... class UUIDConverter(BaseConverter): regex = ... # type: Any def to_python(self, value): ... def to_url(self, value): ... DEFAULT_CONVERTERS = ... # type: Any class Map: default_converters = ... # type: Any default_subdomain = ... # type: Any charset = ... # type: Any encoding_errors = ... # type: Any strict_slashes = ... # type: Any redirect_defaults = ... # type: Any host_matching = ... # type: Any converters = ... # type: Any sort_parameters = ... # type: Any sort_key = ... # type: Any def __init__(self, rules=None, default_subdomain='', charset='', strict_slashes=True, redirect_defaults=True, converters=None, sort_parameters=False, sort_key=None, encoding_errors='', host_matching=False): ... def is_endpoint_expecting(self, endpoint, *arguments): ... def iter_rules(self, endpoint=None): ... def add(self, rulefactory): ... def bind(self, server_name, script_name=None, subdomain=None, url_scheme='', default_method='', path_info=None, query_args=None): ... def bind_to_environ(self, environ, server_name=None, subdomain=None): ... def update(self): ... class MapAdapter: map = ... # type: Any server_name = ... # type: Any script_name = ... # type: Any subdomain = ... # type: Any url_scheme = ... # type: Any path_info = ... # type: Any default_method = ... # type: Any query_args = ... # type: Any def __init__(self, map, server_name, script_name, subdomain, url_scheme, path_info, default_method, query_args=None): ... def dispatch(self, view_func, path_info=None, method=None, catch_http_exceptions=False): ... def match(self, path_info=None, method=None, return_rule=False, query_args=None): ... def test(self, path_info=None, method=None): ... def allowed_methods(self, path_info=None): ... def get_host(self, domain_part): ... def get_default_redirect(self, rule, method, values, query_args): ... def encode_query_args(self, query_args): ... def make_redirect_url(self, path_info, query_args=None, domain_part=None): ... def make_alias_redirect_url(self, path, endpoint, values, method, query_args): ... def build(self, endpoint, values=None, method=None, force_external=False, append_unknown=True): ... mypy-0.560/typeshed/third_party/3/werkzeug/script.pyi0000644€tŠÔÚ€2›s®0000000105413215007212027077 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any argument_types = ... # type: Any converters = ... # type: Any def run(namespace=None, action_prefix='', args=None): ... def fail(message, code=-1): ... def find_actions(namespace, action_prefix): ... def print_usage(actions): ... def analyse_action(func): ... def make_shell(init_func=None, banner=None, use_ipython=True): ... 
def make_runserver(app_factory, hostname='', port=5000, use_reloader=False, use_debugger=False, use_evalex=True, threaded=False, processes=1, static_files=None, extra_files=None, ssl_context=None): ... mypy-0.560/typeshed/third_party/3/werkzeug/security.pyi0000644€tŠÔÚ€2›s®0000000071513215007212027445 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any SALT_CHARS = ... # type: Any DEFAULT_PBKDF2_ITERATIONS = ... # type: Any def pbkdf2_hex(data, salt, iterations=..., keylen=None, hashfunc=None): ... def pbkdf2_bin(data, salt, iterations=..., keylen=None, hashfunc=None): ... def safe_str_cmp(a, b): ... def gen_salt(length): ... def generate_password_hash(password, method='', salt_length=8): ... def check_password_hash(pwhash, password): ... def safe_join(directory, filename): ... mypy-0.560/typeshed/third_party/3/werkzeug/serving.pyi0000644€tŠÔÚ€2›s®0000000616113215007212027254 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from socketserver import ThreadingMixIn, ForkingMixIn from http.server import HTTPServer, BaseHTTPRequestHandler class _SslDummy: def __getattr__(self, name): ... ssl = ... # type: Any LISTEN_QUEUE = ... # type: Any can_open_by_fd = ... # type: Any class WSGIRequestHandler(BaseHTTPRequestHandler): @property def server_version(self): ... def make_environ(self): ... environ = ... # type: Any close_connection = ... # type: Any def run_wsgi(self): ... def handle(self): ... def initiate_shutdown(self): ... def connection_dropped(self, error, environ=None): ... raw_requestline = ... # type: Any def handle_one_request(self): ... def send_response(self, code, message=None): ... def version_string(self): ... def address_string(self): ... def port_integer(self): ... def log_request(self, code='', size=''): ... def log_error(self, *args): ... def log_message(self, format, *args): ... def log(self, type, message, *args): ... BaseRequestHandler = ... # type: Any def generate_adhoc_ssl_pair(cn=None): ... def make_ssl_devcert(base_path, host=None, cn=None): ... def generate_adhoc_ssl_context(): ... def load_ssl_context(cert_file, pkey_file=None, protocol=None): ... class _SSLContext: def __init__(self, protocol): ... def load_cert_chain(self, certfile, keyfile=None, password=None): ... def wrap_socket(self, sock, **kwargs): ... def is_ssl_error(error=None): ... def select_ip_version(host, port): ... class BaseWSGIServer(HTTPServer): multithread = ... # type: Any multiprocess = ... # type: Any request_queue_size = ... # type: Any address_family = ... # type: Any app = ... # type: Any passthrough_errors = ... # type: Any shutdown_signal = ... # type: Any host = ... # type: Any port = ... # type: Any socket = ... # type: Any server_address = ... # type: Any ssl_context = ... # type: Any def __init__(self, host, port, app, handler=None, passthrough_errors=False, ssl_context=None, fd=None): ... def log(self, type, message, *args): ... def serve_forever(self): ... def handle_error(self, request, client_address): ... def get_request(self): ... class ThreadedWSGIServer(ThreadingMixIn, BaseWSGIServer): multithread = ... # type: Any daemon_threads = ... # type: Any class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer): multiprocess = ... # type: Any max_children = ... # type: Any def __init__(self, host, port, app, processes=40, handler=None, passthrough_errors=False, ssl_context=None, fd=None): ... def make_server(host=None, port=None, app=None, threaded=False, processes=1, request_handler=None, passthrough_errors=False, ssl_context=None, fd=None): ... 
def is_running_from_reloader(): ... def run_simple(hostname, port, application, use_reloader=False, use_debugger=False, use_evalex=True, extra_files=None, reloader_interval=1, reloader_type='', threaded=False, processes=1, request_handler=None, static_files=None, passthrough_errors=False, ssl_context=None): ... def run_with_reloader(*args, **kwargs): ... def main(): ... mypy-0.560/typeshed/third_party/3/werkzeug/test.pyi0000644€tŠÔÚ€2›s®0000000617313215007212026561 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from urllib.request import Request as U2Request from http.cookiejar import CookieJar def stream_encode_multipart(values, use_tempfile=True, threshold=..., boundary=None, charset=''): ... def encode_multipart(values, boundary=None, charset=''): ... def File(fd, filename=None, mimetype=None): ... class _TestCookieHeaders: headers = ... # type: Any def __init__(self, headers): ... def getheaders(self, name): ... def get_all(self, name, default=None): ... class _TestCookieResponse: headers = ... # type: Any def __init__(self, headers): ... def info(self): ... class _TestCookieJar(CookieJar): def inject_wsgi(self, environ): ... def extract_wsgi(self, environ, headers): ... class EnvironBuilder: server_protocol = ... # type: Any wsgi_version = ... # type: Any request_class = ... # type: Any charset = ... # type: Any path = ... # type: Any base_url = ... # type: Any query_string = ... # type: Any args = ... # type: Any method = ... # type: Any headers = ... # type: Any content_type = ... # type: Any errors_stream = ... # type: Any multithread = ... # type: Any multiprocess = ... # type: Any run_once = ... # type: Any environ_base = ... # type: Any environ_overrides = ... # type: Any input_stream = ... # type: Any content_length = ... # type: Any closed = ... # type: Any def __init__(self, path='', base_url=None, query_string=None, method='', input_stream=None, content_type=None, content_length=None, errors_stream=None, multithread=False, multiprocess=False, run_once=False, headers=None, data=None, environ_base=None, environ_overrides=None, charset=''): ... def form_property(name, storage, doc): ... form = ... # type: Any files = ... # type: Any @property def server_name(self): ... @property def server_port(self): ... def __del__(self): ... def close(self): ... def get_environ(self): ... def get_request(self, cls=None): ... class ClientRedirectError(Exception): ... class Client: application = ... # type: Any response_wrapper = ... # type: Any cookie_jar = ... # type: Any allow_subdomain_redirects = ... # type: Any def __init__(self, application, response_wrapper=None, use_cookies=True, allow_subdomain_redirects=False): ... def set_cookie(self, server_name, key, value='', max_age=None, expires=None, path='', domain=None, secure=None, httponly=False, charset=''): ... def delete_cookie(self, server_name, key, path='', domain=None): ... def run_wsgi_app(self, environ, buffered=False): ... def resolve_redirect(self, response, new_location, environ, buffered=False): ... def open(self, *args, **kwargs): ... def get(self, *args, **kw): ... def patch(self, *args, **kw): ... def post(self, *args, **kw): ... def head(self, *args, **kw): ... def put(self, *args, **kw): ... def delete(self, *args, **kw): ... def options(self, *args, **kw): ... def trace(self, *args, **kw): ... def create_environ(*args, **kwargs): ... def run_wsgi_app(app, environ, buffered=False): ... 
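The test.pyi stub above covers werkzeug's test client, which is what most typed callers exercise. A minimal, hypothetical sketch of code that these stubs would check follows; the WSGI application and request paths are illustrative placeholders, not part of this archive.

# Hypothetical caller exercised against the werkzeug.test stubs above.
from werkzeug.test import Client, create_environ
from werkzeug.wrappers import BaseResponse

def app(environ, start_response):
    # Trivial WSGI application; it exists only so Client has something to call.
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'hello']

client = Client(app, response_wrapper=BaseResponse)      # matches Client.__init__ in the stub
response = client.get('/')                                # Client.get(*args, **kw) in the stub
environ = create_environ('/index', 'http://localhost/')   # create_environ(*args, **kwargs)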
mypy-0.560/typeshed/third_party/3/werkzeug/testapp.pyi0000644€tŠÔÚ€2›s®0000000037513215007212027260 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response logo = ... # type: Any TEMPLATE = ... # type: Any def iter_sys_path(): ... def render_testapp(req): ... def test_app(environ, start_response): ... mypy-0.560/typeshed/third_party/3/werkzeug/urls.pyi0000644€tŠÔÚ€2›s®0000000443013215007212026561 0ustar jukkaDROPBOX\Domain Users00000000000000from collections import namedtuple from typing import Any _URLTuple = namedtuple( '_URLTuple', ['scheme', 'netloc', 'path', 'query', 'fragment'] ) class BaseURL(_URLTuple): def replace(self, **kwargs): ... @property def host(self): ... @property def ascii_host(self): ... @property def port(self): ... @property def auth(self): ... @property def username(self): ... @property def raw_username(self): ... @property def password(self): ... @property def raw_password(self): ... def decode_query(self, *args, **kwargs): ... def join(self, *args, **kwargs): ... def to_url(self): ... def decode_netloc(self): ... def to_uri_tuple(self): ... def to_iri_tuple(self): ... def get_file_location(self, pathformat=None): ... class URL(BaseURL): def encode_netloc(self): ... def encode(self, charset='', errors=''): ... class BytesURL(BaseURL): def encode_netloc(self): ... def decode(self, charset='', errors=''): ... def url_parse(url, scheme=None, allow_fragments=True): ... def url_quote(string, charset='', errors='', safe='', unsafe=''): ... def url_quote_plus(string, charset='', errors='', safe=''): ... def url_unparse(components): ... def url_unquote(string, charset='', errors='', unsafe=''): ... def url_unquote_plus(s, charset='', errors=''): ... def url_fix(s, charset=''): ... def uri_to_iri(uri, charset='', errors=''): ... def iri_to_uri(iri, charset='', errors='', safe_conversion=False): ... def url_decode(s, charset='', decode_keys=False, include_empty=True, errors='', separator='', cls=None): ... def url_decode_stream(stream, charset='', decode_keys=False, include_empty=True, errors='', separator='', cls=None, limit=None, return_iterator=False): ... def url_encode(obj, charset='', encode_keys=False, sort=False, key=None, separator=b''): ... def url_encode_stream(obj, stream=None, charset='', encode_keys=False, sort=False, key=None, separator=b''): ... def url_join(base, url, allow_fragments=True): ... class Href: base = ... # type: Any charset = ... # type: Any sort = ... # type: Any key = ... # type: Any def __init__(self, base='', charset='', sort=False, key=None): ... def __getattr__(self, name): ... def __call__(self, *path, **query): ... mypy-0.560/typeshed/third_party/3/werkzeug/useragents.pyi0000644€tŠÔÚ€2›s®0000000055713215007212027762 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any class UserAgentParser: platforms = ... # type: Any browsers = ... # type: Any def __init__(self): ... def __call__(self, user_agent): ... class UserAgent: string = ... # type: Any def __init__(self, environ_or_string): ... def to_header(self): ... def __nonzero__(self): ... __bool__ = ... # type: Any mypy-0.560/typeshed/third_party/3/werkzeug/utils.pyi0000644€tŠÔÚ€2›s®0000000313313215007212026733 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any from werkzeug._internal import _DictAccessorProperty class cached_property(property): __name__ = ... # type: Any __module__ = ... # type: Any __doc__ = ... # type: Any func = ... 
# type: Any def __init__(self, func, name=None, doc=None): ... def __set__(self, obj, value): ... def __get__(self, obj, type=None): ... class environ_property(_DictAccessorProperty): read_only = ... # type: Any def lookup(self, obj): ... class header_property(_DictAccessorProperty): def lookup(self, obj): ... class HTMLBuilder: def __init__(self, dialect): ... def __call__(self, s): ... def __getattr__(self, tag): ... html = ... # type: Any xhtml = ... # type: Any def get_content_type(mimetype, charset): ... def format_string(string, context): ... def secure_filename(filename): ... def escape(s, quote=None): ... def unescape(s): ... def redirect(location, code=302, Response=None): ... def append_slash_redirect(environ, code=301): ... def import_string(import_name, silent=False): ... def find_modules(import_path, include_packages=False, recursive=False): ... def validate_arguments(func, args, kwargs, drop_extra=True): ... def bind_arguments(func, args, kwargs): ... class ArgumentValidationError(ValueError): missing = ... # type: Any extra = ... # type: Any extra_positional = ... # type: Any def __init__(self, missing=None, extra=None, extra_positional=None): ... class ImportStringError(ImportError): import_name = ... # type: Any exception = ... # type: Any def __init__(self, import_name, exception): ... mypy-0.560/typeshed/third_party/3/werkzeug/wrappers.pyi0000644€tŠÔÚ€2›s®0000001605513215007212027445 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import ( Any, Iterable, Mapping, Optional, Sequence, Tuple, Type, Union, ) from .datastructures import ( CombinedMultiDict, EnvironHeaders, Headers, ImmutableMultiDict, MultiDict, TypeConversionDict, ) class BaseRequest: charset = ... # type: str encoding_errors = ... # type: str max_content_length = ... # type: int max_form_memory_size = ... # type: int parameter_storage_class = ... # type: Type list_storage_class = ... # type: Type dict_storage_class = ... # type: Type form_data_parser_class = ... # type: Type trusted_hosts = ... # type: Optional[Sequence[str]] disable_data_descriptor = ... # type: Any environ = ... # type: Mapping[str, object] shallow = ... # type: Any def __init__(self, environ: Mapping[str, object], populate_request: bool = ..., shallow: bool = ...) -> None: ... @property def url_charset(self) -> str: ... @classmethod def from_values(cls, *args, **kwargs) -> 'BaseRequest': ... @classmethod def application(cls, f): ... @property def want_form_data_parsed(self): ... def make_form_data_parser(self): ... def close(self) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, tb): ... def stream(self): ... input_stream = ... # type: Any args = ... # type: ImmutableMultiDict def data(self): ... def get_data(self, cache: bool = ..., as_text: bool = ..., parse_form_data: bool = ...) -> bytes: ... form = ... # type: ImmutableMultiDict values = ... # type: CombinedMultiDict files = ... # type: MultiDict cookies = ... # type: TypeConversionDict headers = ... # type: EnvironHeaders path = ... # type: str full_path = ... # type: str script_root = ... # type: str url = ... # type: str base_url = ... # type: str url_root = ... # type: str host_url = ... # type: str host = ... # type: str query_string = ... # type: bytes method = ... # type: str def access_route(self): ... @property def remote_addr(self) -> str: ... remote_user = ... # type: str scheme = ... # type: str is_xhr = ... # type: bool is_secure = ... # type: bool is_multithread = ... # type: bool is_multiprocess = ... 
# type: bool is_run_once = ... # type: bool class BaseResponse: charset = ... # type: str default_status = ... # type: int default_mimetype = ... # type: str implicit_sequence_conversion = ... # type: bool autocorrect_location_header = ... # type: bool automatically_set_content_length = ... # type: bool headers = ... # type: Headers status_code = ... # type: int status = ... # type: str direct_passthrough = ... # type: bool response = ... # type: Iterable[bytes] def __init__(self, response: Optional[Union[Iterable[bytes], bytes]] = ..., status: Optional[Union[str, int]] = ..., headers: Optional[Union[Headers, Mapping[str, str], Sequence[Tuple[str, str]]]]=None, mimetype: Optional[str] = ..., content_type: Optional[str] = ..., direct_passthrough: bool = ...) -> None: ... def call_on_close(self, func): ... @classmethod def force_type(cls, response, environ=None): ... @classmethod def from_app(cls, app, environ, buffered=False): ... def get_data(self, as_text=False): ... def set_data(self, value): ... data = ... # type: Any def calculate_content_length(self): ... def make_sequence(self): ... def iter_encoded(self): ... def set_cookie(self, key, value='', max_age=None, expires=None, path='', domain=None, secure=False, httponly=False): ... def delete_cookie(self, key, path='', domain=None): ... @property def is_streamed(self) -> bool: ... @property def is_sequence(self) -> bool: ... def close(self) -> None: ... def __enter__(self): ... def __exit__(self, exc_type, exc_value, tb): ... def freeze(self, **kwargs): ... def get_wsgi_headers(self, environ): ... def get_app_iter(self, environ): ... def get_wsgi_response(self, environ): ... def __call__(self, environ, start_response): ... class AcceptMixin: def accept_mimetypes(self): ... def accept_charsets(self): ... def accept_encodings(self): ... def accept_languages(self): ... class ETagRequestMixin: def cache_control(self): ... def if_match(self): ... def if_none_match(self): ... def if_modified_since(self): ... def if_unmodified_since(self): ... def if_range(self): ... def range(self): ... class UserAgentMixin: def user_agent(self): ... class AuthorizationMixin: def authorization(self): ... class StreamOnlyMixin: disable_data_descriptor = ... # type: Any want_form_data_parsed = ... # type: Any class ETagResponseMixin: @property def cache_control(self): ... status_code = ... # type: Any def make_conditional(self, request_or_environ, accept_ranges=False, complete_length=None): ... def add_etag(self, overwrite=False, weak=False): ... def set_etag(self, etag, weak=False): ... def get_etag(self): ... def freeze(self, *, no_etag=False): ... accept_ranges = ... # type: Any content_range = ... # type: Any class ResponseStream: mode = ... # type: Any response = ... # type: Any closed = ... # type: Any def __init__(self, response): ... def write(self, value): ... def writelines(self, seq): ... def close(self): ... def flush(self): ... def isatty(self): ... @property def encoding(self): ... class ResponseStreamMixin: def stream(self): ... class CommonRequestDescriptorsMixin: content_type = ... # type: Any def content_length(self): ... content_encoding = ... # type: Any content_md5 = ... # type: Any referrer = ... # type: Any date = ... # type: Any max_forwards = ... # type: Any @property def mimetype(self): ... @property def mimetype_params(self): ... def pragma(self): ... class CommonResponseDescriptorsMixin: mimetype = ... # type: Any mimetype_params = ... # type: Any location = ... # type: Any age = ... # type: Any content_type = ... 
# type: Any content_length = ... # type: Any content_location = ... # type: Any content_encoding = ... # type: Any content_md5 = ... # type: Any date = ... # type: Any expires = ... # type: Any last_modified = ... # type: Any retry_after = ... # type: Any vary = ... # type: Any content_language = ... # type: Any allow = ... # type: Any class WWWAuthenticateMixin: @property def www_authenticate(self): ... class Request(BaseRequest, AcceptMixin, ETagRequestMixin, UserAgentMixin, AuthorizationMixin, CommonRequestDescriptorsMixin): ... class PlainRequest(StreamOnlyMixin, Request): ... class Response(BaseResponse, ETagResponseMixin, ResponseStreamMixin, CommonResponseDescriptorsMixin, WWWAuthenticateMixin): ... mypy-0.560/typeshed/third_party/3/werkzeug/wsgi.pyi0000644€tŠÔÚ€2›s®0000000576113215007212026555 0ustar jukkaDROPBOX\Domain Users00000000000000from typing import Any def responder(f): ... def get_current_url(environ, root_only=False, strip_querystring=False, host_only=False, trusted_hosts=None): ... def host_is_trusted(hostname, trusted_list): ... def get_host(environ, trusted_hosts=None): ... def get_content_length(environ): ... def get_input_stream(environ, safe_fallback=True): ... def get_query_string(environ): ... def get_path_info(environ, charset='', errors=''): ... def get_script_name(environ, charset='', errors=''): ... def pop_path_info(environ, charset='', errors=''): ... def peek_path_info(environ, charset='', errors=''): ... def extract_path_info(environ_or_baseurl, path_or_url, charset='', errors='', collapse_http_schemes=True): ... class SharedDataMiddleware: app = ... # type: Any exports = ... # type: Any cache = ... # type: Any cache_timeout = ... # type: Any fallback_mimetype = ... # type: Any def __init__(self, app, exports, disallow=None, cache=True, cache_timeout=..., fallback_mimetype=''): ... def is_allowed(self, filename): ... def get_file_loader(self, filename): ... def get_package_loader(self, package, package_path): ... def get_directory_loader(self, directory): ... def generate_etag(self, mtime, file_size, real_filename): ... def __call__(self, environ, start_response): ... class DispatcherMiddleware: app = ... # type: Any mounts = ... # type: Any def __init__(self, app, mounts=None): ... def __call__(self, environ, start_response): ... class ClosingIterator: def __init__(self, iterable, callbacks=None): ... def __iter__(self): ... def __next__(self): ... def close(self): ... def wrap_file(environ, file, buffer_size=8192): ... class FileWrapper: file = ... # type: Any buffer_size = ... # type: Any def __init__(self, file, buffer_size=8192): ... def close(self): ... def seekable(self): ... def seek(self, *args): ... def tell(self): ... def __iter__(self): ... def __next__(self): ... class _RangeWrapper: iterable = ... # type: Any byte_range = ... # type: Any start_byte = ... # type: Any end_byte = ... # type: Any read_length = ... # type: Any seekable = ... # type: Any end_reached = ... # type: Any def __init__(self, iterable, start_byte=0, byte_range=None): ... def __iter__(self): ... def __next__(self): ... def close(self): ... def make_line_iter(stream, limit=None, buffer_size=..., cap_at_buffer=False): ... def make_chunk_iter(stream, separator, limit=None, buffer_size=..., cap_at_buffer=False): ... class LimitedStream: limit = ... # type: Any def __init__(self, stream, limit): ... def __iter__(self): ... @property def is_exhausted(self): ... def on_exhausted(self): ... def on_disconnect(self): ... def exhaust(self, chunk_size=...): ... 
def read(self, size=None): ... def readline(self, size=None): ... def readlines(self, size=None): ... def tell(self): ... def __next__(self): ... mypy-0.560/xml/0000755€tŠÔÚ€2›s®0000000000013215007244017472 5ustar jukkaDROPBOX\Domain Users00000000000000mypy-0.560/xml/mypy-html.css0000644€tŠÔÚ€2›s®0000000260113215007206022141 0ustar jukkaDROPBOX\Domain Users00000000000000/* CSS for type check coverage reports */ /* Used by both summary and file. */ body { font-family: "Helvetica Neue", sans-serif; } /* Used only by summary. */ h1 { text-align: center; font-size: 135%; margin: 20px; } table.summary { border-collapse: collapse; margin-left: 7%; margin-right: 7%; width: 85%; } table caption { margin: 1em; } table.summary, tr.summary, th.summary, td.summary { border: 1px solid #aaa; } th.summary, td.summary { padding: 0.4em; } td.summary a { text-decoration: none; } .summary-quality-0 { background-color: #dfd; } .summary-quality-1 { background-color: #ffa; } .summary-quality-2 { background-color: #faa; } td.summary-filename, th.summary-filename { text-align: left; } td.summary-filename { width: 50%; } .summary-precision { text-align: center; } .summary-lines { text-align: center; } /* Used only by file. */ td.table-lines { text-align: right; padding-right: 0.5em; } td.table-code { } span.lineno { text-align: right; } a:link.lineno, a:visited.lineno { color: #999; text-decoration: none; } a:hover.lineno, a:active.lineno { color: #000; text-decoration: underline; } .line-empty, .line-precise { background-color: #dfd; } .line-imprecise { background-color: #ffa; } .line-any, .line-unanalyzed { background-color: #faa; } mypy-0.560/xml/mypy-html.xslt0000644€tŠÔÚ€2›s®0000000736013215007206022352 0ustar jukkaDROPBOX\Domain Users00000000000000

[mypy-html.xslt: XSLT stylesheet that renders mypy's XML coverage report as HTML. The XML markup did not survive extraction; the recoverable literal text is the page title "Mypy Type Check Coverage Summary", the caption "Summary from", the column headers "File", "Imprecision", "Lines", the "Total" / "imprecise LOC" summary row, and the per-file "imprecise LOC" cells.]
mypy-0.560/xml/mypy-txt.xslt0000644€tŠÔÚ€2›s®0000001111613215007206022217 0ustar jukkaDROPBOX\Domain Users00000000000000[mypy-txt.xslt: XSLT stylesheet that renders the same report as plain text. Only its literal text survives extraction: the "Mypy Type Check Coverage Summary" heading with its "=" underline, the "Script:" label, and the ASCII borders of the summary table.]
mypy-0.560/xml/mypy.xsd0000644€tŠÔÚ€2›s®0000000417513215007206021215 0ustar jukkaDROPBOX\Domain Users00000000000000[mypy.xsd: XML schema describing the coverage report format; no textual content survives extraction.]
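These stylesheets and the schema are consumed by mypy's report generators: running the checker with report flags writes the XML that mypy-html.xslt and mypy-txt.xslt render (and that mypy-html.css styles). A minimal sketch of producing that input is below; the target name and output directories are placeholders, and the flags assume the lxml-backed reporting support shipped with this release.

# Hypothetical invocation that produces the coverage reports rendered by the
# files above; 'myproject' and 'reports/' are placeholder names.
import subprocess

# mypy exits nonzero when it finds type errors, so the return code is reported
# rather than treated as a hard failure.
result = subprocess.run(
    ['mypy', '--xml-report', 'reports/xml', '--html-report', 'reports/html', 'myproject'],
)
print('mypy exit status:', result.returncode)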